code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
from __future__ import absolute_import
from copy import deepcopy
from datetime import datetime
from django.core.exceptions import MultipleObjectsReturned
from django.test import TestCase
from django.utils.translation import ugettext_lazy
from .models import Article, Reporter
class ManyToOneTests(TestCase):
    """Exercise ForeignKey (many-to-one) relations between Article and Reporter.

    Covers forward and reverse accessors, descriptor assignment, filtering
    across the relation, cascading deletes, and explicit foreign-key ids.
    Written for Python 2 era Django (note ``unicode`` and
    ``assertRaisesRegexp`` below).
    """

    def setUp(self):
        # Create a few Reporters.
        self.r = Reporter(first_name='John', last_name='Smith', email='john@example.com')
        self.r.save()
        self.r2 = Reporter(first_name='Paul', last_name='Jones', email='paul@example.com')
        self.r2.save()
        # Create an Article.
        self.a = Article(id=None, headline="This is a test",
                         pub_date=datetime(2005, 7, 27), reporter=self.r)
        self.a.save()

    def test_get(self):
        """Forward access: an Article exposes its related Reporter."""
        # Article objects have access to their related Reporter objects.
        r = self.a.reporter
        self.assertEqual(r.id, self.r.id)
        # These are strings instead of unicode strings because that's what was used in
        # the creation of this reporter (and we haven't refreshed the data from the
        # database, which always returns unicode strings).
        # NOTE(review): this mixes attributes from ``r`` and ``self.r`` —
        # presumably intentional since both refer to the same reporter.
        self.assertEqual((r.first_name, self.r.last_name), ('John', 'Smith'))

    def test_create(self):
        """Articles can be created from a reporter id instead of an instance."""
        # You can also instantiate an Article by passing the Reporter's ID
        # instead of a Reporter object.
        a3 = Article(id=None, headline="Third article",
                     pub_date=datetime(2005, 7, 27), reporter_id=self.r.id)
        a3.save()
        self.assertEqual(a3.reporter.id, self.r.id)
        # Similarly, the reporter ID can be a string.
        a4 = Article(id=None, headline="Fourth article",
                     pub_date=datetime(2005, 7, 27), reporter_id=str(self.r.id))
        a4.save()
        self.assertEqual(repr(a4.reporter), "<Reporter: John Smith>")

    def test_add(self):
        """Reverse manager: create()/add() attach articles to a reporter."""
        # Create an Article via the Reporter object.
        new_article = self.r.article_set.create(headline="John's second story",
                                                pub_date=datetime(2005, 7, 29))
        self.assertEqual(repr(new_article), "<Article: John's second story>")
        self.assertEqual(new_article.reporter.id, self.r.id)
        # Create a new article, and add it to the article set.
        new_article2 = Article(headline="Paul's story", pub_date=datetime(2006, 1, 17))
        self.r.article_set.add(new_article2)
        self.assertEqual(new_article2.reporter.id, self.r.id)
        self.assertQuerysetEqual(self.r.article_set.all(),
            [
                "<Article: John's second story>",
                "<Article: Paul's story>",
                "<Article: This is a test>",
            ])
        # Add the same article to a different article set - check that it moves.
        self.r2.article_set.add(new_article2)
        self.assertEqual(new_article2.reporter.id, self.r2.id)
        self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
        # Adding an object of the wrong type raises TypeError.
        with self.assertRaisesRegexp(TypeError, "'Article' instance expected, got <Reporter.*"):
            self.r.article_set.add(self.r2)
        self.assertQuerysetEqual(self.r.article_set.all(),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])

    def test_assign(self):
        """Assignment through the forward and reverse descriptors."""
        new_article = self.r.article_set.create(headline="John's second story",
                                                pub_date=datetime(2005, 7, 29))
        new_article2 = self.r2.article_set.create(headline="Paul's story",
                                                  pub_date=datetime(2006, 1, 17))
        # Assign the article to the reporter directly using the descriptor.
        new_article2.reporter = self.r
        new_article2.save()
        self.assertEqual(repr(new_article2.reporter), "<Reporter: John Smith>")
        self.assertEqual(new_article2.reporter.id, self.r.id)
        self.assertQuerysetEqual(self.r.article_set.all(), [
            "<Article: John's second story>",
            "<Article: Paul's story>",
            "<Article: This is a test>",
        ])
        self.assertQuerysetEqual(self.r2.article_set.all(), [])
        # Set the article back again using set descriptor.
        self.r2.article_set = [new_article, new_article2]
        self.assertQuerysetEqual(self.r.article_set.all(), ["<Article: This is a test>"])
        self.assertQuerysetEqual(self.r2.article_set.all(),
            [
                "<Article: John's second story>",
                "<Article: Paul's story>",
            ])
        # Funny case - assignment notation can only go so far; because the
        # ForeignKey cannot be null, existing members of the set must remain.
        self.r.article_set = [new_article]
        self.assertQuerysetEqual(self.r.article_set.all(),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
        # Reporter cannot be null - there should not be a clear or remove method
        self.assertFalse(hasattr(self.r2.article_set, 'remove'))
        self.assertFalse(hasattr(self.r2.article_set, 'clear'))

    def test_selects(self):
        """Filtering across the relation in the forward direction."""
        new_article = self.r.article_set.create(headline="John's second story",
                                                pub_date=datetime(2005, 7, 29))
        new_article2 = self.r2.article_set.create(headline="Paul's story",
                                                  pub_date=datetime(2006, 1, 17))
        # Reporter objects have access to their related Article objects.
        self.assertQuerysetEqual(self.r.article_set.all(), [
            "<Article: John's second story>",
            "<Article: This is a test>",
        ])
        self.assertQuerysetEqual(self.r.article_set.filter(headline__startswith='This'),
                                 ["<Article: This is a test>"])
        self.assertEqual(self.r.article_set.count(), 2)
        self.assertEqual(self.r2.article_set.count(), 1)
        # Get articles by id
        self.assertQuerysetEqual(Article.objects.filter(id__exact=self.a.id),
                                 ["<Article: This is a test>"])
        self.assertQuerysetEqual(Article.objects.filter(pk=self.a.id),
                                 ["<Article: This is a test>"])
        # Query on an article property
        self.assertQuerysetEqual(Article.objects.filter(headline__startswith='This'),
                                 ["<Article: This is a test>"])
        # The API automatically follows relationships as far as you need.
        # Use double underscores to separate relationships.
        # This works as many levels deep as you want. There's no limit.
        # Find all Articles for any Reporter whose first name is "John".
        self.assertQuerysetEqual(Article.objects.filter(reporter__first_name__exact='John'),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        # Check that implied __exact also works
        self.assertQuerysetEqual(Article.objects.filter(reporter__first_name='John'),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        # Query twice over the related field.
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__first_name__exact='John',
                                   reporter__last_name__exact='Smith'),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        # The underlying query only makes one join when a related table is referenced twice.
        queryset = Article.objects.filter(reporter__first_name__exact='John',
                                          reporter__last_name__exact='Smith')
        self.assertNumQueries(1, list, queryset)
        self.assertEqual(queryset.query.get_compiler(queryset.db).as_sql()[0].count('INNER JOIN'), 1)
        # The automatically joined table has a predictable name.
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__first_name__exact='John').extra(
                where=["many_to_one_reporter.last_name='Smith'"]),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        # ... and should work fine with the unicode that comes out of forms.Form.cleaned_data
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__first_name__exact='John'
                                   ).extra(where=["many_to_one_reporter.last_name='%s'" % 'Smith']),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        # Find all Articles for a Reporter.
        # Use direct ID check, pk check, and object comparison
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__id__exact=self.r.id),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__pk=self.r.id),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(
            Article.objects.filter(reporter=self.r.id),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(
            Article.objects.filter(reporter=self.r),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__in=[self.r.id,self.r2.id]).distinct(),
            [
                "<Article: John's second story>",
                "<Article: Paul's story>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__in=[self.r,self.r2]).distinct(),
            [
                "<Article: John's second story>",
                "<Article: Paul's story>",
                "<Article: This is a test>",
            ])
        # You can also use a queryset instead of a literal list of instances.
        # The queryset must be reduced to a list of values using values(),
        # then converted into a query
        self.assertQuerysetEqual(
            Article.objects.filter(
                reporter__in=Reporter.objects.filter(first_name='John').values('pk').query
            ).distinct(),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])

    def test_reverse_selects(self):
        """Filtering across the relation in the reverse direction."""
        a3 = Article.objects.create(id=None, headline="Third article",
                                    pub_date=datetime(2005, 7, 27), reporter_id=self.r.id)
        a4 = Article.objects.create(id=None, headline="Fourth article",
                                    pub_date=datetime(2005, 7, 27), reporter_id=str(self.r.id))
        # Reporters can be queried
        self.assertQuerysetEqual(Reporter.objects.filter(id__exact=self.r.id),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(Reporter.objects.filter(pk=self.r.id),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(Reporter.objects.filter(first_name__startswith='John'),
                                 ["<Reporter: John Smith>"])
        # Reporters can query in opposite direction of ForeignKey definition
        self.assertQuerysetEqual(Reporter.objects.filter(article__id__exact=self.a.id),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(Reporter.objects.filter(article__pk=self.a.id),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(Reporter.objects.filter(article=self.a.id),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(Reporter.objects.filter(article=self.a),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__in=[self.a.id,a3.id]).distinct(),
            ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__in=[self.a.id,a3]).distinct(),
            ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__in=[self.a,a3]).distinct(),
            ["<Reporter: John Smith>"])
        # Without distinct(), a reporter appears once per matching article.
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__headline__startswith='T'),
            ["<Reporter: John Smith>", "<Reporter: John Smith>"])
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__headline__startswith='T').distinct(),
            ["<Reporter: John Smith>"])
        # Counting in the opposite direction works in conjunction with distinct()
        self.assertEqual(
            Reporter.objects.filter(article__headline__startswith='T').count(), 2)
        self.assertEqual(
            Reporter.objects.filter(article__headline__startswith='T').distinct().count(), 1)
        # Queries can go round in circles.
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__reporter__first_name__startswith='John'),
            [
                "<Reporter: John Smith>",
                "<Reporter: John Smith>",
                "<Reporter: John Smith>",
            ])
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__reporter__first_name__startswith='John').distinct(),
            ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__reporter__exact=self.r).distinct(),
            ["<Reporter: John Smith>"])
        # Check that implied __exact also works.
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__reporter=self.r).distinct(),
            ["<Reporter: John Smith>"])
        # It's possible to use values() calls across many-to-one relations.
        # (Note, too, that we clear the ordering here so as not to drag the
        # 'headline' field into the columns being used to determine uniqueness)
        d = {'reporter__first_name': 'John', 'reporter__last_name': 'Smith'}
        self.assertEqual([d],
            list(Article.objects.filter(reporter=self.r).distinct().order_by()
                 .values('reporter__first_name', 'reporter__last_name')))

    def test_select_related(self):
        # Check that Article.objects.select_related().dates() works properly when
        # there are multiple Articles with the same date but different foreign-key
        # objects (Reporters).
        r1 = Reporter.objects.create(first_name='Mike', last_name='Royko', email='royko@suntimes.com')
        r2 = Reporter.objects.create(first_name='John', last_name='Kass', email='jkass@tribune.com')
        a1 = Article.objects.create(headline='First', pub_date=datetime(1980, 4, 23), reporter=r1)
        a2 = Article.objects.create(headline='Second', pub_date=datetime(1980, 4, 23), reporter=r2)
        self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'day')),
            [
                datetime(1980, 4, 23, 0, 0),
                datetime(2005, 7, 27, 0, 0),
            ])
        self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'month')),
            [
                datetime(1980, 4, 1, 0, 0),
                datetime(2005, 7, 1, 0, 0),
            ])
        self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'year')),
            [
                datetime(1980, 1, 1, 0, 0),
                datetime(2005, 1, 1, 0, 0),
            ])

    def test_delete(self):
        """Deleting a reporter cascades to its articles."""
        new_article = self.r.article_set.create(headline="John's second story",
                                                pub_date=datetime(2005, 7, 29))
        new_article2 = self.r2.article_set.create(headline="Paul's story",
                                                  pub_date=datetime(2006, 1, 17))
        a3 = Article.objects.create(id=None, headline="Third article",
                                    pub_date=datetime(2005, 7, 27), reporter_id=self.r.id)
        a4 = Article.objects.create(id=None, headline="Fourth article",
                                    pub_date=datetime(2005, 7, 27), reporter_id=str(self.r.id))
        # If you delete a reporter, his articles will be deleted.
        self.assertQuerysetEqual(Article.objects.all(),
            [
                "<Article: Fourth article>",
                "<Article: John's second story>",
                "<Article: Paul's story>",
                "<Article: Third article>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(Reporter.objects.order_by('first_name'),
            [
                "<Reporter: John Smith>",
                "<Reporter: Paul Jones>",
            ])
        self.r2.delete()
        self.assertQuerysetEqual(Article.objects.all(),
            [
                "<Article: Fourth article>",
                "<Article: John's second story>",
                "<Article: Third article>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(Reporter.objects.order_by('first_name'),
                                 ["<Reporter: John Smith>"])
        # You can delete using a JOIN in the query.
        Reporter.objects.filter(article__headline__startswith='This').delete()
        self.assertQuerysetEqual(Reporter.objects.all(), [])
        self.assertQuerysetEqual(Article.objects.all(), [])

    def test_regression_12876(self):
        # Regression for #12876 -- Model methods that include queries that
        # recursive don't cause recursion depth problems under deepcopy.
        self.r.cached_query = Article.objects.filter(reporter=self.r)
        self.assertEqual(repr(deepcopy(self.r)), "<Reporter: John Smith>")

    def test_explicit_fk(self):
        # Create a new Article with get_or_create using an explicit value
        # for a ForeignKey.
        a2, created = Article.objects.get_or_create(id=None,
                                                    headline="John's second test",
                                                    pub_date=datetime(2011, 5, 7),
                                                    reporter_id=self.r.id)
        self.assertTrue(created)
        self.assertEqual(a2.reporter.id, self.r.id)
        # You can specify filters containing the explicit FK value.
        self.assertQuerysetEqual(
            Article.objects.filter(reporter_id__exact=self.r.id),
            [
                "<Article: John's second test>",
                "<Article: This is a test>",
            ])
        # Create an Article by Paul for the same date.
        a3 = Article.objects.create(id=None, headline="Paul's commentary",
                                    pub_date=datetime(2011, 5, 7),
                                    reporter_id=self.r2.id)
        self.assertEqual(a3.reporter.id, self.r2.id)
        # Get should respect explicit foreign keys as well.
        self.assertRaises(MultipleObjectsReturned,
                          Article.objects.get, reporter_id=self.r.id)
        self.assertEqual(repr(a3),
                         repr(Article.objects.get(reporter_id=self.r2.id,
                                                  pub_date=datetime(2011, 5, 7))))

    def test_manager_class_caching(self):
        """The dynamically built related-manager class is created only once."""
        r1 = Reporter.objects.create(first_name='Mike')
        r2 = Reporter.objects.create(first_name='John')
        # Same twice
        self.assertTrue(r1.article_set.__class__ is r1.article_set.__class__)
        # Same as each other
        self.assertTrue(r1.article_set.__class__ is r2.article_set.__class__)

    def test_create_relation_with_ugettext_lazy(self):
        """Lazy translation strings are resolved when saved via a related manager."""
        reporter = Reporter.objects.create(first_name='John',
                                           last_name='Smith',
                                           email='john.smith@example.com')
        lazy = ugettext_lazy('test')
        reporter.article_set.create(headline=lazy,
                                    pub_date=datetime(2011, 6, 10))
        # Python 2: force the lazy proxy into a concrete unicode string.
        notlazy = unicode(lazy)
        article = reporter.article_set.get()
        self.assertEqual(article.headline, notlazy)
| rebost/django | tests/modeltests/many_to_one/tests.py | Python | bsd-3-clause | 20,874 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp import tools
from openerp.osv import osv
from openerp.osv import fields
from openerp.tools.translate import _
class invite_wizard(osv.osv_memory):
    """ Wizard to invite partners and make them followers of a document.

    Subscribes the selected partners to the record identified by
    (res_model, res_id) and sends each *new* follower an invitation mail.
    """
    _name = 'mail.wizard.invite'
    _description = 'Invite wizard'

    def default_get(self, cr, uid, fields, context=None):
        """Prefill the invitation message with the followed document's name."""
        result = super(invite_wizard, self).default_get(cr, uid, fields, context=context)
        if 'message' in fields and result.get('res_model') and result.get('res_id'):
            document_name = self.pool.get(result.get('res_model')).name_get(cr, uid, [result.get('res_id')], context=context)[0][1]
            # Translate first, interpolate afterwards: _() must receive the
            # literal format string, otherwise the translation lookup can
            # never match an entry in the .po files.
            message = _('<div>You have been invited to follow %s.</div>') % document_name
            result['message'] = message
        elif 'message' in fields:
            result['message'] = _('<div>You have been invited to follow a new document.</div>')
        return result

    _columns = {
        'res_model': fields.char('Related Document Model', size=128,
                                 required=True, select=1,
                                 help='Model of the followed resource'),
        'res_id': fields.integer('Related Document ID', select=1,
                                 help='Id of the followed resource'),
        'partner_ids': fields.many2many('res.partner', string='Partners'),
        'message': fields.html('Message'),
    }

    def add_followers(self, cr, uid, ids, context=None):
        """Subscribe the wizard's partners and mail the new followers.

        Returns the standard action dict that closes the wizard window.
        """
        for wizard in self.browse(cr, uid, ids, context=context):
            model_obj = self.pool.get(wizard.res_model)
            document = model_obj.browse(cr, uid, wizard.res_id, context=context)
            # filter partner_ids to get the new followers, to avoid sending
            # email to already following partners. message_follower_ids holds
            # browse records, so compare id to id: testing `p.id in <records>`
            # would never match and every partner would be re-notified.
            existing_follower_ids = set(f.id for f in document.message_follower_ids)
            new_follower_ids = [p.id for p in wizard.partner_ids if p.id not in existing_follower_ids]
            model_obj.message_subscribe(cr, uid, [wizard.res_id], new_follower_ids, context=context)
            # send an email only if the wizard carries an invitation message
            if wizard.message:
                # append the inviting user's signature, if any
                user_id = self.pool.get("res.users").read(cr, uid, [uid], fields=["signature"], context=context)[0]
                signature = user_id and user_id["signature"] or ''
                if signature:
                    wizard.message = tools.append_content_to_html(wizard.message, signature, plaintext=True, container_tag='div')
                # pool lookup hoisted out of the per-follower loop
                mail_mail = self.pool.get('mail.mail')
                # send mail to new followers
                for follower_id in new_follower_ids:
                    # NOTE(review): the original comment claimed the mail should
                    # not be linked to any document, yet model/res_id are set —
                    # kept as-is to preserve behaviour; verify intent upstream.
                    mail_id = mail_mail.create(cr, uid, {
                        'model': wizard.res_model,
                        'res_id': wizard.res_id,
                        'subject': 'Invitation to follow %s' % document.name_get()[0][1],
                        'body_html': '%s' % wizard.message,
                        'auto_delete': True,
                    }, context=context)
                    mail_mail.send(cr, uid, [mail_id], recipient_ids=[follower_id], context=context)
        return {'type': 'ir.actions.act_window_close'}
| tedi3231/openerp | openerp/addons/mail/wizard/invite.py | Python | agpl-3.0 | 4,210 |
# -*- coding: utf-8 -*-
import copy
from ruamel.yaml import YAML
from six import iteritems
# Top-level keys that must exist in the base config file (see Config._validate).
_required = ['server']
class Config(object):
    """YAML-backed configuration with support for included config files.

    The base file may list other config names under an ``import`` key;
    included files are merged in, with the base file taking precedence.
    Only keys that came from the base file (or were set at runtime) are
    written back by writeConfig().
    """

    def __init__(self, configFile):
        # Path to the base config file; nothing is read until loadConfig().
        self.configFile = configFile
        self._configData = {}
        self.yaml = YAML()
        # Keys present in the base file (not pulled in via 'import'),
        # plus any keys assigned at runtime; used to filter writeConfig().
        self._inBaseConfig = []

    def loadConfig(self):
        """Read, validate and install the configuration from disk.

        Raises:
            ConfigError: if any file cannot be read/parsed or a required
                key is missing.
        """
        configData = self._readConfig(self.configFile)
        self._validate(configData)
        self._configData = configData

    def _readConfig(self, fileName):
        """Recursively read *fileName*, merging any files it imports."""
        try:
            with open(fileName, mode='r') as config:
                configData = self.yaml.load(config)
                if not configData:
                    configData = {}
                # if this is the base server config, store what keys we loaded
                if fileName == self.configFile:
                    self._inBaseConfig = list(configData.keys())
        except Exception as e:
            raise ConfigError(fileName, e)
        if 'import' not in configData:
            return configData
        for fname in configData['import']:
            includeConfig = self._readConfig('configs/{}.yaml'.format(fname))
            for key, val in iteritems(includeConfig):
                # not present in base config, just assign it
                if key not in configData:
                    configData[key] = val
                    continue
                # skip non-collection types that are already set
                if isinstance(configData[key], (str, int)):
                    continue
                if isinstance(val, str):
                    raise ConfigError(fname, 'The included config file tried '
                                             'to merge a non-string with a '
                                             'string')
                try:
                    iter(configData[key])
                    iter(val)
                except TypeError:
                    # not a collection, so just don't merge them
                    pass
                else:
                    try:
                        # merge with + operator
                        configData[key] += val
                    except TypeError:
                        # dicts can't merge with +
                        try:
                            for subKey, subVal in iteritems(val):
                                if subKey not in configData[key]:
                                    configData[key][subKey] = subVal
                        except (AttributeError, TypeError):
                            # if either of these, they weren't both dicts.
                            raise ConfigError(fname, 'The variable {!r} could '
                                                     'not be successfully '
                                                     'merged'.format(key))
        return configData

    def writeConfig(self):
        """Persist the base-file keys (and runtime additions) back to disk."""
        # filter the configData to only those keys
        # that were present in the base server config,
        # or have been modified at runtime
        configData = copy.deepcopy(self._configData)
        to_delete = set(configData.keys()).difference(self._inBaseConfig)
        for key in to_delete:
            del configData[key]
        # write the filtered configData
        try:
            with open(self.configFile, mode='w') as config:
                self.yaml.dump(configData, config)
        except Exception as e:
            raise ConfigError(self.configFile, e)

    def getWithDefault(self, key, default=None):
        """Return the value for *key*, or *default* when the key is absent."""
        # dict.get expresses the lookup-with-fallback directly.
        return self._configData.get(key, default)

    def _validate(self, configData):
        """Raise ConfigError unless every required key is present."""
        for key in _required:
            if key not in configData:
                raise ConfigError(self.configFile, 'Required item {!r} was not found in the config.'.format(key))

    def __len__(self):
        return len(self._configData)

    def __iter__(self):
        return iter(self._configData)

    def __getitem__(self, key):
        return self._configData[key]

    def __setitem__(self, key, value):
        # mark this key to be saved in the server config
        if key not in self._inBaseConfig:
            self._inBaseConfig.append(key)
        self._configData[key] = value

    def __contains__(self, key):
        return key in self._configData
class ConfigError(Exception):
    """Raised when a config file cannot be read, parsed, or validated."""

    def __init__(self, configFile, message):
        # Keep both pieces available to callers that inspect the error.
        self.configFile = configFile
        self.message = message

    def __str__(self):
        return ('An error occurred while reading config file '
                '{file}: {msg}'.format(file=self.configFile,
                                       msg=self.message))
| MatthewCox/PyMoronBot | pymoronbot/config.py | Python | mit | 4,668 |
import sublime, sublime_plugin
import os
class print_file(sublime_plugin.TextCommand):
    """Sublime Text command: print the current file (with line numbers) via lpr."""

    def run(self, edit):
        # Local stdlib import keeps the plugin's top-level imports unchanged.
        import shlex
        path = self.view.file_name()
        if path is None:
            # Unsaved buffers have no path; the old code would have printed
            # a file literally named "None".
            self.view.show_popup("Buffer has no file on disk")
            return
        # shlex.quote prevents quotes/spaces/metacharacters in the file name
        # from being interpreted by the shell (the old "%s" interpolation was
        # injectable via a crafted file name).
        os.system('cat -n %s > tmp.print; lpr tmp.print' % shlex.quote(path))
        self.view.show_popup("JIZZ!!")
| NCTU-PCCA/NCTU_Yggdrasill | codebook/ToBeClassify/print_file.py | Python | mit | 221 |
### Attribute IDS
# Numeric identifiers for lobby/game attributes. The values are protocol
# constants; do not renumber them.

# Controller / rules
CONTROLLER = 500
RULES = 1000
IS_PREMADE_GAME = 1001

# Party arrangement (premade team sizes)
PARTIES_PRIVATE = 2000
PARTIES_PREMADE = 2001
PARTIES_PREMADE_1V1 = 2002
PARTIES_PREMADE_2V2 = 2003
PARTIES_PREMADE_3V3 = 2004
PARTIES_PREMADE_4V4 = 2005
PARTIES_PREMADE_FFA = 2006
PARTIES_PREMADE_5V5 = 2007
PARTIES_PREMADE_6V6 = 2008

# Party arrangement (private/custom games)
PARTIES_PRIVATE_ONE = 2010
PARTIES_PRIVATE_TWO = 2011
PARTIES_PRIVATE_THREE = 2012
PARTIES_PRIVATE_FOUR = 2013
PARTIES_PRIVATE_FIVE = 2014
PARTIES_PRIVATE_SIX = 2015
PARTIES_PRIVATE_SEVEN = 2016
PARTIES_PRIVATE_FFA = 2017
PARTIES_PRIVATE_CUSTOM = 2018
PARTIES_PRIVATE_EIGHT = 2019
PARTIES_PRIVATE_NINE = 2020
PARTIES_PRIVATE_TEN = 2021
PARTIES_PRIVATE_ELEVEN = 2022
PARTIES_PRIVATE_FFA_TANDEM = 2023
PARTIES_PRIVATE_CUSTOM_TANDEM = 2024

# Per-player / per-slot settings
GAME_SPEED = 3000
RACE = 3001
PARTY_COLOR = 3002
HANDICAP = 3003
AI_SKILL = 3004
AI_RACE = 3005
LOBBY_DELAY = 3006
PARTICIPANT_ROLE = 3007
WATCHER_TYPE = 3008
GAME_MODE = 3009
LOCKED_ALLIANCES = 3010
PLAYER_LOGO = 3011
TANDEM_LEADER = 3012
COMMANDER = 3013
COMMANDER_LEVEL = 3014
GAME_DURATION = 3015
COMMANDER_MASTERY_LEVEL = 3016
# AI build orders occupy an id range rather than individual constants.
AI_BUILD_FIRST = 3100
AI_BUILD_LAST = 3300

# Lobby state
PRIVACY_OPTION = 4000
USING_CUSTOM_OBSERVER_UI = 4001
CAN_READY = 4009
LOBBY_MODE = 4010
READY_ORDER_DEPRECATED = 4011
ACTIVE_TEAM = 4012
LOBBY_PHASE = 4015
READYING_COUNT_DEPRECATED = 4016
ACTIVE_ROUND = 4017
READY_MODE = 4018
READY_REQUIREMENTS = 4019
FIRST_ACTIVE_TEAM = 4020

# Commander mastery talents occupy an id range.
COMMANDER_MASTERY_TALENT_FIRST = 5000
COMMANDER_MASTERY_TALENT_LAST = 5005
| Blizzard/s2protocol | s2protocol/attributes.py | Python | mit | 1,472 |
"""
Wrapper for pcpe_core external module.
"""
from pypcpe2 import utility
import pcpe2_core
def compare_small_seqs(x_seq_path, y_seq_path):
    """
    Wrapper for pcpe2_core.compare_small_seqs.

    Users should not call the original function directly.

    Args:
        x_seq_path (str): path to the first input sequence file
        y_seq_path (str): path to the second input sequence file

    Return:
        whatever pcpe2_core.compare_small_seqs returns (paths of the
        intermediate comparison files)
    """
    return pcpe2_core.compare_small_seqs(x_seq_path, y_seq_path)
def sort_comsubseq_files(input_paths):
    """
    Wrapper for pcpe2_core.sort_comsubseq_files.

    Users should not call the original function directly.

    Args:
        input_paths: paths of ComSubseq files to sort

    Return:
        whatever pcpe2_core.sort_comsubseq_files returns (paths of the
        sorted files)
    """
    return pcpe2_core.sort_comsubseq_files(input_paths)
def max_sorted_comsubsq_files(input_paths):
    """
    Wrapper for pcpe2_core.max_sorted_comsubsq_files.

    Users should not call the original function directly.

    Args:
        input_paths: paths of sorted ComSubseq files

    Return:
        whatever pcpe2_core.max_sorted_comsubsq_files returns (paths of
        the files holding only maximal subsequences)
    """
    return pcpe2_core.max_sorted_comsubsq_files(input_paths)
def compare_seqs(x_seq_path, y_seq_path):
    """
    Find every common subsequence shared by two sequence files.

    Runs a three-stage pipeline: compare the raw files, sort the
    intermediate ComSubseq files, and reduce them to the maximal
    subsequences. The per-stage outputs are then merged into one file.

    Args:
        x_seq_path (str): The input sequence file
        y_seq_path (str): The compared input sequence file

    Return:
        A string present a path which contains list of ComSubseq
    """
    compared_paths = compare_small_seqs(x_seq_path, y_seq_path)
    maximal_paths = max_sorted_comsubsq_files(
        sort_comsubseq_files(compared_paths))

    result_path = utility.make_temp_path("cs_result.bin")
    utility.merge_file(maximal_paths, result_path)

    return result_path
| yen3/pypcpe2 | pypcpe2/core.py | Python | bsd-3-clause | 1,407 |
# RandTalkBot Bot matching you with a random person on Telegram.
# Copyright (C) 2016 quasiyoke
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime
from unittest.mock import create_autospec
import asynctest
from asynctest.mock import call, patch, Mock, CoroutineMock
from peewee import SqliteDatabase
from randtalkbot import stranger
from randtalkbot.errors import MissingPartnerError, StrangerError
from randtalkbot.stranger import Stranger
from randtalkbot.stranger_sender import StrangerSenderError
from randtalkbot.stranger_sender_service import StrangerSenderService
from telepot.exception import TelegramError
# In-memory SQLite database so each test run starts from a clean slate.
DATABASE = SqliteDatabase(':memory:')
# Point the stranger module's peewee database proxy at the test database.
stranger.DATABASE_PROXY.initialize(DATABASE)
class TestStranger(asynctest.TestCase):
def setUp(self):
DATABASE.create_tables([Stranger])
self.stranger = Stranger.create(
invitation='foo',
telegram_id=31416,
)
self.stranger2 = Stranger.create(
invitation='bar',
telegram_id=27183,
)
self.stranger3 = Stranger.create(
invitation='baz',
telegram_id=23571,
)
self.stranger4 = Stranger.create(
invitation='zig',
telegram_id=11317,
)
def tearDown(self):
DATABASE.drop_tables([Stranger])
@asynctest.ignore_loop
def test_init(self):
stranger_instance = Stranger.get(Stranger.telegram_id == 31416)
self.assertEqual(stranger_instance.looking_for_partner_from, None)
@patch('randtalkbot.stranger.INVITATION_LENGTH', 5)
@asynctest.ignore_loop
def test_get_invitation(self):
invitation = Stranger.get_invitation()
self.assertIsInstance(invitation, str)
self.assertEqual(len(invitation), 5)
async def test_add_bonuses__ok(self):
self.stranger.bonus_count = 1000
self.stranger._notify_about_bonuses = CoroutineMock()
self.stranger.save = Mock()
await self.stranger._add_bonuses(31415)
self.stranger.save.assert_called_once_with()
self.assertEqual(self.stranger.bonus_count, 32415)
self.stranger._notify_about_bonuses.assert_called_once_with(31415)
async def test_add_bonuses__muted(self):
self.stranger.bonus_count = 1000
self.stranger._notify_about_bonuses = CoroutineMock()
self.stranger.save = Mock()
self.stranger._bonuses_notifications_muted = True
await self.stranger._add_bonuses(1)
self.stranger.save.assert_called_once_with()
self.assertEqual(self.stranger.bonus_count, 1001)
self.stranger._notify_about_bonuses.assert_not_called()
@patch('randtalkbot.stranger.asyncio.sleep', CoroutineMock())
@patch('randtalkbot.stranger.StatsService')
async def test_advertise__people_are_searching_chat_lacks_males(self, stats_service_mock):
from randtalkbot.stranger import asyncio as asyncio_mock
sender = CoroutineMock()
self.stranger.get_sender = Mock(return_value=sender)
self.stranger.get_start_args = Mock(return_value='foo_start_args')
self.stranger.looking_for_partner_from = datetime.datetime.utcnow()
self.stranger.save()
self.stranger2.looking_for_partner_from = datetime.datetime.utcnow()
self.stranger2.save()
stats_service_mock.get_instance \
.return_value \
.get_stats \
.return_value \
.get_sex_ratio \
.return_value = 0.9
await self.stranger._advertise()
asyncio_mock.sleep.assert_called_once_with(30)
self.assertEqual(
sender.send_notification.call_args_list,
[
call(
'The search is going on. {0} users are looking for partner -- change your'
' preferences (languages, partner\'s sex) using /setup command to talk with'
' them.\n'
'Chat *lacks males!* Send the link to your friends and earn {1} bonuses for'
' every invited male and {2} bonus for each female (the more bonuses you'
' have -- the faster partner\'s search will be):',
2,
3,
1,
disable_notification=True,
),
call(
'Do you want to talk with somebody, practice in foreign languages or you just'
' want to have some fun? Rand Talk will help you! It\'s a bot matching you'
' with a random stranger of desired sex speaking on your language. {0}',
'https://telegram.me/RandTalkBot?start=foo_start_args',
disable_notification=True,
disable_web_page_preview=True,
),
],
)
@patch('randtalkbot.stranger.asyncio', CoroutineMock())
@patch('randtalkbot.stranger.StatsService')
async def test_advertise__people_are_searching_chat_lacks_females(self, stats_service_mock):
from randtalkbot.stranger import asyncio as asyncio_mock
sender = CoroutineMock()
self.stranger.get_sender = Mock(return_value=sender)
self.stranger.get_invitation_link = Mock(return_value='foo_invitation_link')
self.stranger.looking_for_partner_from = datetime.datetime.utcnow()
self.stranger.save()
self.stranger2.looking_for_partner_from = datetime.datetime.utcnow()
self.stranger2.save()
stats_service_mock.get_instance \
.return_value \
.get_stats \
.return_value \
.get_sex_ratio \
.return_value = 1.1
await self.stranger._advertise()
asyncio_mock.sleep.assert_called_once_with(30)
self.assertEqual(
sender.send_notification.call_args_list,
[
call(
'The search is going on. {0} users are looking for partner -- change your'
' preferences (languages, partner\'s sex) using /setup command to talk with'
' them.\n'
'Chat *lacks females!* Send the link to your friends and earn {1} bonuses for'
' every invited female and {2} bonus for each male (the more bonuses you have'
' -- the faster partner\'s search will be):',
2,
3,
1,
disable_notification=True,
),
call(
'Do you want to talk with somebody, practice in foreign languages or you just'
' want to have some fun? Rand Talk will help you! It\'s a bot matching you'
' with a random stranger of desired sex speaking on your language. {0}',
'foo_invitation_link',
disable_notification=True,
disable_web_page_preview=True,
),
],
)
@patch('randtalkbot.stranger.asyncio', CoroutineMock())
async def test_advertise__people_are_not_searching(self):
sender = CoroutineMock()
self.stranger.get_sender = Mock(return_value=sender)
self.stranger.looking_for_partner_from = datetime.datetime.utcnow()
self.stranger.save()
await self.stranger._advertise()
sender.send_notification.assert_not_called()
    @patch('randtalkbot.stranger.asyncio', CoroutineMock())
    @patch('randtalkbot.stranger.StatsService')
    @patch('randtalkbot.stranger.LOGGER', Mock())
    async def test_advertise__stranger_has_blocked_the_bot(self, stats_service_mock):
        """_advertise() logs a warning instead of raising when notification
        delivery fails with TelegramError (e.g. the user blocked the bot).
        """
        from randtalkbot.stranger import LOGGER
        self.stranger.get_sender = Mock()
        # Every notification attempt fails with a TelegramError.
        self.stranger.get_sender.return_value.send_notification = CoroutineMock(
            side_effect=TelegramError({}, '', 0),
        )
        self.stranger.get_invitation_link = Mock(return_value='foo_invitation_link')
        self.stranger.looking_for_partner_from = datetime.datetime.utcnow()
        self.stranger.save()
        self.stranger2.looking_for_partner_from = datetime.datetime.utcnow()
        self.stranger2.save()
        stats_service_mock.get_instance \
            .return_value \
            .get_stats \
            .return_value \
            .get_sex_ratio \
            .return_value = 1.1
        await self.stranger._advertise()
        self.assertTrue(LOGGER.warning.called)
    @patch('randtalkbot.stranger.asyncio')
    async def test_advertise_later(self, asyncio_mock):
        """advertise_later() schedules the _advertise() coroutine on the event loop."""
        # _advertise() is mocked to return a marker so we can assert the exact
        # object handed to create_task.
        self.stranger._advertise = Mock(return_value='foo')
        asyncio_mock.sleep = CoroutineMock()
        self.stranger.advertise_later()
        asyncio_mock.get_event_loop.return_value.create_task.assert_called_once_with('foo')
    async def test_end_talk__not_chatting_or_looking_for_partner(self):
        """end_talk() on an idle stranger clears the partner silently -- no notification."""
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger.set_partner = CoroutineMock()
        await self.stranger.end_talk()
        self.assertEqual(self.stranger.looking_for_partner_from, None)
        sender.send_notification.assert_not_called()
        self.stranger.set_partner.assert_called_once_with(None)
    async def test_end_talk__chatting_stranger(self):
        """end_talk() during a chat notifies the talk ended (by_self) and drops the partner."""
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.get_partner = Mock(return_value=self.stranger2)
        self.stranger._notify_talk_ended = CoroutineMock()
        self.stranger.set_partner = CoroutineMock()
        await self.stranger.end_talk()
        self.stranger._notify_talk_ended.assert_called_once_with(by_self=True)
        self.assertEqual(self.stranger.looking_for_partner_from, None)
        self.stranger.set_partner.assert_called_once_with(None)
    @patch('randtalkbot.stranger.LOGGER', Mock())
    async def test_end_talk__chatting_stranger_has_blocked_the_bot(self):
        """end_talk() logs a warning when the end-of-talk notification raises
        StrangerError, but still clears the partner.
        """
        from randtalkbot.stranger import LOGGER
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.get_partner = Mock(return_value=self.stranger2)
        self.stranger._notify_talk_ended = CoroutineMock(side_effect=StrangerError())
        self.stranger.set_partner = CoroutineMock()
        await self.stranger.end_talk()
        self.assertTrue(LOGGER.warning.called)
        self.assertEqual(self.stranger.looking_for_partner_from, None)
        self.stranger.set_partner.assert_called_once_with(None)
    async def test_end_talk__looking_for_partner(self):
        """end_talk() while searching notifies the search was stopped and resets state."""
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.looking_for_partner_from = datetime.datetime(1970, 1, 1)
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger._notify_talk_ended = CoroutineMock()
        self.stranger.set_partner = CoroutineMock()
        await self.stranger.end_talk()
        sender.send_notification.assert_called_once_with('Looking for partner was stopped.')
        sender.send.assert_not_called()
        self.assertEqual(self.stranger.looking_for_partner_from, None)
        self.stranger.set_partner.assert_called_once_with(None)
    @patch('randtalkbot.stranger.LOGGER', Mock())
    async def test_end_talk__stranger_looking_for_partner_has_blocked_the_bot(self):
        """end_talk() still resets search state when the stop notification
        raises TelegramError (user blocked the bot).

        NOTE(review): LOGGER is patched but never asserted on here -- the test
        presumably only verifies the error doesn't propagate; confirm intent.
        """
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.looking_for_partner_from = datetime.datetime(1970, 1, 1)
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger.set_partner = CoroutineMock()
        sender.send_notification.side_effect = TelegramError({}, '', 0)
        await self.stranger.end_talk()
        sender.send_notification.assert_called_once_with('Looking for partner was stopped.')
        sender.send.assert_not_called()
        self.assertEqual(self.stranger.looking_for_partner_from, None)
        self.stranger.set_partner.assert_called_once_with(None)
    @asynctest.ignore_loop
    def test_get_common_languages__preserves_languages_order(self):
        """get_common_languages() keeps the order of *this* stranger's language list."""
        self.stranger.languages = '["foo", "bar", "baz", "boo", "zen"]'
        self.stranger2.languages = '["zen", "baz", "zig", "foo", "zam", "baz"]'
        self.assertEqual(self.stranger.get_common_languages(self.stranger2), ["foo", "baz", "zen"])
        # Same language sets, different order on self -> result order follows self.
        self.stranger.languages = '["zen", "bar", "baz", "foo", "boo"]'
        self.stranger2.languages = '["zen", "baz", "zig", "foo", "zam", "baz"]'
        self.assertEqual(self.stranger.get_common_languages(self.stranger2), ["zen", "baz", "foo"])
    @asynctest.ignore_loop
    def test_get_invitation_link(self):
        """get_invitation_link() embeds the start args into the bot's deep link."""
        self.stranger.get_start_args = Mock(return_value='foo_start_args')
        self.assertEqual(
            self.stranger.get_invitation_link(),
            'https://telegram.me/RandTalkBot?start=foo_start_args',
        )
    @asynctest.ignore_loop
    def test_get_languages__has_languages(self):
        """get_languages() decodes the JSON-encoded languages field."""
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.assertEqual(self.stranger.get_languages(), ["foo", "bar", "baz"])
    @asynctest.ignore_loop
    def test_get_languages__no_languages(self):
        """get_languages() returns an empty list when the field is unset."""
        self.stranger.languages = None
        self.assertEqual(self.stranger.get_languages(), [])
    @asynctest.ignore_loop
    def test_get_languages__corrupted_json(self):
        """get_languages() falls back to ['en'] when the stored JSON is invalid."""
        self.stranger.languages = '["foo'
        self.assertEqual(self.stranger.get_languages(), ['en'])
    @asynctest.ignore_loop
    def test_get_partner__cached(self):
        """get_partner() returns the cached _partner without consulting the talk."""
        self.stranger._partner = Mock()
        self.assertEqual(self.stranger.get_partner(), self.stranger._partner)
    @asynctest.ignore_loop
    def test_get_partner__none(self):
        """get_partner() returns None when there is no current talk."""
        self.stranger.get_talk = Mock(return_value=None)
        self.assertEqual(self.stranger.get_partner(), None)
    @asynctest.ignore_loop
    def test_get_partner__ok(self):
        """get_partner() asks the talk for the counterpart and caches the result."""
        talk = Mock()
        self.stranger.get_talk = Mock(return_value=talk)
        partner = talk.get_partner.return_value
        self.assertEqual(self.stranger.get_partner(), partner)
        talk.get_partner.assert_called_once_with(self.stranger)
        # Result must be memoized in _partner for subsequent calls.
        self.assertEqual(self.stranger._partner, partner)
    @asynctest.ignore_loop
    @patch('randtalkbot.stranger.StrangerSenderService', create_autospec(StrangerSenderService))
    def test_get_sender(self):
        """get_sender() delegates to the StrangerSenderService singleton."""
        from randtalkbot.stranger import StrangerSenderService as stranger_sender_service_cls_mock
        stranger_sender_service_cls_mock.get_instance.return_value.get_or_create_stranger_sender \
            .return_value = 'foo_sender'
        self.assertEqual(self.stranger.get_sender(), 'foo_sender')
        stranger_sender_service_cls_mock.get_instance.return_value.get_or_create_stranger_sender \
            .assert_called_once_with(self.stranger)
    @asynctest.ignore_loop
    def test_get_start_args(self):
        """get_start_args() yields the base64-encoded invitation payload for this stranger."""
        self.assertEqual(self.stranger.get_start_args(), 'eyJpIjoiZm9vIn0=')
    @asynctest.ignore_loop
    def test_get_talk__cached(self):
        """get_talk() returns the cached _talk without querying the Talk model."""
        self.stranger._talk = Mock()
        self.assertEqual(self.stranger.get_talk(), self.stranger._talk)
    @asynctest.ignore_loop
    @patch('randtalkbot.talk.Talk', Mock())
    def test_get_talk__ok(self):
        """get_talk() fetches the talk from the Talk model for this stranger."""
        from randtalkbot.talk import Talk
        self.assertEqual(self.stranger.get_talk(), Talk.get_talk.return_value)
        Talk.get_talk.assert_called_once_with(self.stranger)
    @asynctest.ignore_loop
    def test_is_novice__novice(self):
        """is_novice() is True when languages, sex and partner_sex are all unset."""
        self.stranger.languages = None
        self.stranger.sex = None
        self.stranger.partner_sex = None
        self.assertTrue(self.stranger.is_novice())
    @asynctest.ignore_loop
    def test_is_novice__not_novice(self):
        """is_novice() is False as soon as any profile field is filled."""
        self.stranger.languages = 'foo'
        self.stranger.sex = None
        self.stranger.partner_sex = None
        self.assertFalse(self.stranger.is_novice())
    @asynctest.ignore_loop
    def test_is_full__full(self):
        """is_full() is True when languages, sex and partner_sex are all set."""
        self.stranger.languages = 'foo'
        self.stranger.sex = 'foo'
        self.stranger.partner_sex = 'foo'
        self.assertTrue(self.stranger.is_full())
    @asynctest.ignore_loop
    def test_is_full__not_full(self):
        """is_full() is False when any profile field is still missing."""
        self.stranger.languages = 'foo'
        self.stranger.sex = 'foo'
        self.stranger.partner_sex = None
        self.assertFalse(self.stranger.is_full())
    async def test_kick__ok(self):
        """kick() notifies the talk ended (not by self), charges the bonus,
        and clears the cached partner and talk.
        """
        self.stranger._notify_talk_ended = CoroutineMock()
        self.stranger._pay_for_talk = Mock()
        self.stranger._partner = self.stranger2
        self.stranger._talk = 'foo_talk'
        await self.stranger.kick()
        self.stranger._notify_talk_ended.assert_called_once_with(by_self=False)
        self.stranger._pay_for_talk.assert_called_once_with()
        self.assertEqual(self.stranger._partner, None)
        self.assertEqual(self.stranger._talk, None)
    @patch('randtalkbot.stranger.LOGGER', Mock())
    async def test_kick__telegram_error(self):
        """kick() logs a warning and still pays/clears state when notification fails.

        NOTE(review): despite the method name, the injected error here is a
        StrangerError, not a TelegramError -- confirm which is intended.
        """
        from randtalkbot.stranger import LOGGER
        error = StrangerError()
        self.stranger._notify_talk_ended = CoroutineMock(side_effect=error)
        self.stranger._pay_for_talk = Mock()
        self.stranger._partner = self.stranger2
        self.stranger._talk = 'foo_talk'
        await self.stranger.kick()
        self.stranger._pay_for_talk.assert_called_once_with()
        self.assertEqual(self.stranger._partner, None)
        self.assertEqual(self.stranger._talk, None)
        LOGGER.warning.assert_called_once_with(
            'Kick. Can\'t notify stranger %d: %s',
            self.stranger.id,
            error,
        )
    @patch('randtalkbot.stranger.asyncio')
    async def test_mute_bonuses_notifications(self, asyncio_mock):
        """mute_bonuses_notifications() schedules the unmute coroutine on the loop."""
        self.stranger._unmute_bonuses_notifications = Mock(return_value='foo')
        self.stranger.mute_bonuses_notifications()
        asyncio_mock.get_event_loop.return_value.create_task.assert_called_once_with('foo')
    @patch('randtalkbot.stranger.asyncio', CoroutineMock())
    async def test_unmute_bonuses_notifications(self):
        """_unmute_bonuses_notifications() sleeps an hour, then reports only the
        bonuses earned since muting (1200 now - 1000 at mute time = 200).
        """
        from randtalkbot.stranger import asyncio as asyncio_mock
        self.stranger.bonus_count = 1200
        self.stranger._notify_about_bonuses = CoroutineMock()
        await self.stranger._unmute_bonuses_notifications(1000)
        asyncio_mock.sleep.assert_called_once_with(3600)
        self.stranger._notify_about_bonuses.assert_called_once_with(200)
    async def test_notify_about_bonuses__zero(self):
        """_notify_about_bonuses(0) sends nothing."""
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        await self.stranger._notify_about_bonuses(0)
        sender.send_notification.assert_not_called()
    async def test_notify_about_bonuses__one(self):
        """_notify_about_bonuses(1) uses the singular wording with the total count."""
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.bonus_count = 1000
        await self.stranger._notify_about_bonuses(1)
        sender.send_notification.assert_called_once_with(
            'You\'ve received one bonus for inviting a person to the bot.'
            ' Bonuses will help you to find partners quickly. Total bonuses count: {0}.'
            ' Congratulations!\n'
            'To mute this notifications, use /mute\\_bonuses.',
            1000,
        )
    async def test_notify_about_bonuses__many(self):
        """_notify_about_bonuses(n>1) uses the plural wording with count and total."""
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.bonus_count = 1000
        await self.stranger._notify_about_bonuses(2)
        sender.send_notification.assert_called_once_with(
            'You\'ve received {0} bonuses for inviting a person to the bot.'
            ' Bonuses will help you to find partners quickly. Total bonuses count: {1}.'
            ' Congratulations!\n'
            'To mute this notifications, use /mute\\_bonuses.',
            2,
            1000,
        )
    @patch('randtalkbot.stranger.LOGGER', Mock())
    async def test_notify_about_bonuses__telegram_error(self):
        """_notify_about_bonuses() logs at info level when delivery fails."""
        from randtalkbot.stranger import LOGGER
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        error = TelegramError({}, '', 0)
        sender.send_notification.side_effect = error
        await self.stranger._notify_about_bonuses(1)
        # 1 is the stranger's id as created by setUp.
        LOGGER.info.assert_called_once_with('Can\'t notify stranger %d about bonuses: %s', 1, error)
    async def test_notify_talk_ended__by_self_no_bonuses(self):
        """_notify_talk_ended(by_self=True) with no bonuses joins the two
        translated sentences into a single notification.
        """
        sender = CoroutineMock()
        # sender._ is the translation callable; side_effect yields the
        # translated strings in the order they're requested.
        sender._ = Mock(side_effect=['Chat was finished.', 'Feel free to /begin a new talk.'])
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.get_partner = Mock(return_value=None)
        talk = Mock()
        talk.is_successful.return_value = True
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 0
        await self.stranger._notify_talk_ended(by_self=True)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Chat was finished.'),
                call('Feel free to /begin a new talk.'),
            ],
        )
        sender.send_notification \
            .assert_called_once_with('Chat was finished. Feel free to /begin a new talk.')
    async def test_notify_talk_ended__not_by_self_no_bonuses(self):
        """_notify_talk_ended(by_self=False) uses the 'partner has left' wording."""
        sender = CoroutineMock()
        sender._ = Mock(
            side_effect=['Your partner has left chat.', 'Feel free to /begin a new talk.'],
        )
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.get_partner = Mock(return_value=None)
        talk = Mock()
        talk.is_successful.return_value = True
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 0
        await self.stranger._notify_talk_ended(by_self=False)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Your partner has left chat.'),
                call('Feel free to /begin a new talk.'),
            ],
        )
        sender.send_notification.assert_called_once_with(
            'Your partner has left chat. Feel free to /begin a new talk.',
        )
    async def test_notify_talk_ended__telegram_error(self):
        """_notify_talk_ended() wraps a TelegramError from the sender in StrangerError."""
        sender = CoroutineMock()
        sender.send_notification.side_effect = TelegramError({}, '', 0)
        sender._ = Mock(
            side_effect=['Your partner has left chat.', 'Feel free to /begin a new talk.'],
        )
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.get_partner = Mock(return_value=None)
        talk = Mock()
        talk.is_successful.return_value = True
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 0
        with self.assertRaises(StrangerError):
            await self.stranger._notify_talk_ended(by_self=False)
    async def test_notify_talk_ended__not_by_self_was_used_last_bonus(self):
        """With bonus_count == 1 the 'last bonus used' sentence is inserted
        between the leave notice and the /begin prompt.
        """
        sender = CoroutineMock()
        sender._ = Mock(
            side_effect=[
                'Your partner has left chat.',
                'You\'ve used your last bonus.',
                'Feel free to /begin a new talk.',
            ],
        )
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.get_partner = Mock(return_value=None)
        talk = Mock()
        talk.is_successful.return_value = True
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 1
        await self.stranger._notify_talk_ended(by_self=False)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Your partner has left chat.'),
                call('You\'ve used your last bonus.'),
                call('Feel free to /begin a new talk.'),
            ],
        )
        sender.send_notification.assert_called_once_with(
            'Your partner has left chat. You\'ve used your last bonus. Feel free to /begin'
            ' a new talk.',
        )
    async def test_notify_talk_ended__not_by_self_was_used_a_bonus(self):
        """With bonus_count > 1 the remaining-bonuses sentence is interpolated
        (1000 - 1 for this talk = 999 left).
        """
        sender = CoroutineMock()
        sender._ = Mock(
            side_effect=[
                'Your partner has left chat.',
                'You\'ve used one bonus. {0} bonus(es) left.',
                'Feel free to /begin a new talk.',
            ],
        )
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.get_partner = Mock(return_value=None)
        talk = Mock()
        talk.is_successful.return_value = True
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 1000
        await self.stranger._notify_talk_ended(by_self=False)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Your partner has left chat.'),
                call('You\'ve used one bonus. {0} bonus(es) left.'),
                call('Feel free to /begin a new talk.'),
            ],
        )
        sender.send_notification.assert_called_once_with(
            'Your partner has left chat. You\'ve used one bonus. 999 bonus(es) left. Feel free'
            ' to /begin a new talk.',
        )
    @patch('randtalkbot.stranger.get_languages_names', Mock())
    async def test_notify_partner_found__all_languages_are_common(self):
        """When every language is shared, no language hint is added to the message."""
        sender = CoroutineMock()
        sender.update_translation = Mock()
        sender._ = Mock(side_effect=['Your partner is here.', 'Have a nice chat!'])
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger2.languages = '["baz", "bar", "foo"]'
        await self.stranger.notify_partner_found(self.stranger2)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Your partner is here.'),
                call('Have a nice chat!'),
            ],
        )
        sender.send_notification.assert_called_once_with(
            'Your partner is here. Have a nice chat!',
        )
    @patch('randtalkbot.stranger.get_languages_names', Mock())
    async def test_notify_partner_found__had_partner_already_no_bonuses(self):
        """A stranger already in a chat gets the 'another stranger' wording."""
        sender = CoroutineMock()
        sender.update_translation = Mock()
        sender._ = Mock(side_effect=['Here\'s another stranger.', 'Have a nice chat!'])
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz"]'
        # Existing partner -> the previous talk is being replaced.
        self.stranger.get_partner = Mock(return_value=self.stranger3)
        talk = Mock()
        talk.is_successful.return_value = True
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 0
        self.stranger2.languages = '["baz", "bar", "foo"]'
        await self.stranger.notify_partner_found(self.stranger2)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Here\'s another stranger.'),
                call('Have a nice chat!'),
            ],
        )
        sender.send_notification.assert_called_once_with(
            'Here\'s another stranger. Have a nice chat!',
        )
    @patch('randtalkbot.stranger.get_languages_names', Mock())
    async def test_notify_partner_found__updates_translation(self):
        """notify_partner_found() must call update_translation before formatting,
        so the message is built with the new translation callable.
        """
        def update_translation(unused_partner=None):
            # Simulates StrangerSender.update_translation swapping the gettext hook.
            sender._ = new_translation
        old_translation = Mock()
        new_translation = Mock(side_effect=['Use {0} please.', 'Your partner is here.'])
        sender = CoroutineMock()
        sender.update_translation = Mock(side_effect=update_translation)
        sender._ = old_translation
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.stranger.get_partner = Mock(return_value=None)
        talk = Mock()
        talk.is_successful.return_value = True
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 0
        self.stranger2.languages = '["baz"]'
        await self.stranger.notify_partner_found(self.stranger2)
        self.assertEqual(
            new_translation.call_args_list,
            [
                call('Use {0} please.'),
                call('Your partner is here.'),
            ],
        )
        # The pre-swap translation must never have been used.
        old_translation.assert_not_called()
    @patch('randtalkbot.stranger.get_languages_names', Mock())
    async def test_notify_partner_found__was_bonus_used(self):
        """Switching partners with bonuses spent reports remaining bonuses
        (1001 - 1 for the previous talk = 1000 left).
        """
        sender = CoroutineMock()
        sender.update_translation = Mock()
        sender._ = Mock(
            side_effect=[
                'You\'ve used one bonus with previous partner. {0} bonus(es) left.',
                'Here\'s another stranger.',
            ],
        )
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.stranger.get_partner = Mock(return_value=self.stranger3)
        talk = Mock()
        talk.is_successful.return_value = True
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 1001
        self.stranger.looking_for_partner_from = datetime.datetime.utcnow()
        self.stranger2.languages = '["baz", "bar", "foo"]'
        await self.stranger.notify_partner_found(self.stranger2)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('You\'ve used one bonus with previous partner. {0} bonus(es) left.'),
                call('Here\'s another stranger.'),
            ],
        )
        sender.send_notification.assert_called_once_with(
            'You\'ve used one bonus with previous partner. 1000 bonus(es) left. Here\'s another'
            ' stranger.',
        )
    @patch('randtalkbot.stranger.get_languages_names', Mock())
    async def test_notify_partner_found__was_bonus_used_no_bonuses_left(self):
        """Switching partners with exactly one bonus uses the 'last bonus' wording."""
        sender = CoroutineMock()
        sender.update_translation = Mock()
        sender._ = Mock(
            side_effect=[
                'You\'ve used your last bonus with previous partner.',
                'Here\'s another stranger.',
            ],
        )
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.stranger.get_partner = Mock(return_value=self.stranger3)
        talk = Mock()
        talk.is_successful.return_value = True
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 1
        self.stranger.looking_for_partner_from = datetime.datetime.utcnow()
        self.stranger2.languages = '["baz", "bar", "foo"]'
        await self.stranger.notify_partner_found(self.stranger2)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('You\'ve used your last bonus with previous partner.'),
                call('Here\'s another stranger.'),
            ],
        )
        sender.send_notification.assert_called_once_with(
            'You\'ve used your last bonus with previous partner. Here\'s another stranger.',
        )
    @patch('randtalkbot.stranger.get_languages_names', Mock(return_value='Foo'))
    async def test_notify_partner_found__knows_uncommon_languages_one_common(self):
        """When exactly one language is shared, the singular 'Use {0} please.'
        hint is appended with that language's display name.
        """
        from randtalkbot.stranger import get_languages_names
        sender = CoroutineMock()
        sender.update_translation = Mock()
        sender._ = Mock(side_effect=['Use {0} please.', 'Your partner is here.', ])
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz", "boo"]'
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger2.languages = '["zet", "zen", "foo"]'
        await self.stranger.notify_partner_found(self.stranger2)
        get_languages_names.assert_called_once_with(['foo'])
        # Translation is updated for the partner, then reset to this stranger.
        self.assertEqual(
            sender.update_translation.call_args_list,
            [
                call(self.stranger2),
                call(),
            ],
        )
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Use {0} please.'),
                call('Your partner is here.'),
            ],
        )
        sender.send_notification.assert_called_once_with(
            'Your partner is here. Use Foo please.',
        )
    @patch('randtalkbot.stranger.get_languages_names', Mock(return_value='Foo, Bar'))
    async def test_notify_partner_found__knows_uncommon_languages_several_common(self):
        """With several shared languages the plural hint lists all their names."""
        from randtalkbot.stranger import get_languages_names
        sender = CoroutineMock()
        sender.update_translation = Mock()
        sender._ = Mock(
            side_effect=['You can use the following languages: {0}.', 'Your partner is here.', ],
        )
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz", "boo"]'
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger2.languages = '["zet", "bar", "foo"]'
        await self.stranger.notify_partner_found(self.stranger2)
        get_languages_names.assert_called_once_with(['foo', 'bar'])
        self.assertEqual(
            sender.update_translation.call_args_list,
            [
                call(self.stranger2),
                call(),
            ],
        )
        self.assertEqual(
            sender._.call_args_list,
            [
                call('You can use the following languages: {0}.'),
                call('Your partner is here.'),
            ],
        )
        sender.send_notification.assert_called_once_with(
            'Your partner is here. You can use the following languages: Foo, Bar.',
        )
    @patch('randtalkbot.stranger.datetime', Mock())
    async def test_notify_partner_found__waiting_several_minutes(self):
        """A partner who searched for 11 minutes is reported with the minutes wording."""
        sender = CoroutineMock()
        sender._ = Mock(side_effect=[
            'Your partner is here.',
            'Your partner\'s been looking for you for {0} min. Say him \"Hello\" --'
            ' if he doesn\'t respond to you, launch search again by /begin command.',
        ])
        self.stranger.get_sender = Mock(return_value=sender)
        from randtalkbot.stranger import datetime as datetime_mock
        # Now is 10:11; partner searched since 10:00 -> 11 minutes of waiting.
        datetime_mock.datetime.utcnow.return_value = datetime.datetime(1970, 1, 1, 10, 11)
        self.stranger2.looking_for_partner_from = datetime.datetime(1970, 1, 1, 10, 0)
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger2.languages = '["baz", "bar", "foo"]'
        await self.stranger.notify_partner_found(self.stranger2)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Your partner is here.'),
                call(
                    'Your partner\'s been looking for you for {0} min. Say him \"Hello\" --'
                    ' if he doesn\'t respond to you, launch search again by /begin command.',
                ),
            ],
        )
        sender.send_notification.assert_called_once_with(
            'Your partner is here. Your partner\'s been looking for you for 11 min.'
            ' Say him "Hello" -- if he doesn\'t respond to you, launch search again by /begin'
            ' command.',
        )
    @patch('randtalkbot.stranger.datetime', Mock())
    async def test_notify_partner_found__waiting_several_hours(self):
        """A partner who searched for a full hour is reported with the hours wording."""
        sender = CoroutineMock()
        sender._ = Mock(side_effect=[
            'Your partner is here.',
            'Your partner\'s been looking for you for {0} hr. Say him \"Hello\" --'
            ' if he doesn\'t respond to you, launch search again by /begin command.',
        ])
        self.stranger.get_sender = Mock(return_value=sender)
        from randtalkbot.stranger import datetime as datetime_mock
        # Now is 11:00; partner searched since 10:00 -> exactly 1 hour.
        datetime_mock.datetime.utcnow.return_value = datetime.datetime(1970, 1, 1, 11, 0)
        self.stranger2.looking_for_partner_from = datetime.datetime(1970, 1, 1, 10, 0)
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger2.languages = '["baz", "bar", "foo"]'
        await self.stranger.notify_partner_found(self.stranger2)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Your partner is here.'),
                call(
                    'Your partner\'s been looking for you for {0} hr. Say him \"Hello\" --'
                    ' if he doesn\'t respond to you, launch search again by /begin command.'),
            ],
        )
        sender.send_notification.assert_called_once_with(
            'Your partner is here. Your partner\'s been looking for you for 1 hr.'
            ' Say him "Hello" -- if he doesn\'t respond to you, launch search again by /begin'
            ' command.',
        )
    @patch('randtalkbot.stranger.datetime', Mock())
    async def test_notify_partner_found__partner_doesnt_wait(self):
        """No waiting-time sentence is added when only *this* stranger was
        searching (the partner has no looking_for_partner_from timestamp here).
        """
        sender = CoroutineMock()
        sender._ = Mock(side_effect=[
            'Your partner is here.',
            'Have a nice chat!',
        ])
        self.stranger.get_sender = Mock(return_value=sender)
        from randtalkbot.stranger import datetime as datetime_mock
        datetime_mock.datetime.utcnow.return_value = datetime.datetime(1970, 1, 1, 11, 0)
        self.stranger.looking_for_partner_from = datetime.datetime(1970, 1, 1, 10, 0)
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger2.languages = '["baz", "bar", "foo"]'
        await self.stranger.notify_partner_found(self.stranger2)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Your partner is here.'),
                call('Have a nice chat!'),
            ],
        )
        sender.send_notification.assert_called_once_with('Your partner is here. Have a nice chat!')
    @patch('randtalkbot.stranger.datetime', Mock())
    async def test_notify_partner_found__waiting_only_a_little_bit(self):
        """A 4-minute wait is below the threshold -- no waiting-time sentence."""
        sender = CoroutineMock()
        sender._ = Mock(side_effect=[
            'Your partner is here.',
            'Have a nice chat!',
        ])
        self.stranger.get_sender = Mock(return_value=sender)
        from randtalkbot.stranger import datetime as datetime_mock
        # Now is 10:04; partner searched since 10:00 -> only 4 minutes.
        datetime_mock.datetime.utcnow.return_value = datetime.datetime(1970, 1, 1, 10, 4)
        self.stranger2.looking_for_partner_from = datetime.datetime(1970, 1, 1, 10, 0)
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger2.languages = '["baz", "bar", "foo"]'
        await self.stranger.notify_partner_found(self.stranger2)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Your partner is here.'),
                call('Have a nice chat!'),
            ],
        )
        sender.send_notification.assert_called_once_with('Your partner is here. Have a nice chat!')
    @patch('randtalkbot.stranger.get_languages_names', Mock())
    async def test_notify_partner_found__telegram_error(self):
        """notify_partner_found() wraps a delivery TelegramError in StrangerError,
        after the message was fully built and the send attempted once.
        """
        sender = CoroutineMock()
        sender.send_notification.side_effect = TelegramError({}, '', 0)
        sender.update_translation = Mock()
        sender._ = Mock(side_effect=[
            'Your partner is here.',
            'Have a nice chat!',
        ])
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger2.languages = '["baz", "bar", "foo"]'
        with self.assertRaises(StrangerError):
            await self.stranger.notify_partner_found(self.stranger2)
        self.assertEqual(
            sender._.call_args_list,
            [
                call('Your partner is here.'),
                call('Have a nice chat!'),
            ],
        )
        sender.send_notification.assert_called_once_with('Your partner is here. Have a nice chat!')
    async def test_pay__ok(self):
        """pay() adds the bonuses, persists the stranger and sends the gratitude."""
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.bonus_count = 1000
        self.stranger.save = Mock()
        await self.stranger.pay(31416, 'foo_gratitude')
        self.stranger.save.assert_called_once_with()
        # 1000 existing + 31416 paid.
        self.assertEqual(self.stranger.bonus_count, 32416)
        sender.send_notification.assert_called_once_with(
            'You\'ve earned {0} bonuses. Total bonus amount: {1}. {2}',
            31416,
            32416,
            'foo_gratitude',
        )
    @patch('randtalkbot.stranger.LOGGER', Mock())
    async def test_pay__telegram_error(self):
        """pay() still credits and saves when the notification fails; logs at info."""
        from randtalkbot.stranger import LOGGER
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.bonus_count = 1000
        self.stranger.save = Mock()
        error = TelegramError({}, '', 0)
        sender.send_notification.side_effect = error
        await self.stranger.pay(31416, 'foo_gratitude')
        self.stranger.save.assert_called_once_with()
        self.assertEqual(self.stranger.bonus_count, 32416)
        LOGGER.info.assert_called_once_with('Pay. Can\'t notify stranger %d: %s', 1, error)
    @asynctest.ignore_loop
    def test_pay_for_talk__ok(self):
        """_pay_for_talk() deducts one bonus after a successful talk and saves."""
        talk = Mock()
        talk.is_successful.return_value = True
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 1000
        self.stranger.save = Mock()
        self.stranger._pay_for_talk()
        self.assertEqual(self.stranger.bonus_count, 999)
        self.stranger.save.assert_called_once_with()
    @asynctest.ignore_loop
    def test_pay_for_talk__not_successful(self):
        """_pay_for_talk() charges nothing for an unsuccessful talk."""
        talk = Mock()
        talk.is_successful.return_value = False
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 1000
        self.stranger.save = Mock()
        self.stranger._pay_for_talk()
        self.assertEqual(self.stranger.bonus_count, 1000)
    @asynctest.ignore_loop
    def test_pay_for_talk__no_bonuses(self):
        """_pay_for_talk() is a no-op (no save) when the stranger has no bonuses."""
        talk = Mock()
        talk.is_successful.return_value = True
        talk.partner1 = self.stranger
        self.stranger.get_talk = Mock(return_value=talk)
        self.stranger.bonus_count = 0
        self.stranger.save = Mock()
        self.stranger._pay_for_talk()
        self.stranger.save.assert_not_called()
    @asynctest.ignore_loop
    def test_prevent_advertising__ok(self):
        """prevent_advertising() cancels the pending task and clears the attribute."""
        deferred_advertising = Mock()
        self.stranger._deferred_advertising = deferred_advertising
        self.stranger.prevent_advertising()
        deferred_advertising.cancel.assert_called_once_with()
        self.assertEqual(self.stranger._deferred_advertising, None)
@asynctest.ignore_loop
def test_prevent_advertising__deferred_is_not_set(self):
self.stranger.prevent_advertising()
self.assertEqual(getattr(stranger, '_deferred_advertising', None), None)
    @asynctest.ignore_loop
    def test_prevent_advertising__deferred_is_none(self):
        """prevent_advertising() tolerates an explicitly-None deferred attribute."""
        self.stranger._deferred_advertising = None
        self.stranger.prevent_advertising()
        self.assertEqual(self.stranger._deferred_advertising, None)
    @patch('randtalkbot.stranger.StatsService', Mock())
    async def test_reward_inviter__chat_lacks_such_user(self):
        """A female invitee when ratio > 1 (chat lacks females) earns the
        inviter the boosted 3-bonus reward.
        """
        from randtalkbot.stranger import StatsService
        talk = Mock()
        # Both partners have sent messages -> the talk counts as real.
        talk.partner1_sent = 1
        talk.partner2_sent = 1
        self.stranger.get_talk = Mock(return_value=talk)
        StatsService.get_instance \
            .return_value \
            .get_stats \
            .return_value \
            .get_sex_ratio \
            .return_value = 1.1
        self.stranger.invited_by = self.stranger2
        self.stranger2._add_bonuses = CoroutineMock()
        self.stranger.save = Mock()
        self.stranger.sex = 'female'
        await self.stranger._reward_inviter()
        StatsService.get_instance \
            .return_value \
            .get_stats \
            .return_value \
            .get_sex_ratio \
            .assert_called_once_with()
        self.assertEqual(self.stranger.was_invited_as, 'female')
        self.stranger.save.assert_called_once_with()
        self.stranger.invited_by._add_bonuses.assert_called_once_with(3)
@patch('randtalkbot.stranger.StatsService', Mock())
async def test_reward_inviter__chat_doesnt_lack_such_user(self):
from randtalkbot.stranger import StatsService
from randtalkbot.stranger import StatsService
talk = Mock()
talk.partner1_sent = 1
talk.partner2_sent = 1
self.stranger.get_talk = Mock(return_value=talk)
StatsService.get_instance \
.return_value \
.get_stats \
.return_value \
.get_sex_ratio \
.return_value = 1.1
self.stranger.invited_by = self.stranger2
self.stranger2._add_bonuses = CoroutineMock()
self.stranger.save = Mock()
self.stranger.sex = 'not_specified'
await self.stranger._reward_inviter()
self.assertEqual(self.stranger.was_invited_as, 'not_specified')
self.stranger.save.assert_called_once_with()
self.stranger.invited_by._add_bonuses.assert_called_once_with(1)
    async def test_send__ok(self):
        """send() forwards the message through the sender without notifications."""
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        message = Mock()
        await self.stranger.send(message)
        sender.send.assert_called_once_with(message)
        sender.send_notification.assert_not_called()
async def test_send__sender_error(self):
sender = CoroutineMock()
sender.send.side_effect = StrangerSenderError()
self.stranger.get_sender = Mock(return_value=sender)
message = Mock()
with self.assertRaises(StrangerError):
await self.stranger.send(message)
sender.send.assert_called_once_with(message)
sender.send_notification.assert_not_called()
async def test_send_to_partner__chatting_stranger(self):
self.stranger.get_partner = Mock(return_value=self.stranger2)
self.stranger2.send = CoroutineMock()
message = Mock()
talk = Mock()
self.stranger.get_talk = Mock(return_value=talk)
await self.stranger.send_to_partner(message)
self.stranger2.send.assert_called_once_with(message)
talk.increment_sent.assert_called_once_with(self.stranger)
    async def test_send_to_partner__not_chatting_stranger(self):
        """Sending with no partner raises MissingPartnerError."""
        self.stranger.get_partner = Mock(return_value=None)
        with self.assertRaises(MissingPartnerError):
            await self.stranger.send_to_partner(Mock())
    async def test_send_to_partner__telegram_error(self):
        """A TelegramError raised by the partner's send propagates to the caller."""
        self.stranger.get_partner = Mock(return_value=self.stranger2)
        self.stranger2.send = CoroutineMock(side_effect=TelegramError({}, '', 0))
        message = Mock()
        with self.assertRaises(TelegramError):
            await self.stranger.send_to_partner(message)
    @asynctest.ignore_loop
    def test_set_languages__ok(self):
        """Six languages (the accepted maximum, per the sibling test) are stored as JSON."""
        # 6 languages.
        self.stranger.set_languages(['ru', 'en', 'it', 'fr', 'de', 'pt', ])
        self.assertEqual(self.stranger.languages, '["ru", "en", "it", "fr", "de", "pt"]')
    @asynctest.ignore_loop
    def test_set_languages__same(self):
        """The special value 'same' keeps the previously stored languages."""
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.stranger.set_languages(['same'])
        self.assertEqual(self.stranger.languages, '["foo", "bar", "baz"]')
    @asynctest.ignore_loop
    def test_set_languages__empty(self):
        """An empty language list raises EmptyLanguagesError."""
        from randtalkbot.errors import EmptyLanguagesError
        with self.assertRaises(EmptyLanguagesError):
            self.stranger.set_languages([])
    @asynctest.ignore_loop
    def test_set_languages__same_empty(self):
        """'same' with no languages previously stored raises EmptyLanguagesError."""
        from randtalkbot.errors import EmptyLanguagesError
        self.stranger.languages = None
        with self.assertRaises(EmptyLanguagesError):
            self.stranger.set_languages(['same'])
    @asynctest.ignore_loop
    def test_set_languages__too_much(self):
        """Seven languages exceed the limit and raise StrangerError."""
        self.stranger.languages = None
        with self.assertRaises(StrangerError):
            # 7 languages.
            self.stranger.set_languages(['ru', 'en', 'it', 'fr', 'de', 'pt', 'po'])
    @patch('randtalkbot.stranger.datetime')
    async def test_set_looking_for_partner__ok(self, datetime_mock):
        """Sets the search timestamp, notifies the user and clears the partner."""
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.set_partner = CoroutineMock()
        datetime_mock.datetime.utcnow.return_value = datetime.datetime(1980, 1, 1)
        await self.stranger.set_looking_for_partner()
        sender.send_notification.assert_called_once_with('Looking for a stranger for you.')
        self.assertEqual(self.stranger.looking_for_partner_from, datetime.datetime(1980, 1, 1))
        self.stranger.set_partner.assert_called_once_with(None)
    @patch('randtalkbot.stranger.datetime')
    async def test_set_looking_for_partner__looking_for_partner_already(self, datetime_mock):
        """An earlier search timestamp is preserved; the partner is still cleared."""
        sender = CoroutineMock()
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.set_partner = CoroutineMock()
        self.stranger.looking_for_partner_from = datetime.datetime(1970, 1, 1)
        datetime_mock.datetime.utcnow.return_value = datetime.datetime(1980, 1, 1)
        await self.stranger.set_looking_for_partner()
        self.assertEqual(self.stranger.looking_for_partner_from, datetime.datetime(1970, 1, 1))
        self.stranger.set_partner.assert_called_once_with(None)
    @patch('randtalkbot.stranger.datetime')
    async def test_set_looking_for_partner__bot_was_blocked(self, datetime_mock):
        """When the notification fails with TelegramError the timestamp stays None."""
        sender = CoroutineMock()
        sender.send_notification.side_effect = TelegramError({}, '', 0)
        self.stranger.get_sender = Mock(return_value=sender)
        self.stranger.set_partner = CoroutineMock()
        datetime_mock.datetime.utcnow.return_value = 'foo_time'
        await self.stranger.set_looking_for_partner()
        sender.send_notification.assert_called_once_with('Looking for a stranger for you.')
        self.assertEqual(self.stranger.looking_for_partner_from, None)
        self.stranger.set_partner.assert_called_once_with(None)
    @patch('randtalkbot.stranger.datetime', Mock())
    @patch('randtalkbot.talk.Talk', Mock())
    async def test_set_partner__chatting_stranger(self):
        """Switching partner ends the old talk ('now'), kicks the old partner
        and creates a new Talk linking both strangers."""
        from randtalkbot.stranger import datetime as datetime_mock
        from randtalkbot.talk import Talk
        self.stranger3.looking_for_partner_from = 'foo_searched_since'
        self.stranger3.save = Mock()
        self.stranger2.get_partner = Mock(return_value=self.stranger)
        self.stranger2.kick = CoroutineMock()
        self.stranger.get_partner = Mock(return_value=self.stranger2)
        self.stranger._partner = self.stranger2
        talk = Mock()
        self.stranger._talk = talk
        datetime_mock.datetime.utcnow.return_value = 'now'
        new_talk = Mock()
        Talk.create.return_value = new_talk
        await self.stranger.set_partner(self.stranger3)
        self.stranger2.kick.assert_called_once_with()
        self.assertEqual(talk.end, 'now')
        talk.save.assert_called_once_with()
        Talk.create.assert_called_once_with(
            partner1=self.stranger,
            partner2=self.stranger3,
            searched_since='foo_searched_since',
            )
        self.assertEqual(self.stranger._partner, self.stranger3)
        self.assertEqual(self.stranger._talk, new_talk)
        self.assertEqual(self.stranger3._partner, self.stranger)
        self.assertEqual(self.stranger3._talk, new_talk)
        self.assertEqual(self.stranger3.looking_for_partner_from, None)
        self.stranger3.save.assert_called_once_with()
async def test_set_partner__chatting_stranger_none(self):
self.stranger2.get_partner = Mock(return_value=self.stranger)
self.stranger2.kick = CoroutineMock()
self.stranger.get_partner = Mock(return_value=self.stranger2)
self.stranger._partner = self.stranger2
self.stranger._talk = Mock()
await self.stranger.set_partner(None)
self.stranger2.kick.assert_called_once_with()
self.assertEqual(self.stranger._partner, None)
self.assertEqual(self.stranger._talk, None)
async def test_set_partner__same(self):
self.stranger.get_partner = Mock(return_value=self.stranger2)
self.stranger.save = Mock()
await self.stranger.set_partner(self.stranger2)
self.stranger.save.assert_called_once_with()
    @patch('randtalkbot.stranger.datetime', Mock())
    @patch('randtalkbot.talk.Talk', Mock())
    async def test_set_partner__buggy_chatting_stranger(self):
        """A stale partner (already chatting with someone else) is NOT kicked;
        the stranger's own search timestamp is still cleared."""
        from randtalkbot.stranger import datetime as datetime_mock
        from randtalkbot.talk import Talk
        self.stranger3.looking_for_partner_from = 'foo_searched_since'
        self.stranger3.save = Mock()
        self.stranger2.get_partner = Mock(return_value=self.stranger4)
        self.stranger2.kick = CoroutineMock()
        self.stranger.get_partner = Mock(return_value=self.stranger2)
        self.stranger._partner = self.stranger2
        self.stranger.looking_for_partner_from = 'bar_searched_since'
        talk = Mock()
        self.stranger._talk = talk
        datetime_mock.datetime.utcnow.return_value = 'now'
        new_talk = Mock()
        Talk.create.return_value = new_talk
        await self.stranger.set_partner(self.stranger3)
        self.stranger2.kick.assert_not_called()
        self.assertEqual(self.stranger.looking_for_partner_from, None)
    @patch('randtalkbot.stranger.datetime', Mock())
    @patch('randtalkbot.talk.Talk', Mock())
    async def test_set_partner__not_chatting_stranger(self):
        """Pairing a non-chatting stranger leaves its bonus count untouched."""
        from randtalkbot.stranger import datetime as datetime_mock
        from randtalkbot.talk import Talk
        self.stranger3.looking_for_partner_from = 'foo_searched_since'
        self.stranger3.save = Mock()
        self.stranger.get_partner = Mock(return_value=None)
        self.stranger._partner = None
        self.stranger.bonus_count = 1000
        talk = Mock()
        self.stranger._talk = talk
        datetime_mock.datetime.utcnow.return_value = 'now'
        new_talk = Mock()
        Talk.create.return_value = new_talk
        await self.stranger.set_partner(self.stranger3)
        self.assertEqual(self.stranger.bonus_count, 1000)
    @asynctest.ignore_loop
    def test_set_sex__correct(self):
        """Mixed case and surrounding whitespace are normalized to 'male'."""
        self.stranger.set_sex(' mALe ')
        self.assertEqual(self.stranger.sex, 'male')
    @asynctest.ignore_loop
    def test_set_sex__translated(self):
        """A translated (Russian) sex name is accepted and stored as 'male'."""
        self.stranger.set_sex(' МУЖСКОЙ ')
        self.assertEqual(self.stranger.sex, 'male')
    @asynctest.ignore_loop
    def test_set_sex__additional(self):
        """An additional (colloquial Russian) sex name is accepted as 'male'."""
        self.stranger.set_sex(' МАЛЬЧИК ')
        self.assertEqual(self.stranger.sex, 'male')
    @asynctest.ignore_loop
    def test_set_sex__incorrect(self):
        """An unknown value raises SexError and leaves the stored sex intact."""
        from randtalkbot.errors import SexError
        self.stranger.sex = 'foo'
        with self.assertRaises(SexError):
            self.stranger.set_sex('not_a_sex')
        self.assertEqual(self.stranger.sex, 'foo')
    @asynctest.ignore_loop
    def test_set_partner_sex__correct(self):
        """Mixed case and whitespace in the partner sex are normalized to 'male'."""
        self.stranger.set_partner_sex(' mALe ')
        self.assertEqual(self.stranger.partner_sex, 'male')
    @asynctest.ignore_loop
    def test_set_partner_sex__additional(self):
        """An additional (colloquial Russian) name is accepted for partner sex."""
        self.stranger.set_partner_sex(' МАЛЬЧИК ')
        self.assertEqual(self.stranger.partner_sex, 'male')
    @asynctest.ignore_loop
    def test_set_partner_sex__incorrect(self):
        """An unknown value raises SexError and leaves partner_sex intact."""
        from randtalkbot.errors import SexError
        self.stranger.partner_sex = 'foo'
        with self.assertRaises(SexError):
            self.stranger.set_partner_sex('not_a_sex')
        self.assertEqual(self.stranger.partner_sex, 'foo')
    @asynctest.ignore_loop
    def test_speaks_on_language__novice(self):
        """With no languages stored, every language query returns False."""
        self.stranger.languages = None
        self.assertFalse(self.stranger.speaks_on_language('foo'))
    @asynctest.ignore_loop
    def test_speaks_on_language__speaks(self):
        """A language present in the stored JSON list returns True."""
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.assertTrue(self.stranger.speaks_on_language('bar'))
    @asynctest.ignore_loop
    def test_speaks_on_language__not_speaks(self):
        """A language absent from the stored JSON list returns False."""
        self.stranger.languages = '["foo", "bar", "baz"]'
        self.assertFalse(self.stranger.speaks_on_language('boo'))
| quasiyoke/RandTalkBot | tests/test_stranger.py | Python | agpl-3.0 | 58,086 |
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from airflow.contrib.operators.sagemaker_base_operator import SageMakerBaseOperator
# Nested operator config whose leaf values are numeric *strings*; the test
# expects parse_config_integers() to convert them in place (see parsed_config).
config = {
    'key1': '1',
    'key2': {
        'key3': '3',
        'key4': '4'
    },
    'key5': [
        {
            'key6': '6'
        },
        {
            'key6': '7'
        }
    ]
}
# Expected result: same structure as `config` but with integer leaf values.
parsed_config = {
    'key1': 1,
    'key2': {
        'key3': 3,
        'key4': 4
    },
    'key5': [
        {
            'key6': 6
        },
        {
            'key6': 7
        }
    ]
}
class TestSageMakerBaseOperator(unittest.TestCase):
    """Tests for SageMakerBaseOperator.parse_config_integers()."""

    def setUp(self):
        # Operator under test, fed the string-valued config defined above.
        self.sagemaker = SageMakerBaseOperator(
            config=config,
            aws_conn_id='sagemaker_test_id',
            task_id='test_sagemaker_operator',
        )

    def test_parse_integer(self):
        # Paths to convert: scalars, nested dict keys, and a key inside a list.
        paths = [
            ['key1'],
            ['key2', 'key3'],
            ['key2', 'key4'],
            ['key5', 'key6'],
        ]
        self.sagemaker.integer_fields = paths
        self.sagemaker.parse_config_integers()
        self.assertEqual(self.sagemaker.config, parsed_config)
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| Fokko/incubator-airflow | tests/contrib/operators/test_sagemaker_base_operator.py | Python | apache-2.0 | 1,886 |
# Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
'''
>>> from bienstman5_ext import *
>>> m = M(1j)
'''
def run(args = None):
    """Execute this module's doctests, optionally overriding sys.argv.

    Returns the (failed, attempted) result of doctest.testmod.
    """
    import doctest
    import sys

    if args is not None:
        sys.argv = args
    current_module = sys.modules.get(__name__)
    return doctest.testmod(current_module)
# When executed as a script, run the doctests and exit with the number of
# failures as the process status (0 means success).
if __name__ == '__main__':
    print("running...")
    import sys
    status = run()[0]
    if (status == 0): print("Done.")
    sys.exit(status)
| satya-das/common | third_party/boost_tp/libs/python/test/bienstman5.py | Python | mit | 550 |
from CatalogSurfaceItem import *
# Indices into the FlooringTypes value tuples below.
FTTextureName = 0
FTColor = 1
FTBasePrice = 2
# patternIndex -> (texture path, allowed colour list or None, base price).
FlooringTypes = {1000: ('phase_5.5/maps/floor_wood_neutral.jpg', CTBasicWoodColorOnWhite, 150),
 1010: ('phase_5.5/maps/flooring_carpetA_neutral.jpg', CTFlatColorDark, 150),
 1020: ('phase_4/maps/flooring_tile_neutral.jpg', CTFlatColorDark, 150),
 1030: ('phase_5.5/maps/flooring_tileB2.jpg', None, 150),
 1040: ('phase_4/maps/grass.jpg', None, 150),
 1050: ('phase_4/maps/floor_tile_brick_diagonal2.jpg', None, 150),
 1060: ('phase_4/maps/floor_tile_brick_diagonal.jpg', None, 150),
 1070: ('phase_4/maps/plazz_tile.jpg', None, 150),
 1080: ('phase_4/maps/sidewalk.jpg', CTFlatColorDark, 150),
 1090: ('phase_3.5/maps/boardwalk_floor.jpg', None, 150),
 1100: ('phase_3.5/maps/dustroad.jpg', None, 150),
 1110: ('phase_5.5/maps/floor_woodtile_neutral.jpg', CTBasicWoodColorOnWhite, 150),
 1120: ('phase_5.5/maps/floor_tile_neutral.jpg', CTBasicWoodColorOnWhite + CTFlatColorDark, 150),
 1130: ('phase_5.5/maps/floor_tile_honeycomb_neutral.jpg', CTBasicWoodColorOnWhite, 150),
 1140: ('phase_5.5/maps/UWwaterFloor1.jpg', None, 150),
 1150: ('phase_5.5/maps/UWtileFloor4.jpg', None, 150),
 1160: ('phase_5.5/maps/UWtileFloor3.jpg', None, 150),
 1170: ('phase_5.5/maps/UWtileFloor2.jpg', None, 150),
 1180: ('phase_5.5/maps/UWtileFloor1.jpg', None, 150),
 1190: ('phase_5.5/maps/UWsandyFloor1.jpg', None, 150),
 10000: ('phase_5.5/maps/floor_icecube.jpg', CTWhite, 225),
 10010: ('phase_5.5/maps/floor_snow.jpg', CTWhite, 225),
 11000: ('phase_5.5/maps/StPatsFloor1.jpg', CTWhite, 225),
 11010: ('phase_5.5/maps/StPatsFloor2.jpg', CTWhite, 225)}
class CatalogFlooringItem(CatalogSurfaceItem):
def makeNewItem(self, patternIndex, colorIndex = None):
self.patternIndex = patternIndex
self.colorIndex = colorIndex
CatalogSurfaceItem.makeNewItem(self)
def needsCustomize(self):
return self.colorIndex == None
def getTypeName(self):
return TTLocalizer.SurfaceNames[STFlooring]
def getName(self):
name = TTLocalizer.FlooringNames.get(self.patternIndex)
if name:
return name
return self.getTypeName()
def getSurfaceType(self):
return STFlooring
def getPicture(self, avatar):
frame = self.makeFrame()
sample = loader.loadModel('phase_5.5/models/estate/wallpaper_sample')
a = sample.find('**/a')
b = sample.find('**/b')
c = sample.find('**/c')
a.setTexture(self.loadTexture(), 1)
a.setColorScale(*self.getColor())
b.setTexture(self.loadTexture(), 1)
b.setColorScale(*self.getColor())
c.setTexture(self.loadTexture(), 1)
c.setColorScale(*self.getColor())
sample.reparentTo(frame)
self.hasPicture = True
return (frame, None)
def output(self, store = -1):
return 'CatalogFlooringItem(%s, %s%s)' % (self.patternIndex, self.colorIndex, self.formatOptionalData(store))
def getFilename(self):
return FlooringTypes[self.patternIndex][FTTextureName]
def compareTo(self, other):
if self.patternIndex != other.patternIndex:
return self.patternIndex - other.patternIndex
return 0
def getHashContents(self):
return self.patternIndex
def getBasePrice(self):
return FlooringTypes[self.patternIndex][FTBasePrice]
def loadTexture(self):
from pandac.PandaModules import Texture
filename = FlooringTypes[self.patternIndex][FTTextureName]
texture = loader.loadTexture(filename)
texture.setMinfilter(Texture.FTLinearMipmapLinear)
texture.setMagfilter(Texture.FTLinear)
return texture
def getColor(self):
if self.colorIndex == None:
colorIndex = 0
else:
colorIndex = self.colorIndex
colors = FlooringTypes[self.patternIndex][FTColor]
if colors:
if colorIndex < len(colors):
return colors[colorIndex]
else:
print 'Warning: colorIndex not in colors. Returning white.'
return CT_WHITE
else:
return CT_WHITE
return
def decodeDatagram(self, di, versionNumber, store):
CatalogAtticItem.CatalogAtticItem.decodeDatagram(self, di, versionNumber, store)
if versionNumber < 3:
self.patternIndex = di.getUint8()
else:
self.patternIndex = di.getUint16()
if versionNumber < 4 or store & CatalogItem.Customization:
self.colorIndex = di.getUint8()
else:
self.colorIndex = None
wtype = FlooringTypes[self.patternIndex]
return
def encodeDatagram(self, dg, store):
CatalogAtticItem.CatalogAtticItem.encodeDatagram(self, dg, store)
dg.addUint16(self.patternIndex)
if store & CatalogItem.Customization:
dg.addUint8(self.colorIndex)
def getFloorings(*indexList):
    """Return one uncolored CatalogFlooringItem per given pattern index."""
    # Fixed: the accumulator shadowed the builtin `list`.
    items = []
    for index in indexList:
        items.append(CatalogFlooringItem(index))
    return items
def getAllFloorings(*indexList):
    """Return CatalogFlooringItems for every colour of each pattern index.

    Patterns with no colour list yield a single item with colorIndex 0.
    """
    # Fixed: the accumulator shadowed the builtin `list`.
    items = []
    for index in indexList:
        colors = FlooringTypes[index][FTColor]
        if colors:
            for n in range(len(colors)):
                items.append(CatalogFlooringItem(index, n))
        else:
            items.append(CatalogFlooringItem(index, 0))
    return items
def getFlooringRange(fromIndex, toIndex, *otherRanges):
    """Return all coloured flooring items whose pattern index falls inside
    any of the given inclusive ranges.

    Additional ranges are passed as flat (from, to) pairs in *otherRanges.
    """
    # Fixed: the accumulator shadowed the builtin `list`.
    items = []
    froms = [fromIndex]
    tos = [toIndex]
    # Unpack the flat (from, to) pairs of the extra ranges.
    i = 0
    while i < len(otherRanges):
        froms.append(otherRanges[i])
        tos.append(otherRanges[i + 1])
        i += 2
    for patternIndex in FlooringTypes.keys():
        for fromIndex, toIndex in zip(froms, tos):
            if patternIndex >= fromIndex and patternIndex <= toIndex:
                colors = FlooringTypes[patternIndex][FTColor]
                if colors:
                    for n in range(len(colors)):
                        items.append(CatalogFlooringItem(patternIndex, n))
                else:
                    items.append(CatalogFlooringItem(patternIndex, 0))
    return items
| ksmit799/Toontown-Source | toontown/catalog/CatalogFlooringItem.py | Python | mit | 6,140 |
from proteus import *
from proteus.default_n import *
from la_gauss_2d_p import *
# --- Time stepping: first-order SSP Runge-Kutta, CFL-limited -----------------
timeOrder = 1
nStagesTime = timeOrder
runCFL = 0.45
DT = None
nDTout = 10
#BackwardEuler, ForwardEuler
timeIntegration = LinearSSPRKintegration
stepController=Min_dt_RKcontroller
# --- Spatial discretisation: piecewise-constant DG (P0), Gauss quadrature ----
femSpaces = {0:DG_Constants}
# elementQuadrature = SimplexLobattoQuadrature(nd,1)
# elementBoundaryQuadrature = SimplexLobattoQuadrature(nd-1,1)
elementQuadrature = SimplexGaussQuadrature(nd,3)
elementBoundaryQuadrature = SimplexGaussQuadrature(nd-1,3)
nLevels = 4
subgridError = None
numericalFluxType = Advection_DiagonalUpwind
shockCapturing = None
# --- Nonlinear solver --------------------------------------------------------
multilevelNonlinearSolver = NLNI
levelNonlinearSolver = Newton
nonlinearSmoother = NLGaussSeidel
fullNewtonFlag = True
tolFac = 0.01
nl_atol_res = 1.0e-8
# --- Linear solver -----------------------------------------------------------
matrix = SparseMatrix
multilevelLinearSolver = LU
levelLinearSolver = LU
linearSmoother = GaussSeidel
linTolFac = 0.001
# --- Output and diagnostics --------------------------------------------------
conservativeFlux = None
checkMass = True
archiveFlag = ArchiveFlags.EVERY_USER_STEP
| erdc/proteus | proteus/tests/dg/la_gauss_2d_dgp0_n.py | Python | mit | 997 |
def merge_sort(unsorted):
    """Return a new list with the elements of `unsorted` in ascending order.

    Classic top-down merge sort: split in half, sort the halves recursively,
    then merge them.  O(n log n) comparisons; the input is not modified.
    """
    if len(unsorted) <= 1:
        return unsorted
    # Slicing replaces the original element-by-element copy loops; `//`
    # keeps integer division on both Python 2 and 3.
    mid = len(unsorted) // 2
    left = merge_sort(unsorted[:mid])
    right = merge_sort(unsorted[mid:])
    return merge(left, right)
def merge(left, right):
    """Merge two sorted lists into one sorted list (stable: ties take left).

    Uses index cursors instead of repeatedly re-slicing the inputs, which
    made the original implementation O(n^2); this version is O(n).
    """
    result = []
    i = j = 0
    while i < len(left) and j < len(right):
        if left[i] <= right[j]:
            result.append(left[i])
            i += 1
        else:
            result.append(right[j])
            j += 1
    # At most one of these appends anything.
    result.extend(left[i:])
    result.extend(right[j:])
    return result
# Crude benchmark: time the sort on an already-ordered list ("best case")
# and on a scrambled one ("worst case") and print elapsed wall-clock time.
# Python 2 only: uses `xrange` and print statements.
if __name__ == '__main__':
    best_case, worst_case = [], []
    for i in xrange(65336):
        best_case.append(i)
        worst_case.append((32768 + i * 32768 + (i / 2)) % 65336)
    import time
    t1 = time.time()
    merge_sort(best_case)
    t2 = time.time()
    print "Best case: %f seconds" % (t2 - t1)
    t3 = time.time()
    merge_sort(worst_case)
    t4 = time.time()
    print "Worst case: %f seconds" % (t4 - t3)
| markcharyk/data-structures | data_structures/merge_sort.py | Python | mit | 1,259 |
# Declarative UI description of the login page: a panel containing a
# two-column table (username/password inputs) and a login button whose
# click handler calls EJLogin() with the two input widget uids.
# Property names come from W3Const; keys starting with "uid" are widget ids.
{
    "uidPageLogin": {
        W3Const.w3PropType: W3Const.w3TypePanel,
        W3Const.w3PropSubUI: [
            "uidLoginTable"
        ]
    },
    "uidLoginTable": {
        W3Const.w3PropType: W3Const.w3TypeTable,
        W3Const.w3PropSubUI: [
            [], # No header
            ["uidUsernameLabel", "uidUsername"],
            ["uidPasswordLabel", "uidPassword"],
            ["uidNullLabel", "uidLoginButton"]
        ]
    },
    "uidUsername": {
        W3Const.w3PropType: W3Const.w3TypeText
    },
    "uidPassword": {
        W3Const.w3PropType: W3Const.w3TypePassword
    },
    "uidLoginButton": {
        W3Const.w3PropType: W3Const.w3TypeButton,
        W3Const.w3PropString: "sidLogin",
        W3Const.w3PropEvent: {
            W3Const.w3EventClick: [
                "EJLogin('uidUsername', 'uidPassword')"
            ]
        }
    }
}
| eddiedb6/ej | web/metadata/EJUILoginPage.py | Python | mit | 875 |
"""@author Sebastien E. Bourban
"""
"""@note ... this work is based on a collaborative effort between
.________. ,--.
| | . ( (
|,-. / HR Wallingford EDF - LNHE / \_ \_/ .--.
/ \ / Howbery Park, 6, quai Watier \ ) /_ )
,. `' Wallingford, Oxfordshire 78401 Cedex `-'_ __ `--
/ \ / OX10 8BA, United Kingdom Chatou, France __/ \ \ `.
/ `-'| www.hrwallingford.com innovation.edf.com | ) ) )
!________! `--' `--
"""
"""@history 11/11/2011 -- Sebastien E. Bourban
Implementation of the important bits of METIS in python
"""
"""@brief
This file contains the top level routines for the multilevel recursive
bisection algorithm PMETIS.
Copyright 1997-2009, Regents of the University of Minnesota
"""
"""@note
Graph Data Structure:
All of the graph partitioning and sparse matrix ordering routines in
METIS take as input the adjacency structure of the graph and the weights of
the vertices and edges (if any).
The adjacency structure of the graph is stored using the compressed
storage format (CSR). The CSR format is a widely used scheme for storing
sparse graphs. In this format the adjacency structure of a graph with n
vertices and m edges is represented using two arrays xadj and adjncy.
The xadj array is of size n + 1 whereas the adjncy array is of size 2m
(this is because for each edge between vertices v and u we actually store
both (v; u) and (u; v)). The adjacency structure of the graph is stored as
follows. Assuming that vertex numbering starts from 0 (C style), then the
adjacency list of vertex i is stored in array adjncy starting at index
xadj[i] and ending at (but not including) index xadj[i+1] (i.e.,
adjncy[xadj[i]] through and including adjncy[xadj[i+1]-1]). That is, for
each vertex i, its adjacency list is stored in consecutive locations in the
array adjncy, and the array xadj is used to point to where it begins and
where it ends.
Weight Data Structure:
The weights of the vertices (if any) are stored in an additional array
called vwgt. If ncon is the number of weights associated with each vertex,
the array vwgt contains n * ncon elements (recall that n is the number of
vertices). The weights of the ith vertex are stored in ncon consecutive
entries starting at location vwgt[i * ncon]. Note that if each vertex has
only a single weight, then vwgt will contain n elements, and vwgt[i] will
store the weight of the 22ith vertex. The vertex-weights must be integers
greater or equal to zero. If all the vertices of the graph have the same
weight (i.e., the graph is unweighted), then the vwgt can be set to NULL.
The weights of the edges (if any) are stored in an additional array called
adjwgt. This array contains 2melements, and the weight of edge adjncy[j]
is stored at location adjwgt[j]. The edge-weights must be integers greater
than zero. If all the edges of the graph have the same weight (i.e., the
graph is unweighted), then the adjwgt can be set to NULL
Mesh Data Structure:
All of the mesh partitioning and mesh conversion routines in METIS take
as input the element node array of a mesh. This element node array is stored
using a pair of arrays called eptr and eind, which are similar to the xadj
and adjncy arrays used for storing the adjacency structure of a graph.
The size of the eptr array is n+1, where n is the number of elements in
the mesh. The size of the eind array is of size equal to the sum of the
number of nodes in all the elements of the mesh. The list of nodes belonging
to the ith element of the mesh are stored in consecutive locations of eind
starting at position eptr[i] up to (but not including) position eptr[i+1].
This format makes it easy to specify meshes of any type of elements,
including meshes with mixed element types that have different number of
nodes per element. As it was the case with the format of the mesh file, the
ordering of the nodes in each element is not important.
"""
# _____ ___________________________________________________
# ____/ Imports /__________________________________________________/
#
# ~~> dependencies towards standard python
import sys
from os import path, getcwd
import numpy as np
# ~~> dependencies towards other siblings
from progressbar import ProgressBar
from files import getFileContent
# ~~> dependencies towards one level up modules
sys.path.append(path.join(path.dirname(sys.argv[0]), "..")) # clever you !
from parsers.parserSELAFIN import CONLIM, SELAFIN, subsetVariablesSLF
from parsers.parserKenue import putInS
# _____ ___________________________________
# ____/ Primary SELAFIN Classes /__________________________________/
#
class splitSELAFIN:
    def __init__(self, SLFfileName, CLMfileName, SEQfileName="", splitCONLIM=False, DOMfileRoot=""):
        """Load the global CONLIM/SELAFIN files and derive the element split.

        The per-element part numbers come either from a METIS sequence file
        (SEQfileName) or from the 'PROCESSORS' node variable stored in the
        SELAFIN file itself; boundary and element splits are then made
        mutually consistent.
        """
        print "\n... Acquiring global files"
        # ~~> Acquire global CONLIM file
        print " +> CONLIM file"
        self.clm = CONLIM(CLMfileName)
        self.isCONLIM = splitCONLIM
        # ~~> Acquire global SELAFIN file
        print " +> SELAFIN file"
        self.slf = SELAFIN(SLFfileName)
        # ~~> Optionally acquire the METIS element-split sequence
        if SEQfileName != "":
            print " +> SEQUENCE file"
            self.NPARTS, self.NSPLIT, self.KSPLIT = self.getSplitFromSequence(
                np.array(getFileContent(SEQfileName), dtype="<i4")
            )
        else:
            self.NPARTS, self.NSPLIT, self.KSPLIT = self.getSplitFromNodeValues("PROCESSORS")
        print "\n... Split by elements in ", self.NPARTS, " parts\n"
        # ~~> Clean inconsistencies in boundary segments
        self.IPOBO, self.NSPLIT, self.KSPLIT = self.setSplitForBoundaries(self.NSPLIT, self.clm.KFRGL, self.KSPLIT)
        self.PINTER, self.PNHALO, self.PNODDS = self.setSplitForElements(
            self.IPOBO, self.NPARTS, self.NSPLIT, self.KSPLIT
        )
        self.slfn = self.copyCommonData()
        # ~~> Optional output file names
        self.isDOMAIN = DOMfileRoot
# Make a copy of common information for sub-meshes
def copyCommonData(self):
SLFn = SELAFIN("")
# Meta data
SLFn.TITLE = self.slf.TITLE
SLFn.file = self.slf.file
SLFn.IPARAM = self.slf.IPARAM
# Time
SLFn.DATETIME = self.slf.DATETIME
SLFn.tags = self.slf.tags
# Variables
SLFn.NBV1 = self.slf.NBV1
SLFn.VARNAMES = self.slf.VARNAMES
SLFn.VARUNITS = self.slf.VARUNITS
SLFn.NBV2 = self.slf.NBV2
SLFn.CLDNAMES = self.slf.CLDNAMES
SLFn.CLDUNITS = self.slf.CLDUNITS
SLFn.NVAR = self.slf.NVAR
SLFn.VARINDEX = range(self.slf.NVAR)
# Unchanged numbers
SLFn.NPLAN = self.slf.NPLAN
SLFn.NDP2 = self.slf.NDP2
SLFn.NDP3 = self.slf.NDP3
return SLFn
    # Split based on a sequence of parts, one for each element (result from METIS)
    def getSplitFromSequence(self, KSPLIT):
        """Convert a 1-based per-element METIS sequence into 0-based splits.

        Returns (NPARTS, NSPLIT, KSPLIT): the number of parts, the per-node
        part numbers and the per-element part numbers, both shifted to 0.
        NOTE(review): np.int is removed in modern NumPy; would need `int`.
        """
        # ~~> NPARTS is the number of parts /!\ does not check continuity vs. missing parts
        NPARTS = max(*KSPLIT)
        NSPLIT = np.zeros(self.slf.NPOIN2, dtype=np.int)
        for part in range(NPARTS):
            k = np.compress(KSPLIT == (part + 1), range(len(self.slf.IKLE)))
            NSPLIT[self.slf.IKLE[k]] = KSPLIT[k]
        return NPARTS, NSPLIT - 1, KSPLIT - 1
    # Split based on the variable PROCESSORS, defined at the nodes
    def getSplitFromNodeValues(self, var):
        """Derive (NPARTS, NSPLIT, KSPLIT) from a node variable such as 'PROCESSORS'.

        NSPLIT is the 0-based part of each node (time frame 0); KSPLIT
        assigns each element the smallest part among its nodes.  Exits the
        program if `var` is not found in the SELAFIN variables.
        """
        # ~~> Filter for 'PROCESSORS' as input to the getVariablesAt method
        i, vn = subsetVariablesSLF(var, self.slf.VARNAMES)
        if i == []:
            print "... Could not find ", var, ", you may need another split method"
            sys.exit(1)
        # ~~> NSPLIT is the integer value of the variable PROCESSORS (time frame 0)
        NSPLIT = np.array(self.slf.getVariablesAt(0, i)[0], dtype=np.int)
        # ~~> NPARTS is the number of parts /!\ does not check continuity vs. missing parts
        NPARTS = max(*NSPLIT) + 1 # User numbering NSPLIT starts from 0
        KSPLIT = np.minimum(*(NSPLIT[self.slf.IKLE].T))
        return NPARTS, NSPLIT, KSPLIT
    def setSplitForBoundaries(self, NSPLIT, KFRGL, KSPLIT):
        """Iteratively repair node/element split inconsistencies at boundaries.

        Builds IPOBO (1-based global boundary node numbers, 0 elsewhere) and
        loops until no correction is made: step 1 realigns boundary segments
        whose two boundary nodes disagree with their element's part; step 2
        forces each mixed element onto the smallest part of its nodes.
        Returns (IPOBO, NSPLIT, KSPLIT).
        """
        # ~~> Join up the global boundary nodes with the halo elements
        IPOBO = np.zeros(self.slf.NPOIN2, dtype=np.int)
        IPOBO[KFRGL.keys()] = np.array(KFRGL.values(), dtype=np.int) + 1 # this is so the nonzero search is easier
        # ~~> Cross check partition quality -- step 1
        found = True
        nloop = 0
        while found:
            found = False
            nloop += 1
            for k in range(len(self.slf.IKLE)):
                e = self.slf.IKLE[k]
                if KSPLIT[k] != max(NSPLIT[e]):
                    for p1, p2, p3 in zip([0, 1, 2], [1, 2, 0], [2, 0, 1]):
                        if NSPLIT[e[p1]] != KSPLIT[k] and NSPLIT[e[p2]] != KSPLIT[k]:
                            if IPOBO[e[p1]] != 0 and IPOBO[e[p2]] != 0:
                                print " ~> correcting boundary segment at iteration: ", nloop, (
                                    e[p1],
                                    e[p2],
                                ), k, KSPLIT[k], e, NSPLIT[e]
                                NSPLIT[e[p1]] = NSPLIT[e[p3]]
                                NSPLIT[e[p2]] = NSPLIT[e[p3]]
                                KSPLIT[k] = NSPLIT[e[p3]]
                                found = True
        # ~~> Cross check partition quality -- step 2
        found = True
        nloop = 0
        while found:
            found = False
            nloop += 1
            for k in range(len(self.slf.IKLE)):
                e = self.slf.IKLE[k]
                if min(NSPLIT[e]) != max(NSPLIT[e]) and KSPLIT[k] != min(NSPLIT[e]):
                    print " ~> correcting internal segment at iteration: ", nloop, k, KSPLIT[k], e, NSPLIT[e]
                    KSPLIT[k] = min(NSPLIT[e])
                    found = True
        return IPOBO, NSPLIT, KSPLIT
    # Classify the interface and external boundary segments of each part
    def setSplitForElements(self, IPOBO, NPARTS, NSPLIT, KSPLIT):
        """Collect, per part: SINTER (internal segments between two parts),
        SNHALO (consecutive external boundary segments) and PNODDS (other,
        non-consecutive external boundary segments), then remove duplicated
        interface segments.  Returns (SINTER, SNHALO, PNODDS).
        """
        SNHALO = dict([(i, []) for i in range(NPARTS)])
        PNODDS = dict([(i, []) for i in range(NPARTS)])
        SINTER = dict([(i, []) for i in range(NPARTS)])
        # ~~> Internal segments separating parts
        pbar = ProgressBar(maxval=len(self.slf.IKLE)).start()
        for k in range(len(self.slf.IKLE)):
            e = self.slf.IKLE[k]
            # Case 1: you are at an internal boundary element
            if KSPLIT[k] != max(NSPLIT[e]):
                for p1, p2 in zip([0, 1, 2], [1, 2, 0]):
                    if NSPLIT[e[p1]] != KSPLIT[k] and NSPLIT[e[p2]] != KSPLIT[k]:
                        SINTER[KSPLIT[k]].append((e[p1], e[p2]))
                        SINTER[min(NSPLIT[e[p1]], NSPLIT[e[p2]])].append((e[p2], e[p1]))
            # Case 2: you may be at an external boundary element
            if np.count_nonzero(IPOBO[e]) > 1:
                for p1, p2 in zip([0, 1, 2], [1, 2, 0]):
                    if IPOBO[e[p1]] != 0 and IPOBO[e[p2]] != 0: # multiplier is not possible
                        if IPOBO[e[p1]] + 1 == IPOBO[e[p2]]:
                            SNHALO[KSPLIT[k]].append((e[p1], e[p2]))
                        else:
                            PNODDS[KSPLIT[k]].append([e[p1], e[p2]])
            pbar.update(k)
        pbar.finish()
        # ~~> Clean-up of funny segments looping on themselves
        for part in range(NPARTS):
            # ~~> Quickly checking through to remove duplicate segments
            found = True
            while found:
                found = False
                INTER = np.array(SINTER[part], dtype=[("h", int), ("t", int)])
                HEADT = np.argsort(INTER["h"])
                HLINK = np.searchsorted(INTER["h"][HEADT], INTER["t"][HEADT])
                w = 0
                while w < len(HLINK):
                    if HLINK[w] < len(HLINK):
                        if (
                            INTER["h"][HEADT[w]] == INTER["t"][HEADT[HLINK[w]]]
                            and INTER["t"][HEADT[w]] == INTER["h"][HEADT[HLINK[w]]]
                        ):
                            print " ~> Removing dupicate segments in part: ", part, SINTER[part][
                                HEADT[w]
                            ], SINTER[part][HEADT[HLINK[w]]]
                            # Pop the higher index first so both stay valid.
                            if HEADT[w] > HEADT[HLINK[w]]:
                                SINTER[part].pop(HEADT[w])
                                SINTER[part].pop(HEADT[HLINK[w]])
                            else:
                                SINTER[part].pop(HEADT[HLINK[w]])
                                SINTER[part].pop(HEADT[w])
                            found = True
                            break
                    w += 1
        return SINTER, SNHALO, PNODDS
    def getIKLE(self, npart):
        """Extract the connectivity of part `npart`, renumbered locally.

        Returns (LIKLE, KELLG, KNOLG): the locally-renumbered connectivity,
        the kept global element numbers, and the local-to-global node map.
        """
        # ~~> get IKLE for that part ... still with global element numbers
        GIKLE = np.compress(self.KSPLIT == npart, self.slf.IKLE, axis=0)
        KELLG = np.compress(self.KSPLIT == npart, range(len(self.slf.IKLE)), axis=0)
        # ~~> KNOLG(NPOIN3) gives the global node number such that
        #     for i = 1,NPOIN3: Fwrite(i) = Fread(KNOLG(i)) and is ordered
        KNOLG, indices = np.unique(np.ravel(GIKLE), return_index=True)
        KNOGL = dict(zip(KNOLG, range(len(KNOLG))))
        LIKLE = -np.ones_like(GIKLE, dtype=np.int)
        pbar = ProgressBar(maxval=len(GIKLE)).start()
        for k in range(len(GIKLE)):
            LIKLE[k] = [KNOGL[GIKLE[k][0]], KNOGL[GIKLE[k][1]], KNOGL[GIKLE[k][2]]]
            pbar.update(k)
        pbar.finish()
        return LIKLE, KELLG, KNOLG
    def resetPartition(self, part, PINTER, KSPLIT):
        """Inspect elements touching the interface nodes of `part` and print
        a warning for suspicious part assignments.

        The actual re-assignment is commented out, so KSPLIT is returned
        unmodified apart from the printed diagnostics.
        """
        MASKER = np.zeros(self.slf.NPOIN2, dtype=np.int)
        for p in PINTER:
            MASKER[p] = np.arange(len(p)) + 1 # PINTER is ordered
        KIKLE = np.compress(np.maximum(*(MASKER[self.slf.IKLE].T)) >= 0, range(len(self.slf.IKLE)))
        # KIKLE = np.compress(np.count_nonzero(MASKER[self.slf.IKLE],axis=1)>2,range(len(self.slf.IKLE))) # /!\ does not work ?
        pbar = ProgressBar(maxval=len(KIKLE)).start()
        for k in KIKLE:
            e = self.slf.IKLE[k]
            if np.count_nonzero(MASKER[e]) < 2 or KSPLIT[k] == part:
                continue
            for p1, p2 in zip([0, 1, 2], [1, 2, 0]):
                if MASKER[e[p1]] > 0 and MASKER[e[p2]] > 0 and MASKER[e[p2]] > MASKER[e[p1]]:
                    print " ~> Warning for element of part: ", part, "(was:", KSPLIT[k], ") ", k, e
                    # KSPLIT[k] = part
            pbar.update(k)
        pbar.finish()
        return KSPLIT
    def joinPairs(self, polyLines):
        """
        Chain individual (head, tail) node pairs into continuous polylines.

        `polyLines` is a list of 2-tuples (segment head node, segment tail
        node).  Segments are chained by matching one segment's tail to
        another's head, using a sort on heads (HEADT) plus searchsorted
        (HLINK) for O(log n) matching.  Returns a list of node lists, one
        per joined polyline; closed loops keep their start node repeated at
        the end of the chain they were grown from.
        """
        INTER = np.array(polyLines, dtype=[("h", int), ("t", int)])
        # IDONE[w] == 1 while segment w has not been consumed yet
        IDONE = np.ones(len(polyLines), dtype=np.int)
        polyA = []  # start nodes of the lines grown so far
        polyZ = []  # end nodes of open (non-looping) lines
        polyL = []  # start/end node of closed loops
        # ~~> Finding the endings
        HEADT = np.argsort(INTER["h"])  # knowing that INTER[HEADT] is sorted by the head
        HLINK = np.searchsorted(INTER["h"][HEADT], INTER["t"][HEADT])  # INTER['h'][HEADT] is sorted
        # ... HLINK[w] for w in INTER['t'] gives you the position of INTER['t'][w] in INTER['h'][HEADT]
        w = min(np.compress(np.not_equal(IDONE, IDONE * 0), range(len(HEADT))))
        po = INTER["h"][HEADT[w]]
        pe = INTER["t"][HEADT[w]]
        IDONE[w] = 0
        polyA.append(po)
        swapMinMax = True
        while True:
            # Follow the chain: tail of w must equal head of the linked segment
            if HLINK[w] < len(INTER):
                if INTER["t"][HEADT][w] == INTER["h"][HEADT][HLINK[w]]:
                    w = HLINK[w]
                    pe = INTER["t"][HEADT][w]
                    IDONE[w] = 0
                    if pe not in polyA:
                        if HLINK[w] < len(INTER):
                            if INTER["t"][HEADT][w] != po and INTER["t"][HEADT][w] == INTER["h"][HEADT][HLINK[w]]:
                                continue
            # Chain ended: classify it as a loop (po == pe) or an open line
            if po == pe:
                polyL.append(pe)
            else:
                if pe not in polyZ:
                    polyZ.append(pe)
                else:
                    polyA.append(po)
            if np.count_nonzero(IDONE) == 0:
                break
            # Pick the next unconsumed seed, alternating between the highest
            # and the lowest remaining index
            if swapMinMax:
                w = max(np.compress(np.not_equal(IDONE, IDONE * 0), range(len(HEADT))))
            else:
                w = min(np.compress(np.not_equal(IDONE, IDONE * 0), range(len(HEADT))))
            swapMinMax = not swapMinMax
            po = INTER["h"][HEADT[w]]
            pe = INTER["t"][HEADT[w]]
            IDONE[w] = 0
            polyA.append(po)
        # ~~> Finding the sources
        TAILT = np.argsort(INTER["t"])  # knowing that INTER[TAILT] is sorted by the tail
        TLINK = np.searchsorted(INTER["t"][TAILT], INTER["h"][TAILT])  # INTER['t'][TAILT] is sorted
        # ... TLINK[w] for w in polyZ gives you the position of polyZ[w] in INTER['t'][TAILT]
        polyGones = []
        # ~~> Finding the sources of non-looping lines
        # Walk each open line backwards from its end node, prepending heads
        TAILS = np.searchsorted(INTER["t"][TAILT], polyZ)
        for w in TAILS:
            p = [INTER["t"][TAILT[w]]]
            while True:
                if INTER["h"][TAILT][w] == INTER["t"][TAILT][TLINK[w]]:
                    po = [INTER["h"][TAILT][w]]
                    po.extend(p)
                    p = po
                    w = TLINK[w]
                    if TLINK[w] < len(INTER):
                        if INTER["h"][TAILT][w] == INTER["t"][TAILT][TLINK[w]]:
                            continue
                po = [INTER["h"][TAILT][w]]
                po.extend(p)
                p = po
                break
            polyGones.append(p)
        # ~~> Finding the sources of looping lines
        # Same backward walk, but a loop terminates when it bites its own end
        LOOPS = np.searchsorted(INTER["t"][TAILT], polyL)
        for w in LOOPS:
            p = [INTER["t"][TAILT[w]]]
            while True:
                if INTER["h"][TAILT][w] == INTER["t"][TAILT][TLINK[w]]:
                    po = [INTER["h"][TAILT][w]]
                    po.extend(p)
                    p = po
                    w = TLINK[w]
                    if INTER["h"][TAILT][w] != p[len(p) - 1]:
                        continue
                po = [INTER["h"][TAILT][w]]
                po.extend(p)
                p = po
                break
            polyGones.append(p)
        return polyGones
    def joinSegments(self, polyLines):
        """
        Merge polylines end-to-end until no two lines share an end point.

        `polyLines` is a list of node lists and is CONSUMED (emptied) by this
        method.  A line whose first and last nodes coincide is considered
        closed and moved straight to the result, keeping the duplicated point.
        Returns the list of merged polylines.
        """
        polyGones = []
        maxbar = max(len(polyLines), 1)
        pbar = ProgressBar(maxval=maxbar).start()
        while polyLines != []:
            # ~~> starting point
            e = polyLines[0]
            le = len(e)
            a, b = e[0], e[len(e) - 1]
            # ~~> case of closed line
            if a == b:
                polyGones.append(e[0 : len(e)])  # /!\ here you keep the duplicated point
                polyLines.pop(0)
                continue
            # ~~> iterative process
            # Try to append/prepend one other line to the current head line;
            # the loop breaks as soon as a merge happens (pop-while-iterating
            # is safe only because of that break)
            for ei, iline in zip(polyLines[1:], range(len(polyLines))[1:]):
                # ~~> merging the two segments
                if b == ei[0]:
                    polyLines[0] = e[0 : len(e)]  # copy !
                    polyLines[0].extend(ei[1:])
                    polyLines.pop(iline)
                    break
                if a == ei[len(ei) - 1]:
                    polyLines[0] = ei[0 : len(ei)]  # copy !
                    polyLines[0].extend(e[1:])
                    polyLines.pop(iline)
                    break
            # ~~> completed search: no merge happened, the line is final
            if le == len(polyLines[0]):
                polyGones.append(e[0 : len(e)])
                polyLines.pop(0)
            pbar.update(maxbar - len(polyLines))
        pbar.finish()
        return polyGones
    def tetrisOddSegments(self, main, odds):
        """
        Stitch the "odd" polylines into the main polylines, tetris-style.

        Works like joinSegments except that the candidate lines come from
        `odds` while the growing lines come from `main`; both input lists are
        CONSUMED.  Afterwards, degenerate back-and-forth spikes (a node
        sequence x, y, x) are removed from the resulting polygons.
        """
        polyGones = []
        # NOTE(review): lo is never used below -- leftover from an earlier version?
        lo = len(odds)
        while main != []:
            # ~~> starting point
            e = main[0]
            le = len(e)
            a, b = e[0], e[len(e) - 1]
            # ~~> case of closed line
            if a == b:
                polyGones.append(e[0 : len(e)])  # /!\ here you keep the duplicated point
                main.pop(0)
                continue
            # ~~> iterative process: graft at most one odd line per pass
            for ei, iline in zip(odds, range(len(odds))):
                # ~~> merging the two segments
                if b == ei[0]:
                    main[0] = e[0 : len(e)]
                    main[0].extend(ei[1:])
                    odds.pop(iline)
                    break
                if a == ei[len(ei) - 1]:
                    main[0] = ei[0 : len(ei)]
                    main[0].extend(e[1:])
                    odds.pop(iline)
                    break
            # ~~> completed search: nothing merged, line is final
            if le == len(main[0]):
                polyGones.append(e[0 : len(e)])
                main.pop(0)
        # ~~> removing the over-constrained elements
        # (spikes x,y,x collapse to x; note the list shrinks while j advances,
        #  so consecutive spikes may need a second pass -- behaviour kept as is)
        for p in polyGones:
            if len(p) > 3:
                j = 2
                while j < len(p):
                    if p[j - 2] == p[j]:
                        p.pop(j - 2)
                        p.pop(j - 2)
                    j += 1
        return polyGones
    # Filter poly according to IPOBO on that part.
    #  ~> gloseg: is the ensemble of either closed islands or
    #    open external boundary segments
    # Note: filtering now seems to mean that to have done a lot of work for nothing
    def globalSegments(self, poly):
        """
        Keep only the sub-runs of each polyline whose nodes are external
        boundary nodes (self.IPOBO != 0).

        For a closed contour, the trailing run is spliced back onto the
        leading run so the seam at the (duplicated) start point does not
        split one physical segment in two.  Returns a flat list of node runs.
        """
        gloseg = []
        for p in poly:
            pA = p[0]
            pZ = p[len(p) - 1]
            closed = False
            if pA == pZ and self.IPOBO[pA] != 0:
                closed = True
            iA = 0  # start index of the current external run
            iZ = 0  # length of the current external run
            ploseg = []
            for i in p:
                if self.IPOBO[i] != 0:  # moves the counter along for external points
                    iZ += 1
                elif iZ != 0:  # you have just found the end of an external segment
                    ploseg.append(p[iA : iA + iZ])
                    iA += iZ + 1
                    iZ = 0
                else:
                    iA += 1
            # Flush the run still open at the end of the polyline
            if iZ != 0:
                if closed and len(ploseg) > 0:
                    # Splice the wrap-around run onto the first run
                    i = p[iA : iA + iZ]
                    i.extend(ploseg[0][1:])  # remove duplicate
                    ploseg[0] = i
                else:
                    ploseg.append(p[iA : iA + iZ])
            gloseg.extend(ploseg)
        return gloseg
    def putContent(self):
        """
        Drive the whole split and write out the per-processor files.

        Steps (in order, order matters):
        1. join boundary segments per part and filter them by IPOBO;
        2. optionally dump the split as i2s polygons and a _PROCS SELAFIN;
        3. sanity-check the element partition (resetPartition);
        4. derive NUMLIQ from the joined global boundary;
        5. renumber IKLE per part (getIKLE) and write one SELAFIN per part;
        6. if requested, build IFAPAR / NPTIR / ISEG and write one CONLIM
           per part.
        """
        # ~~> Extension for parallel file names (5-digit, zero-padded)
        fmtn = "00000" + str(self.NPARTS - 1)
        fmtn = fmtn[len(fmtn) - 5 :]
        print "\n... Split the boundary connectivity"
        # ~~> Assemble internal and external segments
        polyCLOSED = dict([(i, []) for i in range(self.NPARTS)])
        polyFILTER = dict([(i, []) for i in range(self.NPARTS)])
        polyGLOSED = []
        for part in range(self.NPARTS):  # this could be done in parallel
            print " +> Joining up boundary segments for part: ", part + 1
            # ~~> Joining up boundaries for sub-domains
            print " ~> main internal segments"
            self.PINTER[part] = self.joinPairs(self.PINTER[part])
            print " ~> main external segments"
            polyHALO = self.joinPairs(self.PNHALO[part])
            polyHALO.extend(self.PINTER[part])
            polyHALO = self.joinSegments(polyHALO)
            print " ~> odd segments"
            polyODDS = self.joinSegments(self.PNODDS[part])
            print " ~> stitching with the odd ones"
            polyGones = self.tetrisOddSegments(polyHALO, polyODDS)
            print " ~> final closure"
            polyCLOSED[part] = self.joinSegments(polyGones)
            # ~~> Building up the entire picture
            polyFILTER[part] = self.globalSegments(polyCLOSED[part])
            polyGLOSED.extend(polyFILTER[part])
        # ~~> Joining up boundaries for the global domain (Note: seems counter productive but is not)
        polyGLOSED = self.joinSegments(polyGLOSED)
        if self.isDOMAIN != "":
            print "\n... Printing the domain split into a series of i2s files"
            # ~~> Convert node numbers into x,y
            for part in range(self.NPARTS):
                print " +> part ", part + 1, " of ", self.NPARTS
                polyXY = []
                for pg in range(len(polyCLOSED[part])):
                    pxy = []
                    for pt in range(len(polyCLOSED[part][pg])):
                        n = polyCLOSED[part][pg][pt]
                        pxy.append([self.slf.MESHX[n], self.slf.MESHY[n]])
                    polyXY.append(pxy)
                # ~~> Write polygons to double check
                fmti = "00000" + str(part)
                fmti = fmti[len(fmti) - 5 :]
                fileName = path.join(path.dirname(self.slf.file["name"]), self.isDOMAIN + fmtn + "-" + fmti + ".i2s")
                putInS(fileName, [], "i2s", polyXY)
            # ~~> Convert node numbers into x,y (whole domain this time)
            polyXY = []
            for pg in range(len(polyGLOSED)):
                pxy = []
                for pt in range(len(polyGLOSED[pg])):
                    n = polyGLOSED[pg][pt]
                    pxy.append([self.slf.MESHX[n], self.slf.MESHY[n]])
                polyXY.append(pxy)
            # ~~> Write polygons to double check
            fileName = path.join(path.dirname(self.slf.file["name"]), self.isDOMAIN + ".i2s")
            putInS(fileName, [], "i2s", polyXY)
        print "\n... Final check to the element partitioning"
        for part in range(self.NPARTS):  # this could be done in parallel
            self.KSPLIT = self.resetPartition(part, self.PINTER[part], self.KSPLIT)
        if self.isDOMAIN != "":
            # ~~> This is optional: a SELAFIN whose variables all carry NSPLIT
            print "\n... Printing the domain split into a SELAFIN"
            fileRoot, fileExts = path.splitext(self.slf.file["name"])
            self.slf.fole.update({"hook": open(fileRoot + "_PROCS" + fileExts, "wb")})
            self.slf.appendHeaderSLF()
            self.slf.appendCoreTimeSLF(0)
            VARSOR = self.slf.getVALUES(0)
            for v in range(self.slf.NVAR):
                VARSOR[v] = self.NSPLIT
            self.slf.appendCoreVarsSLF(VARSOR)
            self.slf.fole["hook"].close()
        print "\n... Storing the global liquid boundary numbering (NUMLIQ)"
        # ~~> Implying NUMLIQ and the number NFRLIQ based on the joined-up lines
        self.clm.setNUMLIQ(polyGLOSED)
        print "\n... Split the mesh connectivity"
        # ~~> Preliminary set up for LIKLE, KNOLG and KELLG by parts
        LIKLE = dict([(i, []) for i in range(self.NPARTS)])
        KELLG = dict([(i, []) for i in range(self.NPARTS)])
        KNOLG = dict([(i, []) for i in range(self.NPARTS)])
        for part in range(self.NPARTS):
            print " +> re-ordering IKLE for part ", part + 1
            LIKLE[part], KELLG[part], KNOLG[part] = self.getIKLE(part)
        # ~~> CONLIM file: Preliminary set up of IFAPAR and ISEG for all parts
        IFAPAR = dict([(i, {}) for i in range(self.NPARTS)])
        ISEG = {}
        # Organising ISEG for easier call: part 1
        # For each OPEN filtered segment, remember the node just after its
        # start (positive) and just before its end (negative), keyed by the
        # end nodes themselves
        for part in range(self.NPARTS):
            for i in polyFILTER[part]:
                if i[0] == i[len(i) - 1]:
                    continue  # /!\ you are here adding one !
                if i[0] in ISEG:
                    ISEG[i[0]].update({part: i[1] + 1})
                else:
                    ISEG.update({i[0]: {part: i[1] + 1}})
                if i[len(i) - 1] in ISEG:
                    ISEG[i[len(i) - 1]].update({part: -i[len(i) - 2] - 1})
                else:
                    ISEG.update({i[len(i) - 1]: {part: -i[len(i) - 2] - 1}})
        # Switching parts of ISEG for final call: part 2
        # Each shared node must join exactly two segments; swap the two
        # entries so each part sees its neighbour's continuation node
        for i in ISEG:
            if len(ISEG[i]) != 2:
                print "... You have a boundary node surounded with more than two boundary segments: ", i
                sys.exit(1)
            parts = ISEG[i].keys()
            ISEG[i] = {parts[0]: ISEG[i][parts[1]], parts[1]: ISEG[i][parts[0]]}
        # ~~> CONLIM file: Preliminary set up of NPTIR for all parts
        # NPTIR[part] maps each interface node to the list of other parts
        # that share it
        NPTIR = dict([(i, {}) for i in range(self.NPARTS)])
        for part in range(self.NPARTS):
            for p in self.PINTER[part]:
                NPTIR[part].update(dict([(i, []) for i in p]))
        parts = range(self.NPARTS)
        while parts != []:
            part = parts[0]
            parts.pop(0)
            for ip in NPTIR[part]:
                for ipart in parts:
                    if ip in NPTIR[ipart]:
                        NPTIR[part][ip].append(ipart)
                        NPTIR[ipart][ip].append(part)
        print "... Split of the SELAFIN file"
        for part in range(self.NPARTS):
            fmti = "00000" + str(part)
            fmti = fmti[len(fmti) - 5 :]
            print " +> part ", part + 1, " of ", self.NPARTS
            self.slfn.IKLE2 = LIKLE[part]
            self.slfn.NELEM2 = len(LIKLE[part])
            self.slfn.NPOIN2 = len(KNOLG[part])
            # ~~> IPARAM has two new values: 8:NPTFR and 9:NPTIR
            self.slfn.IPARAM[7] = len(np.unique(np.concatenate(polyFILTER[part])))
            self.slfn.IPARAM[8] = len(NPTIR[part])
            # ~~> IPOBO (or IRAND) converted into KNOLG[part]
            self.slfn.IPOBO = KNOLG[part] + 1
            print " ~> filtering the MESH"
            # ~~> GEO file: MESH coordinates
            self.slfn.MESHX = np.zeros(self.slfn.NPOIN2, dtype=np.float32)
            self.slfn.MESHY = np.zeros(self.slfn.NPOIN2, dtype=np.float32)
            self.slfn.MESHX = self.slf.MESHX[KNOLG[part]]
            self.slfn.MESHY = self.slf.MESHY[KNOLG[part]]
            # ~~> GEO file: File names
            fileRoot, fileExts = path.splitext(self.slf.file["name"])
            self.slfn.file["name"] = fileRoot + fmtn + "-" + fmti + fileExts
            # ~~> GEO file: Printing
            print " ~> printing: ", self.slfn.file["name"]
            self.slfn.fole.update({"hook": open(self.slfn.file["name"], "wb")})
            self.slfn.appendHeaderSLF()
            LVARSOR = np.zeros((self.slfn.NVAR, self.slfn.NPOIN2), dtype=np.float32)
            for t in range(len(self.slf.tags["times"])):
                self.slfn.appendCoreTimeSLF(t)
                VARSOR = self.slf.getVALUES(t)
                for v in range(self.slfn.NVAR):
                    LVARSOR[v] = VARSOR[v][KNOLG[part]]
                self.slfn.appendCoreVarsSLF(LVARSOR)
            self.slfn.fole["hook"].close()
        if not self.isCONLIM:
            return
        print "\n... Connect elements across internal boundaries (IFAPAR)"
        for part in range(self.NPARTS):
            print " +> part ", part + 1, " of ", self.NPARTS
            # ~~> CONLIM file: Preliminary set up of PEHALO elements across internal boundaries
            PEHALO = {}
            SEHALO = {}
            # Step 1: find out about the primary elements and loop through IKLE
            # NSPLIT is recycled as a scratch mask: 1 on interface nodes of `part`
            self.NSPLIT *= 0
            MASKER = NPTIR[part].keys()
            self.NSPLIT[MASKER] += 1
            print " ~> Assembling primary elements with other side"
            # Sub Step 1: Assembling all edges from the other sides
            maxbar = 0
            ibar = 0
            for ip in range(self.NPARTS):
                maxbar += len(LIKLE[ip])
            pbar = ProgressBar(maxval=maxbar).start()
            for otherpart in range(self.NPARTS):
                if otherpart == part:
                    continue  # all parts are still positive at this stage
                for k in range(len(LIKLE[otherpart])):
                    ibar += 1
                    e = self.slf.IKLE[KELLG[otherpart][k]]
                    if np.count_nonzero(self.NSPLIT[e]) < 2:
                        continue
                    for p1, p2 in zip([1, 2, 0], [0, 1, 2]):  # reverse order because looking from the other side
                        if self.NSPLIT[e[p1]] > 0 and self.NSPLIT[e[p2]] > 0:
                            if not (e[p1], e[p2]) in PEHALO:
                                PEHALO.update({(e[p1], e[p2]): [0, []]})
                            PEHALO[(e[p1], e[p2])][1].append(k)
                            PEHALO[(e[p1], e[p2])][1].append(otherpart)
                    pbar.update(ibar)
            # Sub Step 2: Assembling all edges from the primary side (there are three times more of them)
            for k in range(len(LIKLE[part])):
                ibar += 1
                j = KELLG[part][k]
                e = self.slf.IKLE[j]
                if np.count_nonzero(self.NSPLIT[e]) < 2:
                    continue
                for p1, p2, p3 in zip([0, 1, 2], [1, 2, 0], [2, 0, 1]):
                    if self.NSPLIT[e[p1]] > 0 and self.NSPLIT[e[p2]] > 0:
                        if (e[p1], e[p2]) in PEHALO:  # the good side opposes the dark side
                            PEHALO[(e[p1], e[p2])][0] = k
                            if self.NSPLIT[e[p3]] == 0:
                                self.NSPLIT[e[p3]] = -1
                            if self.NSPLIT[e[p3]] == -1:
                                if not (e[p1], e[p3]) in SEHALO:
                                    SEHALO.update({(e[p1], e[p3]): []})
                                SEHALO[(e[p1], e[p3])].append(k)
                                if not (e[p2], e[p3]) in SEHALO:
                                    SEHALO.update({(e[p2], e[p3]): []})
                                SEHALO[(e[p2], e[p3])].append(k)
                            else:  # self.NSPLIT[e[p3]] must be 2 !
                                if not (e[p3], e[p1]) in SEHALO:
                                    SEHALO.update({(e[p3], e[p1]): []})
                                if k not in SEHALO[(e[p3], e[p1])]:
                                    SEHALO[(e[p3], e[p1])].append(k)
                                if not (e[p2], e[p3]) in SEHALO:
                                    SEHALO.update({(e[p2], e[p3]): []})
                                if k not in SEHALO[(e[p2], e[p3])]:
                                    SEHALO[(e[p2], e[p3])].append(k)
                            if self.KSPLIT[j] >= 0:
                                self.KSPLIT[j] = -(self.KSPLIT[j] + 1)  # /!\ This is very dangerous but necessary
                pbar.update(ibar)
            pbar.finish()
            # Sub Step 3: Final clean up of the other side ? no need but check later for (ei)[0] == 0
            # Step 2: find out about the secondary elements on IKLE ( local LIKLE ? )
            print " ~> Assembling secondary elements of that side"
            pbar = ProgressBar(maxval=len(LIKLE[part])).start()
            for k in range(len(LIKLE[part])):
                j = KELLG[part][k]
                e = self.slf.IKLE[j]
                if self.KSPLIT[j] != part:
                    continue
                if np.count_nonzero(self.NSPLIT[e]) < 2:
                    continue
                for i in [0, 1, 2]:
                    ii = (i + 1) % 3
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] < 0 and (e[i], e[ii]) in SEHALO:
                        SEHALO[(e[i], e[ii])].append(k)  # correct orientation
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] > 0 and (e[ii], e[i]) in SEHALO:
                        SEHALO[(e[ii], e[i])].append(k)  # opposite orientation
                    ii = (i + 2) % 3
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] < 0 and (e[i], e[ii]) in SEHALO:
                        SEHALO[(e[i], e[ii])].append(k)  # correct orientation
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] > 0 and (e[i], e[ii]) in SEHALO:
                        SEHALO[(e[i], e[ii])].append(k)  # opposite orientation
                if self.KSPLIT[j] < 0:
                    self.KSPLIT[j] = -self.KSPLIT[j] - 1  # /!\ back to a safe place
                pbar.update(k)
            pbar.finish()
            # Step 3: finally cross reference information between SEHALO and PEHALO
            print " ~> Combining sides surrounding the halo-elements"
            # IFAPAR[part][k] is [elem, part, elem, part, elem, part], one pair
            # per edge of element k; -2/-1 marks "no neighbour"
            for ie in PEHALO:
                if PEHALO[ie][0] == 0:
                    continue
                k = PEHALO[ie][0]  # element number in its local part numbering
                if not k in IFAPAR[part]:
                    IFAPAR[part].update({k: [-2, -1, -2, -1, -2, -1]})
                j = KELLG[part][k]
                e = self.slf.IKLE[j]
                for p1, p2 in zip([0, 1, 2], [1, 2, 0]):
                    if (e[p1], e[p2]) in SEHALO:
                        if len(SEHALO[(e[p1], e[p2])]) > 1:
                            if SEHALO[(e[p1], e[p2])][0] == k:
                                IFAPAR[part][k][2 * p1] = SEHALO[(e[p1], e[p2])][1]
                            if SEHALO[(e[p1], e[p2])][1] == k:
                                IFAPAR[part][k][2 * p1] = SEHALO[(e[p1], e[p2])][0]
                            IFAPAR[part][k][1 + 2 * p1] = part
                    if (e[p2], e[p1]) in SEHALO:
                        if len(SEHALO[(e[p2], e[p1])]) > 1:
                            if SEHALO[(e[p2], e[p1])][0] == k:
                                IFAPAR[part][k][2 * p1] = SEHALO[(e[p2], e[p1])][1]
                            if SEHALO[(e[p2], e[p1])][1] == k:
                                IFAPAR[part][k][2 * p1] = SEHALO[(e[p2], e[p1])][0]
                            IFAPAR[part][k][1 + 2 * p1] = part
                    if ie == (e[p1], e[p2]):
                        IFAPAR[part][k][2 * p1] = PEHALO[ie][1][0]
                        IFAPAR[part][k][1 + 2 * p1] = PEHALO[ie][1][1]
        # ~~> CONLIM file: Write to file ... pfuuuuuh ... this is it !
        print "\n... Split of the CONLIM files"
        for part in range(self.NPARTS):
            fmti = "00000" + str(part)
            fmti = fmti[len(fmti) - 5 :]
            print " +> part: ", part + 1, " of ", self.NPARTS
            # ~~> CONLIM file: Set the filter
            INDEX = np.zeros_like(self.clm.INDEX, dtype=np.int)
            for contour in polyFILTER[part]:
                # ~~> Closed contour: no need to change ISEG
                if contour[0] == contour[len(contour) - 1]:
                    for c in contour[1:]:
                        INDEX[self.clm.KFRGL[c]] = self.clm.KFRGL[c] + 1
                # ~~> Open contour: need to change ISEG with neighbours
                else:
                    for c in contour[0:]:
                        INDEX[self.clm.KFRGL[c]] = self.clm.KFRGL[c] + 1
                    iA = self.clm.KFRGL[contour[0]]
                    self.clm.POR["is"][iA] = ISEG[contour[0]][part]
                    self.clm.POR["xs"][iA] = self.slf.MESHX[abs(ISEG[contour[0]][part]) - 1]  # /!\ MESHX start at 0
                    self.clm.POR["ys"][iA] = self.slf.MESHY[abs(ISEG[contour[0]][part]) - 1]  # /!\ MESHY start at 0
                    iA = self.clm.KFRGL[contour[len(contour) - 1]]
                    self.clm.POR["is"][iA] = ISEG[contour[len(contour) - 1]][part]
                    self.clm.POR["xs"][iA] = self.slf.MESHX[abs(ISEG[contour[len(contour) - 1]][part]) - 1]
                    self.clm.POR["ys"][iA] = self.slf.MESHY[abs(ISEG[contour[len(contour) - 1]][part]) - 1]
            self.clm.INDEX = INDEX
            # ~~> CONLIM file: Set the NPTIR and CUTs
            self.clm.NPTIR = NPTIR[part]
            # ~~> CONLIM file: Set the IFAPAR
            self.clm.IFAPAR = IFAPAR[part]
            # ~~> CONLIM file
            fileRoot, fileExts = path.splitext(self.clm.fileName)
            print " ~> printing: ", fileRoot + fmtn + "-" + fmti + fileExts
            self.clm.putContent(fileRoot + fmtn + "-" + fmti + fileExts)
        return
# _____ _____________________________________
# ____/ Primary METIS Classes /____________________________________/
#
class Graph:
    """
    Python mirror of METIS' graph_t structure (see libmetis/struct.h).

    The attributes are declared as class-level placeholders only; nothing in
    the visible code populates or uses them, so this looks like scaffolding
    for a METIS port.  The trailing string literal reproduces the mpmetis
    usage text verbatim for reference.
    """
    # Graph size constants
    nvtxs = -1
    nedges = -1
    ncon = -1
    mincut = -1
    minvol = -1
    nbnd = -1
    # Memory for the graph structure (comments keep the C allocation hints)
    xadj = None  # = imalloc(snvtxs+1, "SetupSplitGraph: xadj");
    vwgt = None  # = imalloc(sgraph->ncon*snvtxs, "SetupSplitGraph: vwgt");
    adjncy = None  # = imalloc(snedges, "SetupSplitGraph: adjncy");
    adjwgt = None  # = imalloc(snedges, "SetupSplitGraph: adjwgt");
    label = None  # = imalloc(snvtxs, "SetupSplitGraph: label");
    cmap = None
    tvwgt = None  # = imalloc(sgraph->ncon, "SetupSplitGraph: tvwgt");
    invtvwgt = None  # = rmalloc(sgraph->ncon, "SetupSplitGraph: invtvwgt");
    # By default these are set to true, but they can be explicitly changed afterwards
    free_xadj = 1
    free_vwgt = 1
    free_vsize = 1
    free_adjncy = 1
    free_adjwgt = 1
    # Memory for the partition/refinement structure
    where = None
    pwgts = None
    id = None
    ed = None
    bndptr = None
    bndind = None
    nrinfo = None
    ckrinfo = None
    vkrinfo = None
    # Linked-list structure
    coarser = None
    finer = None
    """
    "Usage: mpmetis [options] meshfile nparts",
    " ",
    " Required parameters",
    " meshfile Stores the mesh to be partitioned.",
    " nparts The number of partitions to split the mesh.",
    " ",
    " Optional parameters",
    " -gtype=string",
    " Specifies the graph to be used for computing the partitioning",
    " The possible values are:",
    " dual - Partition the dual graph of the mesh [default]",
    " nodal - Partition the nodal graph of the mesh",
    " ",
    " -ptype=string",
    " Specifies the scheme to be used for computing the k-way partitioning.",
    " The possible values are:",
    " rb - Recursive bisectioning",
    " kway - Direct k-way partitioning [default]",
    " ",
    " -ctype=string",
    " Specifies the scheme to be used to match the vertices of the graph",
    " during the coarsening.",
    " The possible values are:",
    " rm - Random matching",
    " shem - Sorted heavy-edge matching [default]",
    " ",
    " -iptype=string [applies only when -ptype=rb]",
    " Specifies the scheme to be used to compute the initial partitioning",
    " of the graph.",
    " The possible values are:",
    " grow - Grow a bisection using a greedy strategy [default]",
    " random - Compute a bisection at random",
    " ",
    " -objtype=string [applies only when -ptype=kway]",
    " Specifies the objective that the partitioning routines will optimize.",
    " The possible values are:",
    " cut - Minimize the edgecut [default]",
    " vol - Minimize the total communication volume",
    " ",
    " -contig [applies only when -ptype=kway]",
    " Specifies that the partitioning routines should try to produce",
    " partitions that are contiguous. Note that if the input graph is not",
    " connected this option is ignored.",
    " ",
    " -minconn [applies only when -ptype=kway]",
    " Specifies that the partitioning routines should try to minimize the",
    " maximum degree of the subdomain graph, i.e., the graph in which each",
    " partition is a node, and edges connect subdomains with a shared",
    " interface.",
    " ",
    " -tpwgts=filename",
    " Specifies the name of the file that stores the target weights for",
    " each partition. By default, all partitions are assumed to be of ",
    " the same size.",
    " ",
    " -ufactor=int",
    " Specifies the maximum allowed load imbalance among the partitions.",
    " A value of x indicates that the allowed load imbalance is 1+x/1000.",
    " For ptype=rb, the load imbalance is measured as the ratio of the ",
    " 2*max(left,right)/(left+right), where left and right are the sizes",
    " of the respective partitions at each bisection. ",
    " For ptype=kway, the load imbalance is measured as the ratio of ",
    " max_i(pwgts[i])/avgpwgt, where pwgts[i] is the weight of the ith",
    " partition and avgpwgt is the sum of the total vertex weights divided",
    " by the number of partitions requested.",
    " For ptype=rb, the default value is 1 (i.e., load imbalance of 1.001).",
    " For ptype=kway, the default value is 30 (i.e., load imbalance of 1.03).",
    " ",
    " -ncommon=int",
    " Specifies the common number of nodes that two elements must have",
    " in order to put an edge between them in the dual graph. Default is 1.",
    " ",
    " -niter=int",
    " Specifies the number of iterations for the refinement algorithms",
    " at each stage of the uncoarsening process. Default is 10.",
    " ",
    " -ncuts=int",
    " Specifies the number of different partitionings that it will compute.",
    " The final partitioning is the one that achieves the best edgecut or",
    " communication volume. Default is 1.",
    " ",
    " -nooutput",
    " Specifies that no partitioning file should be generated.",
    " ",
    " -seed=int",
    " Selects the seed of the random number generator. ",
    " ",
    " -dbglvl=int ",
    " Selects the dbglvl. ",
    " ",
    " -help",
    " Prints this message.",
    ""
    """
# _____ ___________________________________________
# ____/ General Toolbox /__________________________________________/
#
# _____ ________________________________________________
# ____/ MAIN CALL /_______________________________________________/
#
__author__ = "Sebastien E. Bourban"
__date__ = "$29-Feb-2012 08:51:29$"
if __name__ == "__main__":
    # Driven by a PARTEL.PAR steering file in the current working directory:
    #   line 0: SELAFIN geometry file, line 1: CONLIM boundary file,
    #   line 5 (optional): split-CONLIM flag, line 6 (optional): domain flag
    PWD = getcwd()
    partelName = path.join(PWD, "PARTEL.PAR")
    if not path.exists(partelName):
        print "... could not find the PARTEL.PAR file in ", PWD
        sys.exit(1)
    files = getFileContent(partelName)
    fileSLF = path.join(PWD, files[0].strip())
    if not path.exists(fileSLF):
        print "... could not find the file ", fileSLF, " in ", PWD
        sys.exit(1)
    fileCLM = path.join(PWD, files[1].strip())
    if not path.exists(fileCLM):
        print "... could not find the file ", fileCLM, " in ", PWD
        sys.exit(1)
    # Optional pre-computed METIS result; empty string means "not available"
    fileSEQ = path.join(PWD, "RESULT_SEQ_METIS")
    if not path.exists(fileSEQ):
        fileSEQ = ""
    splitCONLIM = False
    if len(files) > 5:
        splitCONLIM = int(files[5].strip()) == 1
    writeDOMAIN = ""
    if len(files) > 6:
        if int(files[6].strip()) == 1:
            writeDOMAIN = "T2DBND"
    slfs = splitSELAFIN(fileSLF, fileCLM, fileSEQ, splitCONLIM=splitCONLIM, DOMfileRoot=writeDOMAIN)
    slfs.putContent()
    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Jenkins' success message ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print "\n\nMy work is done\n\n"
    sys.exit(0)
| ArteliaTelemac/PostTelemac | PostTelemac/meshlayerparsers/libs_telemac/utilstelemac/partitioning.py | Python | gpl-3.0 | 47,115 |
#!/usr/bin/env python
#Copyright 2013 Mitchell Jon Stanton-Cook Licensed under the
#Educational Community License, Version 2.0 (the "License"); you may
#not use this file except in compliance with the License. You may
#obtain a copy of the License at
#
##http://www.osedu.org/licenses/ECL-2.0
#
##Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an "AS IS"
#BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
#or implied. See the License for the specific language governing
#permissions and limitations under the License.
"""
Ensure that uncalled bases ('N') have a quality score of 2
"""
import gzip
import os
import sys
import traceback
import argparse
import time
import __init__ as meta
epi = "Licence: %s by %s <%s>" % (meta.__licence__, meta.__author__,
meta.__author_email__)
prog = meta.__name__.replace(' ', '_')
__doc__ = " %s v%s - %s" % (prog, meta.__version__, meta.__description__)
def reset_quality(args):
    """
    Rewrite a FASTQ file so every uncalled base ('N') gets quality Q2.

    Q2 is '#' in PHRED+33 encoding and 'B' in PHRED+64 encoding.

    :param args: parsed argparse namespace with attributes:
                 - qual_enc: 33 or 64 (anything int()-coercible)
                 - input: path to a .fastq or .fastq.gz file
                 - output: path for the rewritten file (gzipped iff input is)
    Exits with status 1 on an unsupported encoding or file extension.
    """
    # Single-argument print(...) behaves identically under Python 2 and 3
    args.qual_enc = int(args.qual_enc)
    if args.qual_enc == 33:
        nQ = '#'  # chr(33 + 2)
        print("Using Q33")
    elif args.qual_enc == 64:
        nQ = 'B'  # chr(64 + 2)
        print("Using Q64")
    else:
        print("Only support 33 or 64 PHRED encoded")
        sys.exit(1)
    args.input = os.path.expanduser(args.input)
    if args.input.endswith('.gz'):
        f = gzip.open(args.input, 'r')
        fout = gzip.open(args.output, 'w')
    elif args.input.endswith('.fastq'):
        f = open(args.input, 'r')
        fout = open(args.output, 'w')
    else:
        print("Not supported")
        sys.exit(1)
    i = 0
    try:
        # Count lines first so the read loop below has a fixed bound
        num_lines = sum(1 for line in f)
        f.seek(0)
        while i < num_lines:
            # One FASTQ record = header, sequence, separator, quality
            header = f.readline()
            seq = f.readline()
            misc = f.readline()
            qual = list(f.readline())
            # Overwrite the quality of every uncalled base with Q2
            for p, base in enumerate(seq):
                if base == 'N':
                    qual[p] = nQ
            fout.write(header)
            fout.write(seq)
            fout.write(misc)
            fout.write(''.join(qual))
            i = i + 4
    finally:
        # Original leaked both handles; always close them
        f.close()
        fout.close()
    print("Done - processed a total of %i reads" % (i / 4))
if __name__ == '__main__':
    # NOTE: `except X, e` is Python 2-only syntax; this script cannot run on Python 3 as is
    try:
        start_time = time.time()
        desc = __doc__.strip()
        parser = argparse.ArgumentParser(description=desc,epilog=epi)
        parser.add_argument('-v', '--verbose', action='store_true',
                            default=False, help='verbose output')
        parser.add_argument('--version', action='version',
                            version='%(prog)s ' + meta.__version__)
        parser.add_argument('-q', '--qual_enc', action='store',
                            default=33, help='Quality encoding (33 | 64)')
        parser.add_argument('input', action='store',
                            type=str, help='Full path to the input file')
        parser.add_argument('output', action='store', type=str,
                            help='Full path to the output file')
        # Dispatch through set_defaults(func=...) so subcommands could be added later
        parser.set_defaults(func=reset_quality)
        args = parser.parse_args()
        args.func(args)
        if args.verbose: print "Executing @ " + time.asctime()
        if args.verbose: print "Ended @ " + time.asctime()
        if args.verbose: print 'total time in minutes:',
        if args.verbose: print (time.time() - start_time) / 60.0
        sys.exit(0)
    except KeyboardInterrupt, e: # Ctrl-C
        raise e
    except SystemExit, e: # sys.exit()
        raise e
    except Exception, e:
        print 'ERROR, UNEXPECTED EXCEPTION'
        print str(e)
        traceback.print_exc()
        # os._exit skips cleanup handlers; used here to guarantee a non-zero exit
        os._exit(1)
| mscook/rQer | rQer.py | Python | apache-2.0 | 3,743 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
from glob import glob
class Cuda(Package):
    """CUDA is a parallel computing platform and programming model invented
    by NVIDIA. It enables dramatic increases in computing performance by
    harnessing the power of the graphics processing unit (GPU).
    Note: This package does not currently install the drivers necessary
    to run CUDA. These will need to be installed manually. See:
    https://docs.nvidia.com/cuda/ for details."""
    homepage = "https://developer.nvidia.com/cuda-zone"
    # expand=False because the download is a self-extracting .run installer,
    # not an archive; the second argument of version() is its md5 checksum
    version('9.2.88', 'dd6e33e10d32a29914b7700c7b3d1ca0', expand=False,
            url="https://developer.nvidia.com/compute/cuda/9.2/Prod/local_installers/cuda_9.2.88_396.26_linux")
    version('9.1.85', '67a5c3933109507df6b68f80650b4b4a', expand=False,
            url="https://developer.nvidia.com/compute/cuda/9.1/Prod/local_installers/cuda_9.1.85_387.26_linux")
    version('9.0.176', '7a00187b2ce5c5e350e68882f42dd507', expand=False,
            url="https://developer.nvidia.com/compute/cuda/9.0/Prod/local_installers/cuda_9.0.176_384.81_linux-run")
    version('8.0.61', '33e1bd980e91af4e55f3ef835c103f9b', expand=False,
            url="https://developer.nvidia.com/compute/cuda/8.0/Prod2/local_installers/cuda_8.0.61_375.26_linux-run")
    version('8.0.44', '6dca912f9b7e2b7569b0074a41713640', expand=False,
            url="https://developer.nvidia.com/compute/cuda/8.0/prod/local_installers/cuda_8.0.44_linux-run")
    version('7.5.18', '4b3bcecf0dfc35928a0898793cf3e4c6', expand=False,
            url="http://developer.download.nvidia.com/compute/cuda/7.5/Prod/local_installers/cuda_7.5.18_linux.run")
    version('6.5.14', '90b1b8f77313600cc294d9271741f4da', expand=False,
            url="http://developer.download.nvidia.com/compute/cuda/6_5/rel/installers/cuda_6.5.14_linux_64.run")
    def install(self, spec, prefix):
        """Run NVIDIA's self-extracting installer in silent, toolkit-only mode."""
        # The staged download keeps its original cuda*_linux* name
        runfile = glob(join_path(self.stage.path, 'cuda*_linux*'))[0]
        chmod = which('chmod')
        chmod('+x', runfile)
        runfile = which(runfile)
        # Note: NVIDIA does not officially support many newer versions of
        # compilers. For example, on CentOS 6, you must use GCC 4.4.7 or
        # older. See:
        # http://docs.nvidia.com/cuda/cuda-installation-guide-linux/#system-requirements
        # https://gist.github.com/ax3l/9489132
        # for details.
        runfile(
            '--silent',   # disable interactive prompts
            '--verbose',  # create verbose log file
            '--override', # override compiler version checks
            '--toolkit',  # install CUDA Toolkit
            '--toolkitpath=%s' % prefix
        )
| tmerrick1/spack | var/spack/repos/builtin/packages/cuda/package.py | Python | lgpl-2.1 | 3,901 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function
import re
def multiple_split(_str, splits):
    """
    Split ``_str`` on every separator in ``splits`` and drop empty fragments.

    :param _str: the string to split
    :param splits: iterable of separators; each is handed to ``str.split`` in
                   turn, so multi-character separators work too
    :return: list of all non-empty fragments, in original order

    >>> multiple_split('a,b;c', ',;')
    ['a', 'b', 'c']
    """
    res = [_str]
    for sep in splits:
        # Re-split every fragment produced so far on the current separator.
        # (Replaces the original list(map(lambda ...)) that abused map() for
        # its side effect on a closed-over list -- the "# Why?" in the original.)
        res = [piece for fragment in res for piece in fragment.split(sep)]
    return [piece for piece in res if piece]
if __name__ == '__main__':
    # Demo: the hand-rolled splitter versus the one-line regex equivalent
    # (both use the separator set ',:;\n\t')
    s = 'ab;cd|efg|hi,,jkl|mn\topq:rst,uvw\nxyz'
    print(multiple_split(s, ',:;\n\t'))
    print(re.split(r'[,:;\n\t]+', s))
| quietcoolwu/python-playground | imooc/python_advanced/4_1_multiple_split.py | Python | mit | 486 |
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# 2015-04-30T10:25+08:00
# Ellipsis and NotImplemented are built-in singletons; show their types
print(type(Ellipsis))
print(type(NotImplemented))
# memoryview is a built-in class.
v = memoryview(b'abcde')
print(v[0])  # indexing a memoryview over bytes yields an int in Python 3
#v[0] -= 32 # Read only memory.
print(v)
# bytes() copies the selected slice out of the (zero-copy) view
b = bytes(v[1:4])
print(b)
| myd7349/DiveIntoPython3Practices | chapter_17_PortingCodeToPython3With2to3/types.py | Python | lgpl-3.0 | 265 |
# -*- coding: utf-8 -*-
import re
from twisted.web.http import BAD_REQUEST
from helper.database import DatabaseHelper
from helper.resource import YuzukiResource
from helper.template import render_template
from model.group import Group
from model.user import User
class Register(YuzukiResource):
    def __init__(self):
        """
        Cache the (uid, name) pairs of all "important" groups once at
        construction; both the GET and POST renders reuse this list for the
        bunryu (category) drop-down.
        """
        YuzukiResource.__init__(self)
        # Short-lived session only for this startup query
        dbsession = DatabaseHelper.session()
        query = dbsession.query(Group).filter(Group.important)
        result = query.all()
        self.bunryu_groups = [(group.uid, group.name) for group in result]
        dbsession.close()
def render_GET(self, request):
context = {"group_meta": self.bunryu_groups}
return render_template("register.html", request, context)
def render_POST(self, request):
username = request.get_argument("username") or None
nickname = request.get_argument("nickname") or None
password = request.get_argument("password") or None
pd_realname = request.get_argument("pd_realname") or None
pd_email = request.get_argument("pd_email") or None
pd_address = request.get_argument("pd_address") or None
pd_phone = request.get_argument("pd_phone") or None
pd_bunryu = request.get_argument("pd_bunryu") or None
pd_bio = request.get_argument("pd_bio") or None
new_user = User(username, nickname, password, pd_realname, pd_email, pd_address, pd_phone, pd_bunryu, pd_bio)
err = self.check_user_data_valid(request, new_user)
if not err:
request.dbsession.add(new_user)
request.dbsession.commit()
request.redirect("/welcome")
return "registered successfully"
else:
request.setResponseCode(BAD_REQUEST)
context = {
"group_meta": self.bunryu_groups,
"err": err,
}
return render_template("register.html", request, context)
def check_user_data_valid(self, request, new_user):
# empty value check
if not new_user.username:
return u"ID는 비어있을 수 없습니다."
if not new_user.nickname:
return u"별명은 비어있을 수 없습니다."
if not new_user.password:
return u"비밀번호는 비어있을 수 없습니다."
if not new_user.pd_realname:
return u"실명은 비어있을 수 없습니다."
if not new_user.pd_bunryu:
return u"분류는 비어있을 수 없습니다."
# nickname username regex validity check
if not re.match(u"^[-_a-zA-Z가-힣\\d\\(\\)]{1,}$", new_user.username):
return u"ID는 영문, 한글, 숫자, 붙임표(-), 밑줄(_)과 괄호만 사용할 수 있습니다."
if not re.match(u"^[-_a-zA-Z가-힣\\d\\(\\)]{1,}$", new_user.nickname):
return u"별명은 영문, 한글, 숫자, 붙임표(-), 밑줄(_)과 괄호만 사용할 수 있습니다."
# duplicate value check
user_query = request.dbsession.query(User)
query = user_query.filter(User.username == new_user.username)
if request.dbsession.query(query.exists()).scalar():
return u"이미 사용되고 있는 ID입니다."
query = user_query.filter(User.nickname == new_user.nickname)
if request.dbsession.query(query.exists()).scalar():
return u"이미 사용되고 있는 별명입니다."
# bunryu existence check
query = request.dbsession.query(Group).filter(Group.uid == new_user.pd_bunryu)
result = query.all()
if not result:
return u"존재하지 않는 분류 그룹입니다."
else:
group = result[0]
group.users.append(new_user)
# all green
return None
| Perlmint/Yuzuki | resource/register.py | Python | mit | 3,820 |
'''@author: lockrecv@gmail.com'''
import unittest
from src.util.PowerOn import PowerOn
class Test(unittest.TestCase):
    """Smoke test for PowerOn: load the JSON config and dump it."""
    def setUp(self):
        # No fixtures needed.
        pass
    def tearDown(self):
        # Nothing to clean up.
        pass
    def testName(self):
        # NOTE(review): the path is relative to this test file's directory;
        # running the suite from another cwd will likely fail -- confirm.
        poweron = PowerOn("../../src/config/power-on.json")
        poweron.toString()
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() | ylcrow/poweron | test/util/PowerOnTest.py | Python | mit | 406 |
from libsaas import http, parsers
from libsaas.services import base
from . import resource
class SubscriptionsBase(resource.RecurlyResource):
    """Common base for subscription resources: fixes the URL path and
    disables DELETE (subscriptions are cancelled/terminated instead --
    see Subscription below).
    """
    # collection path under the Recurly API root
    path = 'subscriptions'
    def delete(self, *args, **kwargs):
        raise base.MethodNotSupported()
class Subscriptions(SubscriptionsBase):
    """Read-only listing over the subscriptions collection."""
    @base.apimethod
    def get(self, state='live', cursor=None, per_page=None):
        """
        Fetch all your subscriptions.

        :var state: The state of subscriptions to return:
            "active", "canceled", "expired", "future", "in_trial", "live",
            or "past_due". A subscription will belong to more than one state.
        :vartype state: str
        """
        # NOTE: get_params(None, locals()) harvests this method's local
        # variables as the query parameters -- do not introduce new
        # locals above this call.
        params = base.get_params(None, locals())
        request = http.Request('GET', self.get_url(), params)
        return request, parsers.parse_xml
    def update(self, *args, **kwargs):
        raise base.MethodNotSupported()
class Subscription(SubscriptionsBase):
    """A single subscription; exposes the PUT-based lifecycle actions."""

    def create(self, *args, **kwargs):
        raise base.MethodNotSupported()

    def _put_url(self, action, params=None):
        # Build '<subscription URL>/<action>', appending an encoded
        # query string when params are given.
        url = '{0}/{1}'.format(self.get_url(), action)
        if params is not None:
            url = url + '?' + http.urlencode_any(params)
        return url

    @base.apimethod
    def cancel(self):
        """
        Cancel a subscription; it remains active until the next billing cycle.
        """
        self.require_item()
        request = http.Request('PUT', self._put_url('cancel'))
        request.use_xml = False
        return request, parsers.parse_empty

    @base.apimethod
    def reactivate(self):
        """
        Reactivate a canceled subscription.
        """
        self.require_item()
        request = http.Request('PUT', self._put_url('reactivate'))
        return request, parsers.parse_empty

    @base.apimethod
    def terminate(self, refund=None):
        """
        Terminate a subscription, removing any stored billing information.

        :var refund: The type of the refund to perform: 'full' or 'partial'.
            Defaults to 'none'.
        :vartype refund: str
        """
        self.require_item()
        request = http.Request(
            'PUT', self._put_url('terminate', {'refund': refund or 'none'}))
        return request, parsers.parse_empty

    @base.apimethod
    def postpone(self, next_renewal_date):
        """
        Postpone a subscription.

        :var next_renewal_date: The next renewal date that will be applied
        :vartype next_renewal_date: str
        """
        self.require_item()
        request = http.Request(
            'PUT', self._put_url('postpone',
                                 {'next_renewal_date': next_renewal_date}))
        return request, parsers.parse_empty
class AccountSubscriptions(Subscriptions):
    """Subscriptions nested under an account: listing only (create and
    update are disabled on top of the inherited collection).
    """
    def create(self, *args, **kwargs):
        raise base.MethodNotSupported()
    def update(self, *args, **kwargs):
        raise base.MethodNotSupported()
| ducksboard/libsaas | libsaas/services/recurly/subscriptions.py | Python | mit | 2,990 |
#!/usr/bin/env python
# coding:utf-8
import errno
import time
import struct
import zlib
import functools
import re
import io
import string
import socket
import ssl
import httplib
import Queue
import urlparse
import threading
from proxy import xlog
from connect_manager import https_manager
from appids_manager import appid_manager
import OpenSSL
# Exception types treated as "network I/O failed" throughout this module.
NetWorkIOError = (socket.error, ssl.SSLError, OpenSSL.SSL.Error, OSError)
from config import config
from google_ip import google_ip
def generate_message_html(title, banner, detail=''):
    """Render a simple Google-styled HTML message page.

    :param title: text for the <title> tag.
    :param banner: headline shown in the <H1> element.
    :param detail: optional HTML fragment with extra information.
    :return: the complete HTML document as a string.
    """
    page_template = string.Template('''
    <html><head>
    <meta http-equiv="content-type" content="text/html;charset=utf-8">
    <title>$title</title>
    <style><!--
    body {font-family: arial,sans-serif}
    div.nav {margin-top: 1ex}
    div.nav A {font-size: 10pt; font-family: arial,sans-serif}
    span.nav {font-size: 10pt; font-family: arial,sans-serif; font-weight: bold}
    div.nav A,span.big {font-size: 12pt; color: #0000cc}
    div.nav A {font-size: 10pt; color: black}
    A.l:link {color: #6f6f6f}
    A.u:link {color: green}
    //--></style>
    </head>
    <body text=#000000 bgcolor=#ffffff>
    <table border=0 cellpadding=2 cellspacing=0 width=100%>
    <tr><td bgcolor=#3366cc><font face=arial,sans-serif color=#ffffff><b>Message</b></td></tr>
    <tr><td> </td></tr></table>
    <blockquote>
    <H1>$banner</H1>
    $detail
    <p>
    </blockquote>
    <table width=100% cellpadding=0 cellspacing=0><tr><td bgcolor=#3366cc><img alt="" width=1 height=4></td></tr></table>
    </body></html>
    ''')
    return page_template.substitute(title=title, banner=banner, detail=detail)
def spawn_later(seconds, target, *args, **kwargs):
    """Run target(*args, **kwargs) on a new thread after *seconds* seconds.

    Any exception raised by *target* is swallowed and the thread result
    becomes None.  Modules are resolved with __import__ at call time --
    presumably to stay usable during interpreter shutdown; TODO confirm.
    Uses the Python 2 'thread' module.
    """
    def wrap(*args, **kwargs):
        __import__('time').sleep(seconds)
        try:
            result = target(*args, **kwargs)
        except:
            result = None
        return result
    return __import__('thread').start_new_thread(wrap, args, kwargs)
# Hop-by-hop and proxy/GAE-internal headers that are never forwarded:
# excluded when serializing requests (_request/fetch) and when relaying
# response headers back to the browser (handler/RangeFetch).
skip_headers = frozenset(['Vary',
                          'Via',
                          'X-Google-Cache-Control',
                          'X-Forwarded-For',
                          'Proxy-Authorization',
                          'Proxy-Connection',
                          'Upgrade',
                          'X-Chrome-Variations',
                          'Connection',
                          'Cache-Control'
                          ])
def send_header(wfile, keyword, value):
    """Write one "Name: value\\r\\n" response header line to *wfile*.

    The header name is title-cased.  Two quirks are handled:
    * a comma-joined Set-Cookie value (as returned by GAE urlfetch) is
      split back into one header line per cookie;
    * an unquoted Content-Disposition filename is wrapped in quotes.
    """
    keyword = keyword.title()
    if keyword == 'Set-Cookie':
        # https://cloud.google.com/appengine/docs/python/urlfetch/responseobjects
        for cookie_part in re.split(r', (?=[^ =]+(?:=|$))', value):
            wfile.write("%s: %s\r\n" % (keyword, cookie_part))
        return
    if keyword == 'Content-Disposition' and '"' not in value:
        value = re.sub(r'filename=([^"\']+)', 'filename="\\1"', value)
    wfile.write("%s: %s\r\n" % (keyword, value))
def _request(sock, headers, payload, bufsize=8192):
    """POST *payload* to the GAE application (/_gh/) over *sock*.

    *payload* may be a byte string (sent in <=64KB slices) or a
    file-like object with read().  Returns an httplib.HTTPResponse whose
    status line and headers are already parsed, or None when the server
    sends a bad status line.  Header parsing runs under a temporary
    100-second socket timeout.
    """
    request_data = 'POST /_gh/ HTTP/1.1\r\n'
    request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items() if k not in skip_headers)
    request_data += '\r\n'
    if isinstance(payload, bytes):
        sock.send(request_data.encode())
        payload_len = len(payload)
        start = 0
        # Send in slices; sock.send() may accept fewer bytes than asked.
        while start < payload_len:
            send_size = min(payload_len - start, 65535)
            sended = sock.send(payload[start:start+send_size])
            start += sended
    elif hasattr(payload, 'read'):
        sock.send(request_data)
        while True:
            data = payload.read(bufsize)
            if not data:
                break
            sock.send(data)
    else:
        raise TypeError('_request(payload) must be a string or buffer, not %r' % type(payload))
    response = httplib.HTTPResponse(sock, buffering=True)
    try:
        orig_timeout = sock.gettimeout()
        sock.settimeout(100)
        response.begin()
        sock.settimeout(orig_timeout)
    except httplib.BadStatusLine as e:
        #logging.warn("_request bad status line:%r", e)
        response.close()
        response = None
    except Exception as e:
        # Other parse errors are logged; the (possibly unparsed)
        # response object is still returned to the caller.
        xlog.warn("_request:%r", e)
    return response
class GAE_Exception(BaseException):
    """Raised when a request cannot be completed through GAE.

    NOTE(review): this inherits BaseException, so a plain
    'except Exception' will NOT catch it; callers in this module catch
    GAE_Exception explicitly (see request() and handler()).
    """
    def __init__(self, type, message):
        xlog.debug("GAE_Exception %r %r", type, message)
        # numeric error category (1: no appid, 2: retries exhausted)
        self.type = type
        self.message = message
def request(headers={}, payload=None):
    """Send one wrapped request to the GAE app, retrying up to 3 times.

    Takes a pooled SSL connection, binds an appid to it on first use
    (Host: <appid>.appspot.com) and returns the httplib response with
    the connection attached as .ssl_sock.  Raises GAE_Exception when no
    appid is available or all retries fail.

    NOTE(review): *headers* has a mutable default; callers in this
    module always pass their own dict, so it is never shared in practice.
    """
    max_retry = 3
    for i in range(max_retry):
        ssl_sock = None
        try:
            ssl_sock = https_manager.get_ssl_connection()
            if not ssl_sock:
                xlog.debug('create_ssl_connection fail')
                continue
            if ssl_sock.host == '':
                # Fresh connection: assign an appid and remember it on
                # the socket so it is reused on subsequent requests.
                ssl_sock.appid = appid_manager.get_appid()
                if not ssl_sock.appid:
                    google_ip.report_connect_closed(ssl_sock.ip, "no appid")
                    time.sleep(60)
                    raise GAE_Exception(1, "no appid can use")
                headers['Host'] = ssl_sock.appid + ".appspot.com"
                ssl_sock.host = headers['Host']
            else:
                headers['Host'] = ssl_sock.host
            response = _request(ssl_sock, headers, payload)
            if not response:
                google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
                ssl_sock.close()
                continue
            response.ssl_sock = ssl_sock
            return response
        except Exception as e:
            xlog.exception('request failed:%s', e)
            if ssl_sock:
                google_ip.report_connect_closed(ssl_sock.ip, "request_except")
                ssl_sock.close()
    raise GAE_Exception(2, "try max times")
def inflate(data):
    """Decompress a raw deflate stream (negative wbits: no zlib header)."""
    return zlib.decompress(data, -zlib.MAX_WBITS)
def deflate(data):
    """Compress to a raw deflate stream: zlib.compress() output minus the
    2-byte zlib header and 4-byte Adler-32 trailer."""
    return zlib.compress(data)[2:-4]
def fetch(method, url, headers, body):
    """Wrap a browser request into the GAE urlfetch envelope and send it.

    The original request line + headers are deflate-compressed and
    prefixed with a 2-byte big-endian length; the body follows (itself
    deflated when it is smallish text without Content-Encoding).  On a
    200 from the GAE wrapper, the embedded origin response is unpacked:
    response.app_status keeps the wrapper status while
    response.status/reason/msg are replaced by the origin server's.
    """
    if isinstance(body, basestring) and body:
        if len(body) < 10 * 1024 * 1024 and 'Content-Encoding' not in headers:
            # Only keep the compressed body if it is actually smaller.
            zbody = deflate(body)
            if len(zbody) < len(body):
                body = zbody
                headers['Content-Encoding'] = 'deflate'
    if len(body) > 10 * 1024 * 1024:
        xlog.warn("body len:%d %s %s", len(body), method, url)
    headers['Content-Length'] = str(len(body))
    # GAE donot allow set `Host` header
    if 'Host' in headers:
        del headers['Host']
    # Fetch-server options are passed as X-URLFETCH-* headers.
    kwargs = {}
    if config.GAE_PASSWORD:
        kwargs['password'] = config.GAE_PASSWORD
    #kwargs['options'] =
    #kwargs['validate'] =
    kwargs['maxsize'] = config.AUTORANGE_MAXSIZE
    kwargs['timeout'] = '19'
    payload = '%s %s HTTP/1.1\r\n' % (method, url)
    payload += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items() if k not in skip_headers)
    #for k, v in headers.items():
    #    logging.debug("Send %s: %s", k, v)
    payload += ''.join('X-URLFETCH-%s: %s\r\n' % (k, v) for k, v in kwargs.items() if v)
    request_headers = {}
    payload = deflate(payload)
    # Envelope: !h length prefix + deflated request + (possibly deflated) body.
    body = '%s%s%s' % (struct.pack('!h', len(payload)), payload, body)
    request_headers['Content-Length'] = str(len(body))
    response = request(request_headers, body)
    response.app_msg = ''
    # app_status is the GAE wrapper's status; a non-200 means the fetch
    # itself failed and there is no embedded origin response to unpack.
    response.app_status = response.status
    if response.app_status != 200:
        return response
    data = response.read(2)
    if len(data) < 2:
        xlog.warn("fetch too short lead byte len:%d %s", len(data), url)
        response.app_status = 502
        response.fp = io.BytesIO(b'connection aborted. too short lead byte data=' + data)
        response.read = response.fp.read
        return response
    headers_length, = struct.unpack('!h', data)
    data = response.read(headers_length)
    if len(data) < headers_length:
        xlog.warn("fetch too short header need:%d get:%d %s", headers_length, len(data), url)
        response.app_status = 509
        response.fp = io.BytesIO(b'connection aborted. too short headers data=' + data)
        response.read = response.fp.read
        return response
    response.ssl_sock.received_size += headers_length
    # The deflated block holds the origin status line + headers.
    raw_response_line, headers_data = inflate(data).split('\r\n', 1)
    _, response.status, response.reason = raw_response_line.split(None, 2)
    response.status = int(response.status)
    response.reason = response.reason.strip()
    response.msg = httplib.HTTPMessage(io.BytesIO(headers_data))
    # Anything the app wants to show the user (error page) trails the headers.
    response.app_msg = response.msg.fp.read()
    return response
# Re-expand GAE's comma-joined Set-Cookie value into separate
# "Set-Cookie:" header lines (same split rule as send_header above).
normcookie = functools.partial(re.compile(', ([^ =]+(?:=|$))').sub, '\\r\\nSet-Cookie: \\1')
# Wrap an unquoted Content-Disposition filename in double quotes.  The
# previous pattern r'filename=(.+?)' was non-greedy with nothing after
# the group, so it matched -- and quoted -- only the FIRST character of
# the filename; match the full unquoted name instead.
normattachment = functools.partial(re.compile(r'filename=([^"\']+)').sub, 'filename="\\1"')
def send_response(wfile, status=404, headers={}, body=''):
    """Write a complete HTTP response (status line, headers, body) to *wfile*.

    Header names are title-cased; Transfer-Encoding is dropped, and
    Content-Length / Connection: close are filled in when absent.
    (The mutable default for *headers* is harmless here: it is
    immediately replaced by a freshly built dict.)
    """
    headers = dict((name.title(), val) for name, val in headers.items())
    headers.pop('Transfer-Encoding', None)
    headers.setdefault('Content-Length', len(body))
    headers.setdefault('Connection', 'close')
    wfile.write("HTTP/1.1 %d\r\n" % status)
    for name, val in headers.items():
        send_header(wfile, name, val)
    wfile.write("\r\n")
    wfile.write(body)
def return_fail_message(wfile):
    """Send a 504 'connection timed out' HTML page to the browser."""
    html = generate_message_html('504 GAEProxy Proxy Time out', u'连接超时,先休息一会再来!')
    send_response(wfile, 504, body=html.encode('utf-8'))
    return
def clean_empty_header(headers):
    """Delete headers whose value is an empty string, in place.

    GAE's url_fetch refuses empty header values (seen with the Mobogenie
    android-market client), so such entries are removed before
    forwarding.  The same (mutated) dict is returned for convenience.
    """
    for name in [key for key, val in headers.items() if val == ""]:
        del headers[name]
    return headers
def handler(method, url, headers, body, wfile):
    """Proxy one browser request through GAE and stream the reply back.

    Retries fetch() for up to 30 seconds, reacting to the GAE wrapper
    status (bad server type / 404 / 403 / 405 / 503 rotate the IP or the
    appid), then relays status line, headers and body to *wfile*.
    206 (partial content) responses are delegated to RangeFetch.
    """
    time_request = time.time()
    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30: #time out
            return return_fail_message(wfile)
        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)
                # A failed fetch from a non-Google front end means the IP
                # is not usable for GAE at all: drop it permanently.
                try:
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                except Exception as e:
                    errors.append(e)
                    xlog.warn('gae_handler.handler %r %s , retry...', e, url)
                    continue
            if response.app_status == 404:
                #xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                google_ip.report_connect_closed(response.ssl_sock.ip, "appid not exist")
                appid = appid_manager.get_appid()
                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue
            if response.app_status == 403 or response.app_status == 405: #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue
            if response.app_status == 503:
                xlog.warning('APPID %r out of Quota, remove it. %s', response.ssl_sock.appid, response.ssl_sock.ip)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "out of quota")
                appid = appid_manager.get_appid()
                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue
            if response.app_status < 500:
                break
        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)
    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()
    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value
        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']
        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            # Browser went away; keep draining the GAE response below so
            # the connection can still be reused.
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time()-time_request, e, url)
        if len(response.app_msg):
            # GAE app-level error page replaces the body entirely.
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
            response.close()
            return
        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length-1, content_length
        body_length = end - start + 1
        last_read_time = time.time()
        time_response = time.time()
        while True:
            if start > end:
                # Whole body relayed: log stats and recycle the connection.
                time_finished = time.time()
                if body_length > 1024 and time_finished - time_response > 0:
                    speed = body_length / (time_finished - time_response)
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d Spd:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size, (time_finished-time_request)*1000,
                              length, response.ssl_sock.handshake_time, int(speed), response.status, url)
                else:
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size, (time_finished-time_request)*1000,
                              length, response.ssl_sock.handshake_time, response.status, url)
                response.ssl_sock.received_size += body_length
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock, call_time=time_request)
                return
            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - last_read_time > 20:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s", (time.time()-time_request)*1000, length, (end-start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue
            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    # SSL wfile may signal want-read/want-write; retry once.
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_browser = False
    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
            google_ip.report_connect_closed(response.ssl_sock.ip, "Net")
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
class RangeFetch(object):
    """Download a 206 (partial content) response in parallel byte ranges.

    The initial partial response plus follow-up Range requests -- issued
    by self.threads worker threads running __fetchlet -- are stitched
    back into one ordered stream and written to the client as a plain
    200 response.
    """
    threads = config.AUTORANGE_THREADS
    maxsize = config.AUTORANGE_MAXSIZE
    bufsize = config.AUTORANGE_BUFSIZE
    waitsize = config.AUTORANGE_WAITSIZE
    def __init__(self, method, url, headers, body, response, wfile):
        self.method = method
        self.wfile = wfile
        self.url = url
        self.headers = headers
        self.body = body
        self.response = response
        self._stopped = False
        # NOTE(review): set but never read in this class -- confirm.
        self._last_app_status = {}
        # next byte offset the client is waiting for
        self.expect_begin = 0
    def fetch(self):
        """Send the 200 response header, then pump ordered chunks from
        data_queue to the client while worker threads fill it."""
        response_headers = dict((k.title(), v) for k, v in self.response.getheaders())
        content_range = response_headers['Content-Range']
        start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        if start == 0:
            response_headers['Content-Length'] = str(length)
            del response_headers['Content-Range']
        else:
            response_headers['Content-Range'] = 'bytes %s-%s/%s' % (start, end, length)
            response_headers['Content-Length'] = str(length-start)
        xlog.info('>>>>>>>>>>>>>>> RangeFetch started(%r) %d-%d', self.url, start, end)
        try:
            self.wfile.write("HTTP/1.1 200 OK\r\n")
            for key in response_headers:
                if key == 'Transfer-Encoding':
                    continue
                if key == 'X-Head-Content-Length':
                    continue
                if key in skip_headers:
                    continue
                value = response_headers[key]
                #logging.debug("Head %s: %s", key.title(), value)
                send_header(self.wfile, key, value)
            self.wfile.write("\r\n")
        except Exception as e:
            self._stopped = True
            xlog.warn("RangeFetch send response fail:%r %s", e, self.url)
            return
        # data_queue: (offset, bytes) pieces ready to relay, ordered by offset.
        # range_queue: (start, end, response-or-None) jobs for the workers.
        data_queue = Queue.PriorityQueue()
        range_queue = Queue.PriorityQueue()
        range_queue.put((start, end, self.response))
        self.expect_begin = start
        for begin in range(end+1, length, self.maxsize):
            range_queue.put((begin, min(begin+self.maxsize-1, length-1), None))
        # Stagger worker start-up proportionally to their range offset.
        for i in xrange(0, self.threads):
            range_delay_size = i * self.maxsize
            spawn_later(float(range_delay_size)/self.waitsize, self.__fetchlet, range_queue, data_queue, range_delay_size)
        has_peek = hasattr(data_queue, 'peek')
        peek_timeout = 120
        while self.expect_begin < length - 1:
            try:
                if has_peek:
                    begin, data = data_queue.peek(timeout=peek_timeout)
                    if self.expect_begin == begin:
                        data_queue.get()
                    elif self.expect_begin < begin:
                        # Next needed piece is not ready yet: wait.
                        time.sleep(0.1)
                        continue
                    else:
                        xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin)
                        break
                else:
                    begin, data = data_queue.get(timeout=peek_timeout)
                    if self.expect_begin == begin:
                        pass
                    elif self.expect_begin < begin:
                        # Out-of-order piece: push it back and wait.
                        data_queue.put((begin, data))
                        time.sleep(0.1)
                        continue
                    else:
                        xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin)
                        break
            except Queue.Empty:
                xlog.error('data_queue peek timeout, break')
                break
            try:
                ret = self.wfile.write(data)
                if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                    xlog.debug("send to browser wfile.write ret:%d, retry", ret)
                    ret = self.wfile.write(data)
                    xlog.debug("send to browser wfile.write ret:%d", ret)
                self.expect_begin += len(data)
                del data
            except Exception as e:
                xlog.warn('RangeFetch client closed(%s). %s', e, self.url)
                break
        # Signal the workers to exit their loops.
        self._stopped = True
    def __fetchlet(self, range_queue, data_queue, range_delay_size):
        """Worker loop: take (start, end, response) jobs from range_queue,
        fetch the byte range and push (offset, data) pieces to data_queue.
        Failed jobs are re-queued with response=None for a retry."""
        headers = dict((k.title(), v) for k, v in self.headers.items())
        headers['Connection'] = 'close'
        while not self._stopped:
            try:
                try:
                    start, end, response = range_queue.get(timeout=1)
                    # Back-pressure: if the relay loop is far behind and the
                    # buffered data exceeds ~30MB, defer this job.
                    if self.expect_begin < start and data_queue.qsize() * self.bufsize + range_delay_size > 30*1024*1024:
                        range_queue.put((start, end, response))
                        time.sleep(10)
                        continue
                    headers['Range'] = 'bytes=%d-%d' % (start, end)
                    if not response:
                        response = fetch(self.method, self.url, headers, self.body)
                except Queue.Empty:
                    continue
                except Exception as e:
                    xlog.warning("RangeFetch fetch response %r in __fetchlet", e)
                    range_queue.put((start, end, None))
                    continue
                if not response:
                    xlog.warning('RangeFetch %s return %r', headers['Range'], response)
                    range_queue.put((start, end, None))
                    continue
                if response.app_status != 200:
                    xlog.warning('Range Fetch return %s "%s %s" %s ', response.app_status, self.method, self.url, headers['Range'])
                    if response.app_status == 404:
                        xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                        appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return
                    if response.app_status == 503:
                        xlog.warning('APPID %r out of Quota, remove it temporary.', response.ssl_sock.appid)
                        appid_manager.report_out_of_quota(response.ssl_sock.appid)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return
                    google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
                    response.close()
                    range_queue.put((start, end, None))
                    continue
                if response.getheader('Location'):
                    self.url = urlparse.urljoin(self.url, response.getheader('Location'))
                    xlog.info('RangeFetch Redirect(%r)', self.url)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "reLocation")
                    response.close()
                    range_queue.put((start, end, None))
                    continue
                if 200 <= response.status < 300:
                    content_range = response.getheader('Content-Range')
                    if not content_range:
                        xlog.warning('RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s',
                                     self.method, self.url, content_range, response.getheaders(), start, end)
                        google_ip.report_connect_closed(response.ssl_sock.ip, "no range")
                        response.close()
                        range_queue.put((start, end, None))
                        continue
                    content_length = int(response.getheader('Content-Length', 0))
                    xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s', threading.currentThread().ident, content_length, content_range)
                    time_last_read = time.time()
                    while start < end + 1:
                        try:
                            data = response.read(self.bufsize)
                            if not data:
                                if time.time() - time_last_read > 20:
                                    break
                                else:
                                    time.sleep(0.1)
                                    continue
                            time_last_read = time.time()
                            data_len = len(data)
                            data_queue.put((start, data))
                            start += data_len
                        except Exception as e:
                            xlog.warning('RangeFetch "%s %s" %s failed: %s', self.method, self.url, headers['Range'], e)
                            break
                    if start < end + 1:
                        # Partial read: re-queue the remainder of the range.
                        xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end)
                        google_ip.report_connect_closed(response.ssl_sock.ip, "down err")
                        response.close()
                        range_queue.put((start, end, None))
                        continue
                    https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                    xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.', start - 1)
                else:
                    xlog.error('RangeFetch %r return %s', self.url, response.status)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "status err")
                    response.close()
                    range_queue.put((start, end, None))
                    continue
            except StandardError as e:
                xlog.exception('RangeFetch._fetchlet error:%s', e)
                raise
| hexlism/xx_net | gae_proxy/local/gae_handler.py | Python | bsd-2-clause | 28,513 |
# -*- coding: utf-8 -*-
from Products.CMFPlone.utils import safe_unicode
from zope.i18nmessageid import MessageFactory
import gettext
import pycountry
import unicodedata
# i18n message factory for the "bda.plone.orders" translation domain.
message_factory = MessageFactory('bda.plone.orders')
def safe_encode(string):
    """Encode text-like values to UTF-8 bytes; return anything else unchanged.

    Python 2 only: basestring covers both str and unicode, so both are
    routed through safe_unicode() and re-encoded as UTF-8.
    """
    if isinstance(string, basestring):
        return safe_unicode(string).encode('utf-8')
    return string
def safe_filename(value):
    """Normalize *value* for use as a filename.

    Unicode is normalized to NFKD and reduced to ASCII (unmappable
    characters are dropped), surrounding whitespace is stripped, and the
    remaining spaces are converted to hyphens.

    Ideas from:
    https://github.com/django/django/blob/master/django/utils/text.py
    """
    value = safe_unicode(value)
    value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
    # Strip BEFORE replacing: the previous order turned leading/trailing
    # spaces into hyphens first, so strip() could never remove them.
    return value.strip().replace(' ', '-')
def get_country_name(country_code, lang='en'):
    """Return the translated country name for a given pycountry country code.

    :param country_code: numeric ISO 3166 country code, as used by pycountry.
    :param lang: ISO 639 language code to translate the name into
        (default: 'en').
    :return: the translated country name as unicode.
    """
    country_name = pycountry.countries.get(numeric=country_code).name
    # Bug fix: the language list was hard-coded to ['de'], silently
    # ignoring the *lang* argument.
    trans = gettext.translation(
        'iso3166', pycountry.LOCALES_DIR, languages=[lang]
    )
    return safe_unicode(trans.gettext(country_name))
| andreesg/bda.plone.orders | src/bda/plone/orders/__init__.py | Python | bsd-3-clause | 1,313 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
from oslo.config import cfg
from quantum.common import constants
from quantum.db import agents_db
from quantum.db import agentschedulers_db
from quantum.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class ChanceScheduler(object):
    """Allocate a DHCP agent for a network in a random way.
    More sophisticated scheduler (similar to filter scheduler in nova?)
    can be introduced later.
    """
    def _schedule_bind_network(self, context, agent, network_id):
        """Create and stage a network<->DHCP-agent binding row."""
        binding = agentschedulers_db.NetworkDhcpAgentBinding()
        binding.dhcp_agent = agent
        binding.network_id = network_id
        context.session.add(binding)
        # NOTE(review): 'agent_id' is given the whole agent object, not
        # its id -- the log line prints the full repr; confirm intent.
        LOG.debug(_('Network %(network_id)s is scheduled to be hosted by '
                    'DHCP agent %(agent_id)s'),
                  {'network_id': network_id,
                   'agent_id': agent})
    def schedule(self, plugin, context, network):
        """Schedule the network to active DHCP agent(s).
        A list of scheduled agents is returned.
        """
        agents_per_network = cfg.CONF.dhcp_agents_per_network
        #TODO(gongysh) don't schedule the networks with only
        # subnets whose enable_dhcp is false
        with context.session.begin(subtransactions=True):
            dhcp_agents = plugin.get_dhcp_agents_hosting_networks(
                context, [network['id']], active=True)
            # Already hosted by enough agents: nothing to do.
            if len(dhcp_agents) >= agents_per_network:
                LOG.debug(_('Network %s is hosted already'),
                          network['id'])
                return
            n_agents = agents_per_network - len(dhcp_agents)
            enabled_dhcp_agents = plugin.get_agents_db(
                context, filters={
                    'agent_type': [constants.AGENT_TYPE_DHCP],
                    'admin_state_up': [True]})
            if not enabled_dhcp_agents:
                LOG.warn(_('No more DHCP agents'))
                return
            # Drop agents whose heartbeat is stale and agents that
            # already host this network.
            active_dhcp_agents = [
                agent for agent in set(enabled_dhcp_agents)
                if not agents_db.AgentDbMixin.is_agent_down(
                    agent['heartbeat_timestamp'])
                and agent not in dhcp_agents
            ]
            if not active_dhcp_agents:
                LOG.warn(_('No more DHCP agents'))
                return
            # Pick the remaining hosts at random ("chance" scheduling).
            n_agents = min(len(active_dhcp_agents), n_agents)
            chosen_agents = random.sample(active_dhcp_agents, n_agents)
            for agent in chosen_agents:
                self._schedule_bind_network(context, agent, network['id'])
        return chosen_agents
    def auto_schedule_networks(self, plugin, context, host):
        """Schedule non-hosted networks to the DHCP agent on
        the specified host.
        """
        agents_per_network = cfg.CONF.dhcp_agents_per_network
        with context.session.begin(subtransactions=True):
            query = context.session.query(agents_db.Agent)
            # '== True' is required to build the SQLAlchemy expression;
            # 'is True' would not produce SQL.
            query = query.filter(agents_db.Agent.agent_type ==
                                 constants.AGENT_TYPE_DHCP,
                                 agents_db.Agent.host == host,
                                 agents_db.Agent.admin_state_up == True)
            dhcp_agents = query.all()
            for dhcp_agent in dhcp_agents:
                if agents_db.AgentDbMixin.is_agent_down(
                    dhcp_agent.heartbeat_timestamp):
                    LOG.warn(_('DHCP agent %s is not active'), dhcp_agent.id)
                    continue
                # NOTE(review): the subnets/net_ids lookup is recomputed
                # for every agent in the loop; looks redundant -- confirm
                # before restructuring.
                fields = ['network_id', 'enable_dhcp']
                subnets = plugin.get_subnets(context, fields=fields)
                net_ids = set(s['network_id'] for s in subnets
                              if s['enable_dhcp'])
                if not net_ids:
                    LOG.debug(_('No non-hosted networks'))
                    return False
                for net_id in net_ids:
                    agents = plugin.get_dhcp_agents_hosting_networks(
                        context, [net_id], active=True)
                    if len(agents) >= agents_per_network:
                        continue
                    if any(dhcp_agent.id == agent.id for agent in agents):
                        continue
                    binding = agentschedulers_db.NetworkDhcpAgentBinding()
                    binding.dhcp_agent = dhcp_agent
                    binding.network_id = net_id
                    context.session.add(binding)
        return True
| linvictor88/vse-lbaas-driver | quantum/scheduler/dhcp_agent_scheduler.py | Python | apache-2.0 | 5,180 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import mock
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.test.fake import fakemaster
from buildbot.test.fake import fakeprotocol
from buildbot.test.util import protocols
from buildbot.test.util.misc import TestReactorMixin
from buildbot.worker.protocols import base
class TestListener(TestReactorMixin, unittest.TestCase):
    """Unit tests for the base worker-protocol Listener service."""
    @defer.inlineCallbacks
    def test_constructor(self):
        """A Listener attached to a master records that master."""
        self.setUpTestReactor()
        master = fakemaster.make_master(self)
        listener = base.Listener()
        yield listener.setServiceParent(master)
        self.assertEqual(listener.master, master)
class TestFakeConnection(protocols.ConnectionInterfaceTest,
                         TestReactorMixin, unittest.TestCase):
    """Run the shared connection-interface test suite against the fake
    protocol implementation."""
    def setUp(self):
        self.setUpTestReactor()
        self.master = fakemaster.make_master(self)
        self.worker = mock.Mock()
        self.conn = fakeprotocol.FakeConnection(self.master, self.worker)
class TestConnection(protocols.ConnectionInterfaceTest,
                     TestReactorMixin, unittest.TestCase):
    """Tests for the real base.Connection implementation."""
    def setUp(self):
        self.setUpTestReactor()
        self.master = fakemaster.make_master(self)
        self.worker = mock.Mock()
        self.conn = base.Connection(self.master, self.worker)
    def test_constructor(self):
        """The constructor stores the master and worker references."""
        self.assertEqual(self.conn.master, self.master)
        self.assertEqual(self.conn.worker, self.worker)
    def test_notify(self):
        """notifyOnDisconnect callbacks fire only after notifyDisconnected."""
        cb = mock.Mock()
        self.conn.notifyOnDisconnect(cb)
        self.assertEqual(cb.call_args_list, [])
        self.conn.notifyDisconnected()
        self.assertNotEqual(cb.call_args_list, [])
| cmouse/buildbot | master/buildbot/test/unit/worker/test_protocols_base.py | Python | gpl-2.0 | 2,377 |
import operator
import os
from checksum import calc_checksum
import progress
__author__ = 'snorri.sturluson'
def verify_cache(index, res_folder):
    """Verify the cached files listed in *index* against their MD5 checksums.

    Corrupt files are deleted (so they can be re-downloaded); missing files
    are only counted.  Returns a (corrupt, missing) tuple.
    """
    num_files = len(index)
    missing = 0
    corrupt = 0
    scanned = 0
    for entry in index:
        scanned += 1
        # Single-line progress display; the trailing \r rewinds the cursor.
        progress.write("Scanned %6.1d of %6.1d files (%6.1d corrupt, %6.1d missing)\r" %
                       (scanned, num_files, corrupt, missing))
        filename = os.path.join(res_folder, entry.cached_name)
        if not os.path.exists(filename):
            missing += 1
            continue
        checksum = calc_checksum(filename)
        if checksum != entry.md5_checksum:
            corrupt += 1
            # Best effort: remove the corrupt file so it gets re-fetched.
            try:
                os.remove(filename)
            except IOError:
                pass
    progress.clear()
    print "Verified %d files:" % num_files
    print "  %6.1d files corrupt" % corrupt
    print "  %6.1d files not yet downloaded" % missing
    return corrupt, missing
| evem8/rescache | verify.py | Python | mit | 979 |
# -*- coding: utf-8 -*-
"""
Helper functions used in views.
"""
from __future__ import division
import calendar
import csv
from datetime import datetime
from functools import wraps
from json import dumps
from threading import Lock
from time import time
from xml.etree import ElementTree
from flask import Response
from main import app
import logging
log = logging.getLogger(__name__)  # pylint: disable=invalid-name
# Module-level result cache used by the @cache decorator below; writes on
# the first-call path are guarded by LOCK.
CACHE = {}
LOCK = Lock()
def jsonify(function):
    """
    Decorator: serialize the wrapped view's return value to JSON and wrap
    it in a Response with the 'application/json' mimetype.
    """
    @wraps(function)
    def inner(*args, **kwargs):
        """
        This docstring will be overridden by @wraps decorator.
        """
        payload = dumps(function(*args, **kwargs))
        return Response(payload, mimetype='application/json')
    return inner
def cache(period):
    """
    Caching decorator. Refreshes cached data if time expired.
    Cache dict structure:
    CACHE = {
        function.__name__: {
            'last_caching': 1493018167.846858
            'data': function(*args, **kwargs)
        },
    }
    """
    def decorating_wrapper(function):
        def caching_wrapper(*args, **kwargs):
            global CACHE
            func_name = function.__name__
            try:
                # EAFP: a missing entry raises KeyError on first use.
                if (time() - CACHE[func_name]['last_caching']) > period:
                    # NOTE(review): this refresh path updates CACHE without
                    # holding LOCK (only the first-call path below does) --
                    # confirm concurrent refreshes are acceptable.
                    CACHE[func_name] = {
                        'data': function(*args, **kwargs),
                        'last_caching': time()
                    }
            except KeyError:
                # First call for this function: populate under the lock.
                with LOCK:
                    CACHE.setdefault(func_name, {})['data'] = function(
                        *args,
                        **kwargs
                    )
                    CACHE.setdefault(func_name, {})['last_caching'] = time()
            return CACHE[func_name]['data']
        return caching_wrapper
    return decorating_wrapper
@cache(600)
def get_data():
    """
    Extracts data from XML file, tries to match user_id from
    CSV file and binds it.
    It creates structure like this:
    data = {
        user_id = {
            'name': 'Jan K.',
            'presence': {
                datetime.date(2013, 10, 1): {
                    'start': datetime.time(9, 0, 0),
                    'end': datetime.time(17, 30, 0),
                },
                datetime.date(2013, 10, 2): {
                    'start': datetime.time(8, 30, 0),
                    'end': datetime.time(16, 45, 0),
                }
            }
        }
    }
    """
    data = {}
    csv_data = get_data_from_csv()
    tree = ElementTree.parse(app.config['DATA_XML'])
    children = tree.getroot().getchildren()
    # Python 2: filter() returns a list here, so len()/indexing below work.
    users = filter(lambda x: x.tag == 'users', children)
    if not len(users):
        raise KeyError('No users data in XML file.')
    for user in users[0]:
        user_id = int(user.attrib['id'])
        name = user.find('name').text
        # Users with no rows in the CSV get an empty presence container.
        try:
            presence = csv_data[user_id]
        except KeyError:
            presence = []
        data.setdefault(user_id, {})['name'] = name
        data.setdefault(user_id, {})['presence'] = presence
    return data
def get_data_from_csv():
    """
    Extracts presence data from CSV file and groups it by user_id.
    It creates structure like this:
    data = {
        'user_id': {
            datetime.date(2013, 10, 1): {
                'start': datetime.time(9, 0, 0),
                'end': datetime.time(17, 30, 0),
            },
        }
    }
    Malformed rows are logged and skipped.
    """
    data = {}
    with open(app.config['DATA_CSV'], 'r') as csvfile:
        presence_reader = csv.reader(csvfile, delimiter=',')
        for i, row in enumerate(presence_reader):
            if len(row) != 4:
                # ignore header and footer lines
                continue
            try:
                user_id = int(row[0])
                date = datetime.strptime(row[1], '%Y-%m-%d').date()
                start = datetime.strptime(row[2], '%H:%M:%S').time()
                end = datetime.strptime(row[3], '%H:%M:%S').time()
            except (ValueError, TypeError):
                log.debug('Problem with line %d: ', i, exc_info=True)
                # BUG FIX: skip the malformed row.  Previously execution
                # fell through and stored stale values from the previous
                # row (or raised NameError on the first row).
                continue
            data.setdefault(user_id, {})[date] = {'start': start, 'end': end}
    return data
@cache(600)
def get_dates():
    """
    Extracts months and years from CSV for dropdown menu.

    Returns unique 'YYYY-MonthName' strings, newest first.
    Malformed rows are logged and skipped.
    """
    months = []
    with open(app.config['DATA_CSV'], 'r') as csvfile:
        presence_reader = csv.reader(csvfile, delimiter=',')
        for i, row in enumerate(presence_reader):
            if len(row) != 4:
                # ignore header and footer lines
                continue
            try:
                date = datetime.strptime(row[1], '%Y-%m-%d').date()
                value = '{}-{}'.format(
                    date.year,
                    calendar.month_name[date.month]
                )
            except (ValueError, TypeError):
                log.debug('Problem with line %d: ', i, exc_info=True)
                # BUG FIX: previously fell through and re-tested the stale
                # 'value' from an earlier row (NameError on the first row).
                continue
            if value not in months:
                months.append(value)
    return list(reversed(months))
def get_server_config():
    """
    Read the <server> section (host, port, protocol) from the XML file.

    Raises KeyError when the XML has no <server> element.
    """
    tree = ElementTree.parse(app.config['DATA_XML'])
    server_nodes = [node for node in tree.getroot().getchildren()
                    if node.tag == 'server']
    if not server_nodes:
        raise KeyError('No server info in XML file.')
    server = server_nodes[0]
    return {
        'host': server.find('host').text,
        'port': server.find('port').text,
        'protocol': server.find('protocol').text
    }
def group_by_weekday(items):
    """
    Group presence intervals (in seconds) by weekday.

    Returns seven lists (index 0 = Monday .. 6 = Sunday); each entry is
    end - start expressed in seconds since midnight.
    """
    grouped = [[] for _ in range(7)]
    for day, times in items.items():
        begin = times['start']
        finish = times['end']
        # Inlined interval(): difference of the seconds-since-midnight values.
        seconds = ((finish.hour - begin.hour) * 3600 +
                   (finish.minute - begin.minute) * 60 +
                   (finish.second - begin.second))
        grouped[day.weekday()].append(seconds)
    return grouped
def seconds_since_midnight(time):
    """
    Convert a datetime.time to the number of seconds elapsed since 00:00:00.
    """
    return (time.hour * 60 + time.minute) * 60 + time.second
def interval(start, end):
    """
    Seconds between two datetime.time objects on the same day (end - start).
    """
    begin_seconds = seconds_since_midnight(start)
    end_seconds = seconds_since_midnight(end)
    return end_seconds - begin_seconds
def mean(items):
    """
    Arithmetic mean of *items*; returns 0 for an empty sequence.
    """
    if not len(items):
        return 0
    return float(sum(items)) / len(items)
def work_hours(items):
    """
    Split presence data into per-weekday start and end times.

    Returns a (start_hours, end_hours) tuple; each element is a list of
    seven lists (Monday..Sunday) of seconds-since-midnight values.
    """
    starts = [[] for _ in range(7)]
    ends = [[] for _ in range(7)]
    for day, times in items.items():
        weekday = day.weekday()
        starts[weekday].append(seconds_since_midnight(times['start']))
        ends[weekday].append(seconds_since_midnight(times['end']))
    return (starts, ends)
def total_hours(items, month, year):
    """
    Total working hours (rounded to 2 decimals) for the given month name
    and year; returns 0 when there is no presence data at all.
    """
    if not items:
        return 0
    target_year = int(year)
    seconds = 0
    for day in items:
        in_month = calendar.month_name[day.month] == month
        if in_month and day.year == target_year:
            seconds += interval(items[day]['start'], items[day]['end'])
    return round(seconds / 3600, 2)
| djallberto/presence-analyzer-anowak | src/presence_analyzer/utils.py | Python | mit | 7,408 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
OTBUtils.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
(C) 2013 by CS Systemes d'information (CS SI)
Email : volayaf at gmail dot com
otb at c-s dot fr (CS SI)
Contributors : Victor Olaya
Julien Malik, Oscar Picas (CS SI) - add functions to manage xml tree
Alexia Mondot (CS SI) - add a trick for OTBApplication SplitImages
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.core import QgsApplication
import subprocess
from processing.core.ProcessingConfig import ProcessingConfig
from processing.core.ProcessingLog import ProcessingLog
from processing.tools.system import *
import logging
import xml.etree.ElementTree as ET
import traceback
import qgis.core
class OTBUtils:
    """Locate and invoke the Orfeo Toolbox (OTB) command line tools.

    Paths are auto-detected per platform where possible, falling back to
    the Processing configuration settings named by the class constants.
    """

    # Processing configuration keys.
    OTB_FOLDER = "OTB_FOLDER"
    OTB_LIB_FOLDER = "OTB_LIB_FOLDER"
    OTB_SRTM_FOLDER = "OTB_SRTM_FOLDER"
    OTB_GEOID_FILE = "OTB_GEOID_FILE"

    @staticmethod
    def findOtbPath():
        """Auto-detect the folder containing otbcli; None when not found."""
        folder = None
        # try to configure the path automatically
        if isMac():
            testfolder = os.path.join(str(QgsApplication.prefixPath()), "bin")
            if os.path.exists(os.path.join(testfolder, "otbcli")):
                folder = testfolder
            else:
                testfolder = "/usr/local/bin"
                if os.path.exists(os.path.join(testfolder, "otbcli")):
                    folder = testfolder
        elif isWindows():
            testfolder = os.path.join(os.path.dirname(QgsApplication.prefixPath()),
                                      os.pardir, "bin")
            if os.path.exists(os.path.join(testfolder, "otbcli.bat")):
                folder = testfolder
        else:
            testfolder = "/usr/bin"
            if os.path.exists(os.path.join(testfolder, "otbcli")):
                folder = testfolder
        return folder

    @staticmethod
    def otbPath():
        """Auto-detected otbcli folder, falling back to the user setting."""
        folder = OTBUtils.findOtbPath()
        if folder is None:
            folder = ProcessingConfig.getSetting(OTBUtils.OTB_FOLDER)
        return folder

    @staticmethod
    def findOtbLibPath():
        """Auto-detect the OTB applications library folder; None if absent."""
        folder = None
        # try to configure the path automatically
        if isMac():
            testfolder = os.path.join(str(QgsApplication.prefixPath()), "lib/otb/applications")
            if os.path.exists(testfolder):
                folder = testfolder
            else:
                testfolder = "/usr/local/lib/otb/applications"
                if os.path.exists(testfolder):
                    folder = testfolder
        elif isWindows():
            testfolder = os.path.join(os.path.dirname(QgsApplication.prefixPath()), "orfeotoolbox", "applications")
            if os.path.exists(testfolder):
                folder = testfolder
        else:
            testfolder = "/usr/lib/otb/applications"
            if os.path.exists(testfolder):
                folder = testfolder
        return folder

    @staticmethod
    def otbLibPath():
        """Applications folder, falling back to the user setting."""
        folder = OTBUtils.findOtbLibPath()
        if folder is None:
            folder = ProcessingConfig.getSetting(OTBUtils.OTB_LIB_FOLDER)
        return folder

    @staticmethod
    def otbSRTMPath():
        """Configured SRTM tiles folder, or '' when unset."""
        folder = ProcessingConfig.getSetting(OTBUtils.OTB_SRTM_FOLDER)
        # Fixed '== None' -> 'is None' (identity test for None).
        if folder is None:
            folder = ""
        return folder

    @staticmethod
    def otbGeoidPath():
        """Configured geoid file path, or '' when unset."""
        filepath = ProcessingConfig.getSetting(OTBUtils.OTB_GEOID_FILE)
        # Fixed '== None' -> 'is None' (identity test for None).
        if filepath is None:
            filepath = ""
        return filepath

    @staticmethod
    def otbDescriptionPath():
        """Folder with the bundled OTB algorithm description XML files."""
        return os.path.join(os.path.dirname(__file__), "description")

    @staticmethod
    def executeOtb(commands, progress):
        """Run an OTB command line, forwarding '[*' progress markers to the
        Processing progress bar and everything else to the log."""
        loglines = []
        loglines.append("OTB execution console output")
        os.putenv('ITK_AUTOLOAD_PATH', OTBUtils.otbLibPath())
        fused_command = ''.join(['"%s" ' % c for c in commands])
        proc = subprocess.Popen(fused_command, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True).stdout
        for line in iter(proc.readline, ""):
            if "[*" in line:
                # The percentage sits just before the '[*****' progress bar.
                idx = line.find("[*")
                perc = int(line[idx - 4:idx - 2].strip(" "))
                if perc != 0:
                    progress.setPercentage(perc)
            else:
                loglines.append(line)
                progress.setConsoleInfo(line)
        ProcessingLog.addToLog(ProcessingLog.LOG_INFO, loglines)
def get_choices_of(doc, parameter):
    """Return the <choice> texts of the named <parameter> in *doc*.

    Returns an empty list when the parameter or its choices are missing.
    """
    choices = []
    try:
        matches = [item for item in doc.findall('.//parameter') if item.find('key').text == parameter]
        choices = [item.text for item in matches[0].findall('options/choices/choice')]
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are no longer swallowed; missing nodes still just yield [].
        logger = logging.getLogger('OTBGenerator')
        logger.warning(traceback.format_exc())
    return choices
def remove_dependant_choices(doc, parameter, choice):
    """Drop every parameter tied to the *other* choices of *parameter*."""
    other_choices = get_choices_of(doc, parameter)
    other_choices.remove(choice)
    for other in other_choices:
        doomed = [item for item in doc.findall('.//parameter')
                  if '.%s' % other in item.find('key').text]
        for node in doomed:
            doc.remove(node)
def renameValueField(doc, textitem, field, newValue):
    """Set <field> to *newValue* on every parameter whose key is *textitem*."""
    for node in doc.findall('.//parameter'):
        if node.find('key').text == textitem:
            node.find(field).text = newValue
def remove_independant_choices(doc, parameter, choice):
    """Remove every parameter whose key contains '.<choice>'.

    (*parameter* is accepted for signature parity but is not consulted,
    matching the original behavior.)
    """
    needle = '.%s' % choice
    doomed = [item for item in doc.findall('.//parameter')
              if needle in item.find('key').text]
    for node in doomed:
        doc.remove(node)
def remove_parameter_by_key(doc, parameter):
    """Remove every parameter whose <key> equals *parameter*."""
    doomed = [item for item in doc.findall('.//parameter')
              if item.find('key').text == parameter]
    for node in doomed:
        doc.remove(node)
def remove_other_choices(doc, parameter, choice):
    """Keep only *choice* inside the <choices> of the named parameter."""
    matches = [node for node in doc.findall('.//parameter')
               if node.find('key').text == parameter]
    if matches:
        container = matches[0].findall('options/choices')[0]
        victims = [c for c in matches[0].findall('options/choices/choice')
                   if c.text != choice]
        for victim in victims:
            container.remove(victim)
def remove_choice(doc, parameter, choice):
    """Delete the named *choice* from the <choices> of *parameter*."""
    matches = [node for node in doc.findall('.//parameter')
               if node.find('key').text == parameter]
    if matches:
        container = matches[0].findall('options/choices')[0]
        victims = [c for c in matches[0].findall('options/choices/choice')
                   if c.text == choice]
        for victim in victims:
            container.remove(victim)
def split_by_choice(doc, parameter):
    """
    splits the given doc into several docs according to the given parameter
    returns a dictionary of documents
    """
    result = {}
    choices = get_choices_of(doc, parameter)
    import copy
    for choice in choices:
        #creates a new copy of the document
        working_copy = copy.deepcopy(doc)
        remove_dependant_choices(working_copy, parameter, choice)
        #remove all other choices except the current one
        remove_other_choices(working_copy, parameter, choice)
        #set a new name according to the choice
        old_app_name = working_copy.find('key').text
        working_copy.find('key').text = '%s-%s' % (old_app_name, choice)
        old_longname = working_copy.find('longname').text
        # NOTE(review): 'old_longname' is read but the new longname is built
        # from 'old_app_name' instead -- looks like a bug; confirm intent.
        working_copy.find('longname').text = '%s (%s)' % (old_app_name, choice)
        #add it to the dictionary
        result[choice] = working_copy
    return result
def remove_parameter_by_criteria(doc, criteria):
    """Remove the tree's top-level parameters for which criteria(item) holds."""
    root = doc.getroot()
    doomed = [item for item in doc.findall('./parameter') if criteria(item)]
    for node in doomed:
        root.remove(node)
def defaultWrite(available_app, original_dom_document):
    """Serialize the app's XML element tree to description/<app>.xml."""
    # 'with' guarantees the handle is closed even if serialization raises
    # (the original left the file open on error); the unused local logger
    # was removed.
    with open("description/%s.xml" % available_app, "w") as fh:
        ET.ElementTree(original_dom_document).write(fh)
def defaultSplit(available_app, original_dom_document, parameter):
    """Split the app by the choices of *parameter* and write one XML file
    per variant; return the list of variant documents."""
    variants = split_by_choice(original_dom_document, parameter)
    written = []
    for choice_name in variants:
        defaultWrite('%s-%s' % (available_app, choice_name), variants[choice_name])
        written.append(variants[choice_name])
    return written
| luofei98/qgis | python/plugins/processing/algs/otb/OTBUtils.py | Python | gpl-2.0 | 9,300 |
from __future__ import print_function
import argparse
import json
import logging
import os
import re
import sys
import wdl.parser
from jinja2 import Environment, FileSystemLoader
__version__ = '0.2'
# AST-node-name -> handler function; populated automatically by the
# registration loop at the bottom of this module.
handlers = {}
# set up logging
logger = logging.getLogger('Main')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
logger.addHandler(ch)
# WDL primitive type name -> CWL type name.
typemap = {"Int": "int",
           "File": "File",
           "String": "string",
           "Array": "array",
           "Float": "float",
           "Boolean": "boolean"}
def ihandle(i, **kw):
    """Dispatch an AST node or terminal to its conversion routine.

    Terminals (literals, types, identifiers...) are rendered inline;
    non-terminals are routed to the handleXxx function registered in
    'handlers' under the node's name.
    """
    if isinstance(i, wdl.parser.Terminal):
        if i.str == "string":
            return '"%s"' % i.source_string
        elif i.str == "integer":
            return i.source_string
        elif i.str == "cmd_part":
            return i.source_string
        elif i.str == "type":
            # Map the WDL primitive type onto its CWL counterpart.
            return typemap[i.source_string]
        elif i.str == "fqn":
            return "#" + i.source_string
        elif i.str == "asterisk":
            return i.source_string
        elif i.str in "identifier":
            # NOTE(review): 'in "identifier"' is a substring test, so it
            # also matches terminals like 'id' -- confirm '==' wasn't meant.
            if kw.get("in_expression"):
                # kw.get("depends_on").add(i)
                if i.source_string in kw.get("filevars", ""):
                    # File-typed values are dereferenced via their .path.
                    return "inputs.%s.path" % i.source_string
                else:
                    return "inputs." + i.source_string
            else:
                return i.source_string
        else:
            raise NotImplementedError("Unknown terminal '%s'" % i.str)
    else:
        return handlers[i.name](i, **kw)
def handleDocument(item, **kwargs):
    """Process a WDL document: handle imports first, then collect the
    converted definitions."""
    for imported in item.attr("imports"):
        ihandle(imported, **kwargs)
    return [ihandle(definition, **kwargs)
            for definition in item.attr("definitions")]
def handleImport(item, **kwargs):
    """WDL 'import' statements are not supported by this converter."""
    raise NotImplementedError('Import not implemented')
def handleTask(item, **kwargs):
    """Convert a WDL 'task' AST node into a CWL CommandLineTool dict."""
    tool = {"id": ihandle(item.attr("name"), **kwargs),
            "class": "CommandLineTool",
            "cwlVersion": "v1.0",
            "baseCommand": [],
            "requirements": [{"class": "ShellCommandRequirement"},
                             {"class": "InlineJavascriptRequirement"}],
            "inputs": [],
            "outputs": []}
    # Names of File-typed inputs (expressions reference them via .path).
    filevars = kwargs.get("filevars", set())
    for i in item.attr("declarations"):
        # NO! declarations can be expressions of other inputs and thus must not be treated as file inputs
        tool["inputs"].append(ihandle(i, context=tool,
                                      assignments=kwargs.get("assignments", {}),
                                      filevars=filevars,
                                      **kwargs))
    # Sections (command / runtime / outputs / parameter_meta) mutate 'tool'.
    for i in item.attr("sections"):
        ihandle(i, context=tool, assignments=kwargs.get("assignments", {}),
                filevars=filevars, **kwargs)
    return tool
def handleWorkflow(item, **kwargs):
    """Convert a WDL 'workflow' AST node into a CWL Workflow dict."""
    wf = {"id": ihandle(item.attr("name")),
          "class": "Workflow",
          "cwlVersion": "v1.0",
          "inputs": [],
          "outputs": [],
          "requirements": [
              {"class": "InlineJavascriptRequirement"}
          ],
          "steps": []}
    # WDL name -> CWL source reference, shared across the workflow body.
    assignments = {}
    # Names of File-typed values (dereferenced via .path in expressions).
    filevars = set()
    for i in item.attr("body"):
        if i.name == "Call":
            wf["steps"].append(ihandle(i, context=wf, assignments=assignments,
                                       filevars=filevars, **kwargs))
        elif i.name == "Declaration":
            # NO! declarations can be expressions of other inputs and thus must not be treated as inputs
            inp = ihandle(i, context=wf, assignments=assignments,
                          filevars=filevars, **kwargs)
            if inp:
                wf["inputs"].append(inp)
        elif i.name == "WorkflowOutputs":
            wf["outputs"] = ihandle(i, context=wf, **kwargs)
        elif i.name == "Scatter":
            wf["steps"].extend(ihandle(i, context=wf, assignments=assignments,
                                       filevars=filevars, **kwargs))
        else:
            raise NotImplementedError
    # No explicit outputs: expose every step output as a workflow output.
    if wf['outputs'] == []:
        for step in wf['steps']:
            copy_step_outputs_to_workflow_outputs(step, wf['outputs'], **kwargs)
    return wf
def handleRuntime(item, **kwargs):
    """Map WDL runtime attributes onto CWL requirements of the current tool.

    Only 'docker' and 'memory' are translated; other keys are logged and
    dropped.
    """
    for runtimeRequirement in item.attr('map'):
        key = ihandle(runtimeRequirement.attr('key'))
        if key == 'docker':
            value = ihandle(runtimeRequirement.attr('value'))
            if type(value) is list:
                value = value[0]  # if there are several Docker images, pick the first one (due to CWL restrictions)
            kwargs['context']['requirements'].append({
                'class': 'DockerRequirement',
                'dockerPull': strip_special_ch(value)
            })
        elif key == 'memory':
            # NOTE(review): the WDL memory string (e.g. "2 GB") is passed
            # through unparsed -- confirm it matches CWL ramMin semantics.
            kwargs['context']['requirements'].append({
                'class': 'ResourceRequirement',
                'ramMin': strip_special_ch(ihandle(runtimeRequirement.attr('value')))
            })
        else:
            logger.warning('Field "{0}" is ignored'.format(key))
def handleType(item, **kwargs):
    """Map a WDL type AST node to a CWL type (string or schema dict)."""
    def _as_items(type_name):
        # 'T[...]' bracket notation becomes an explicit array schema.
        if type_name.endswith(']'):
            return {'type': 'array', 'items': type_name.split('[')[0]}
        return type_name

    outer = ihandle(item.attr('name'))
    inner = ihandle(item.attr('subtype')[0])
    if outer == 'array' and not inner.endswith(']'):
        return inner + '[]'
    return {'type': outer, 'items': _as_items(inner)}
def strip_special_ch(string):
    """Strip surrounding single and double quotes from *string*."""
    return string.strip("\"'")
def handleDeclaration(item, context=None, assignments=None, filevars=None, **kwargs):
    """Convert a WDL declaration into a CWL input parameter dict.

    Plain declarations become inputs; declarations with an initializer
    become inputs carrying a 'default' (a literal list or a '$(...)' JS
    expression string).
    """
    param_id = ihandle(item.attr("name"))
    param_type = ihandle(item.attr("type"), **kwargs)
    expression = item.attr("expression")
    kwargs['context'] = context
    if expression is None:
        # assignments[param_id] = "#%s/%s" % (context["id"], param_id)
        if param_type == "File":
            # Remember File inputs so expressions render inputs.<id>.path.
            filevars.add(param_id)
        return {"id": param_id,
                "type": param_type}
    else:
        kwargs['outputName'] = param_id
        result = ihandle(expression, **kwargs)
        if result:
            # if result[0] in {'\'', '"'} # expression is string
            if result.startswith('['):
                # NOTE(review): eval of a rendered array literal -- only
                # safe because the string was produced by this converter.
                result = eval(result)
            else:
                result = '$(' + result + ')'
        return {"id": param_id,
                "type": param_type,
                "default": result}
        # return {"id": param_id,
        #         "valueFrom": result}
def handleRawCommand(item, context=None, **kwargs):
    """Assemble the tool's shell command from WDL command parts.

    Literal parts and parameter substitutions are concatenated; if any
    parameter returned a JavaScript preamble (e.g. a 'sep' join), the
    whole command is rebuilt as a '${...}' JS expression.  The result is
    stored on *context* as a single unquoted CWL argument.
    """
    s = body = ''
    symbols = []
    parts = item.attr('parts')
    if 'python' in ihandle(parts[0]):
        for i, part in enumerate(parts):
            # TODO: python commands
            pass
        pass
    for p in parts:
        kwargs['command'] = s
        part = ihandle(p, **kwargs)
        if type(part) is list:
            # [JS preamble, placeholder symbol] from handleCommandParameter.
            body += part[0]
            s += part[1]
            symbols.append(part[1])
        else:
            s += part
    if body:  # if the expr. is a function body, not a simple expr.
        symbols.append('\$\(.*?\)')
        # Split the command around placeholders/$() substitutions and
        # re-join as a JS string concatenation.
        l = re.split('(' + '|'.join(symbols) + ')', s)
        res = []
        for k in l:
            if k in set(symbols[:-1]):
                res.append(k)
            elif '$' in k:
                res.append(re.sub('[$()]', '', k))
            else:
                res.append("\"" + k + "\"")
        s = ' + '.join(res)
        result = '${' + body + 'return ' + s + '}'
    else:
        result = s
    # Collapse backslash line-continuations and newlines into one line.
    result = re.sub(r'\\\n\s*', '', result)
    result = result.strip()
    result = result.replace('\n', '')
    context["arguments"] = [{"valueFrom": result, "shellQuote": False}]
def handleCommandParameter(item, context=None, **kwargs):
    """Render one '${...}' command placeholder.

    A 'sep' attribute produces [JS preamble, placeholder-name] so the
    caller can assemble the command as a JavaScript expression; otherwise
    a plain '$(...)' substitution string is returned.
    """
    attributes = item.attr('attributes')
    for option in attributes:
        key = ihandle(option.attr('key'))
        if key == 'sep':
            parameter = item.attr('expr').source_string
            string = parameter + '_separated'
            # JS that joins the File array's paths with the separator and
            # trims the trailing separator.
            preprocessing = """
    var {2} = '';
    for (var i=0; i<inputs.{0}.length; i++){{
        {2} = {2} + inputs.{0}[i].path + '{1}';
    }}
    {2} = {2}.replace(/{1}$/, '');
    """.format(parameter,
               ihandle(option.attr('value')).replace('\"', ""),
               string)
            return [preprocessing, string]
    return "$(" + ihandle(item.attr("expr"), in_expression=True, depends_on=set(), **kwargs) + ")"
def handleOutputs(item, context=None, **kwargs):
    """Convert a task's WDL 'output' section into CWL output parameters
    appended to *context*."""
    for a in item.attr("attributes"):
        out = {"id": ihandle(a.attr("name")),
               "type": ihandle(a.attr("type")),
               "outputBinding": {}}
        e = ihandle(a.attr("expression"),
                    is_expression=True,
                    depends_on=set(),
                    outputs=out,
                    tool=context,
                    context=context,
                    **kwargs)
        if type(e) is str:
            # Rewrite WDL interpolation braces into parens and drop quotes
            # to form a glob pattern.
            e = e.replace('{', '(').replace('}', ')').replace("\"", '')
            # NOTE(review): str(re.search(...)) yields '<re.Match ...>' or
            # 'None', which never starts with 'inputs' -- this condition
            # appears to be always true; confirm the original intent.
            if not str(re.search('\((.+?)\)', e)).startswith('inputs'):
                if not 'self' in e:
                    index = e.index('(')
                    e = e[:index + 1] + 'inputs.' + e[index + 1:]
        if e != "self":
            out["outputBinding"]["glob"] = e
        context["outputs"].append(out)
def handleFunctionCall(item, **kwargs):
    """Translate a WDL standard-library function call.

    Supported: stdout, read_int, read_string, read_tsv (queues an extra
    expression-tool step), sub, glob.  Anything else raises
    NotImplementedError.
    """
    global expression_tools
    function_name = ihandle(item.attr("name"))
    if function_name == "stdout":
        # Capture stdout into a well-known file and glob it back.
        kwargs["tool"]["stdout"] = "__stdout"
        kwargs["outputs"]["outputBinding"]["glob"] = "__stdout"
        return "self[0]"
    elif function_name == "read_int":
        kwargs["outputs"]["outputBinding"]["loadContents"] = True
        return "parseInt(" + ihandle(item.attr("params")[0], **kwargs) + ".contents)"
    elif function_name == "read_string":
        kwargs["outputs"]["outputBinding"]["loadContents"] = True
        return ihandle(item.attr("params")[0], **kwargs) + ".contents"
    elif function_name == "read_tsv":
        try:
            params = [ihandle(param, **kwargs) for param in item.attr('params')]
            tool_name = step_name = 'read_tsv'
            tool_file = tool_name + '.cwl'
            # handling duplicate step names due to hypothetical multiple expr. tools calls
            if kwargs['context'].get('steps', ''):
                read_tsv_steps = [step['id'] for step in kwargs['context']['steps'] if step['id'].startswith(step_name)]
            else:
                read_tsv_steps = None
            if not read_tsv_steps:
                step_name += '_1'
            else:
                step_name += str(int(read_tsv_steps[-1][-1]) + 1)  # if there are step_1, step_2 - create step_3
            output_name = kwargs['outputName']
            # TODO: params[0] - looks like magic
            read_tsv_step = {'id': step_name,
                             'run': tool_file,
                             'in': {
                                 'infile': params[0]},
                             'out': [output_name]
                             }
            kwargs['context']['steps'].insert(0, read_tsv_step)
            SUBSTITUTIONS = {'outputs': ('outputArray', output_name),
                             'expression': ('outputArray', output_name)}
            expression_tools.append((tool_file, SUBSTITUTIONS))
        except:
            # NOTE(review): bare except hides every failure of the read_tsv
            # conversion (even KeyboardInterrupt) -- consider narrowing and
            # logging; left as-is to preserve behavior.
            pass
    elif function_name == 'sub':
        params = item.attr('params')
        kwargs['in_expression'] = True
        result = ihandle(params[0], **kwargs) + '.replace(' + ihandle(params[1]) + ', ' + ihandle(params[2]) + ')'
        return result
    elif function_name == 'glob':
        return [ihandle(param).strip('"\'') for param in item.attr('params')]
    else:
        raise NotImplementedError("Unknown function '%s'" % ihandle(item.attr("name")))
def handleCall(item, context=None, assignments=None, **kwargs):
    """Convert a WDL 'call' into a CWL workflow step dict.

    Registers every task output in *assignments* (and filevars for File
    outputs), then promotes any unmapped, defaultless task input to a
    workflow-level input named '<step>_<input>'.
    """
    if item.attr("alias") is not None:
        stepid = ihandle(item.attr("alias"))
    else:
        stepid = ihandle(item.attr("task")).strip('#')
    step = {"id": stepid,
            "in": [],
            "out": [],
            "run": ihandle(item.attr("task")).strip('#') + '.cwl'}
    for out in kwargs["tasks"][ihandle(item.attr("task")).strip('#')]["outputs"]:
        step["out"].append({"id": out["id"]})
        mem = "%s.%s" % (stepid, out["id"])
        assignments[mem] = "#%s/%s/%s" % (context["id"], stepid, out["id"])
        if out["type"] == "File":
            kwargs["filevars"].add(mem)
    b = item.attr("body")
    if b is not None:
        ihandle(b, context=step, assignments=assignments, **kwargs)
    # Expose required task inputs that were not mapped in the call body.
    for taskinp in kwargs["tasks"][ihandle(item.attr("task")).strip('#')]["inputs"]:
        f = [stepinp for stepinp in step["in"] if stepinp["id"] == taskinp["id"]]
        if not f and taskinp.get("default") is None:
            newinp = "%s_%s" % (stepid, taskinp["id"])
            context["inputs"].append({
                "id": newinp,
                "type": taskinp["type"]
            })
            step["in"].append({
                "id": taskinp["id"],
                "source": "%s" % (newinp)
            })
    return step
def handleCallBody(item, **kwargs):
    """Process every io mapping inside a call body."""
    for mapping in item.attr("io"):
        ihandle(mapping, **kwargs)
def handleScatter(item, **kwargs):
    """Convert a WDL 'scatter' block into scattered CWL workflow steps."""
    scatter_requirements = [{'class': 'ScatterFeatureRequirement'},
                            {'class': 'StepInputExpressionRequirement'}]
    # NOTE(review): this 'not in' compares the two-element list against
    # single requirement dicts, so it is effectively always true and
    # repeated scatters append duplicate requirements -- confirm.
    if scatter_requirements not in kwargs['context']['requirements']:
        kwargs['context']['requirements'].extend(scatter_requirements)
    # TODO: smart scattering (over subworkflows rather than individual steps)
    steps = []
    for task in item.attr('body'):
        if task.name == 'Declaration':
            kwargs['context']['inputs'].append(ihandle(task, **kwargs))
        elif task.name == 'Call':
            tool_name = ihandle(task.attr("task")).strip('#')
            alias = task.attr("alias")
            if alias is not None:
                stepid = ihandle(alias)
            else:
                stepid = tool_name
            step = {"id": stepid,
                    "in": [],
                    "out": [],
                    "run": tool_name + '.cwl'}
            scatter_vars = [ihandle(item.attr('item')), ihandle(item.attr('collection'))]
            # Explanation: in WDL - scatter (scatter_vars[0] in scatter_vars[1])
            kwargs['scatter_vars'] = scatter_vars
            kwargs['scatter_inputs'] = []
            kwargs['step'] = step
            for out in kwargs["tasks"][tool_name]["outputs"]:
                step["out"].append({"id": out["id"]})
            b = task.attr("body")
            if b is not None:
                ihandle(b, **kwargs)
            scatter_inputs = kwargs['scatter_inputs']
            if type(scatter_inputs) is list and len(scatter_inputs) > 1:
                # Multiple scattered inputs advance in lockstep.
                step['scatterMethod'] = 'dotproduct'
            step.update({"scatter": kwargs['scatter_inputs']})
            steps.append(step)
        else:
            raise NotImplementedError
    return steps
def handleIOMapping(item, context=None, assignments=None, filevars=None, **kwargs):
    """Map one 'key=value' binding of a call body to a CWL step input.

    Inside a scatter, inputs referencing the scatter variable are added to
    scatter_inputs and wired to the collection's producing step; otherwise
    literals/expressions become valueFrom and references become source.
    """
    mp = {"id": ihandle(item.attr("key"))}
    scatter_vars = kwargs.get('scatter_vars', '')
    value = ihandle(item.attr("value"))
    if value.endswith(']'):
        # Strip an '[i]'-style index to recover the underlying variable.
        wdl_var = value[:-3]
    else:
        wdl_var = value
    if scatter_vars and ((wdl_var == scatter_vars[0]) or scatter_vars[0] in wdl_var):
        kwargs['scatter_inputs'].append(mp['id'])
        # Locate the step (if any) that produces the scattered collection.
        source = 'inputs'
        for step in context['steps']:
            if scatter_vars[1] in step['out']:
                source = step['id']
        if source != 'inputs':
            mp["source"] = "#{0}/{1}".format(source, scatter_vars[1])
        else:
            mp['source'] = scatter_vars[1]
        if scatter_vars[1] in mp.get("source", ""):
            mp["valueFrom"] = "$(" + ihandle(item.attr("value"),
                                             in_expression=False,
                                             filevars=filevars).replace(scatter_vars[0], "self") + ")"
    else:
        value_is_literal = hasattr(item.attr("value"), 'str') and \
            ((item.attr('value').str == 'string') or item.attr('value').str == 'integer')
        value_is_expression = hasattr(item.attr("value"), 'name')
        if (value_is_literal) or (value_is_expression and item.attr('value').name != 'MemberAccess'):
            mp['valueFrom'] = '$({0})'.format(value)
        elif value_is_expression and item.attr('value').name == 'MemberAccess':
            mp['source'] = '#' + value
        else:
            mp['source'] = value
    if scatter_vars:
        kwargs['step']["in"].append(mp)
    else:
        context['in'].append(mp)
def handleWorkflowOutputs(item, **kwargs):
    """Convert the WDL workflow 'output' section into CWL output dicts."""
    outputs = []
    for output in item.attr('outputs'):
        cwl_output = {}
        for key, value in output.attributes.items():
            if value:
                if key == 'wildcard':
                    if ihandle(value) == '*':
                        # asterisk means that all outputs from the task must be copied to workflow outputs
                        for step in kwargs['context']['steps']:
                            if step['id'] == cwl_output['source_step'].strip('#'):
                                copy_step_outputs_to_workflow_outputs(step, outputs, **kwargs)
                        del cwl_output['source_step']
                elif key == 'fqn':
                    res = ihandle(value)
                    if type(res) is str:
                        res = res.split('.')
                    if len(res) > 1:
                        # 'step.output' -> expose that single step output.
                        cwl_output['id'] = res[1]
                        cwl_output['outputSource'] = res[0] + '/' + res[1]
                    else:
                        # Bare step name: remembered for the wildcard case.
                        cwl_output['source_step'] = res[0]
                elif key == 'name':
                    cwl_output['id'] = ihandle(value)
                elif key == 'type':
                    cwl_output['type'] = ihandle(value)
                elif key == 'expression':
                    cwl_output['outputSource'] = ihandle(value)
        if cwl_output:
            outputs.append(cwl_output)
    return outputs
def copy_step_outputs_to_workflow_outputs(step, outputs, **kwargs):
    """Append one workflow output per output of *step* to *outputs*.

    Output ids are '<step_id>_<output_id>'; the type is looked up in the
    task definitions (kwargs['tasks']) -- wrapped in an array schema for
    scattered steps -- and falls back to "Any" for unknown tasks.
    """
    def _find_type(output_id):
        # Takes the output id as an explicit parameter instead of closing
        # over a variable named 'id' (which shadowed the builtin).
        task = kwargs['tasks'].get(step['id'], "")
        if not task:
            return "Any"
        for output_param in task['outputs']:
            if output_param['id'] == output_id:
                if 'scatter' in step:
                    # A scattered step yields one value per shard.
                    return {'type': 'array',
                            'items': output_param['type']}
                return output_param['type']
        # Known task but no matching output: None, as in the original.
        return None

    for output in step['out']:
        output_id = output['id'] if isinstance(output, dict) else output
        outputs.append({"id": step['id'] + '_' + output_id,
                        "type": _find_type(output_id),
                        "outputSource": '#' + step['id'] + '/' + output_id})
def handleInputs(item, **kwargs):
    """Process every mapping of an 'input:' section."""
    for mapping in item.attr("map"):
        ihandle(mapping, **kwargs)
def handleOptionalType(item, **kwargs):
    """Translate a WDL optional type ('T?') into CWL's nullable form."""
    inner = ihandle(item.attr('innerType'), **kwargs)
    if type(inner) is not dict:
        return inner + '?'
    if inner['type'] == 'array':
        return inner['items'] + '[]?'
    inner['type'] = [inner['type'], 'null']
    return inner
def handleParameterMeta(item, **kwargs):
    """Attach parameter_meta descriptions as 'doc' on matching tool inputs."""
    inputs = set(el['id'] for el in kwargs['context']['inputs'])
    for el in item.attr('map'):
        key, value = ihandle(el, **kwargs)
        if key in inputs:
            # Find the matching input and store the unquoted description.
            for i, inp in enumerate(kwargs['context']['inputs']):
                if inp['id'] == key:
                    kwargs['context']['inputs'][i]['doc'] = value.strip('\'"')
                    break
def handleRuntimeAttribute(item, **kwargs):
    """Return the (key, value) pair of a runtime attribute node."""
    key = ihandle(item.attr('key'))
    value = ihandle(item.attr('value'))
    return key, value
def handleArrayLiteral(item, **kwargs):
    """Render a WDL array literal as a bracketed comma-separated string."""
    rendered = ', '.join(ihandle(element) for element in item.attr('values'))
    return '[' + rendered.strip(' ,') + ']'
def handleMultiply(ex, **kwargs):
    """Render a multiplication expression as 'lhs * rhs'."""
    left = ihandle(ex.attr("lhs"), **kwargs)
    right = ihandle(ex.attr("rhs"), **kwargs)
    return left + " * " + right
def handleAdd(ex, **kwargs):
    """Render an addition expression as 'lhs + rhs'."""
    left = ihandle(ex.attr("lhs"), **kwargs)
    right = ihandle(ex.attr("rhs"), **kwargs)
    return left + " + " + right
def handleArrayOrMapLookup(ex, **kwargs):
    """Render an index/lookup expression as 'container[key]'."""
    container = ihandle(ex.attr("lhs"), **kwargs)
    key = ihandle(ex.attr("rhs"), **kwargs)
    return container + "[" + key + ']'
def handleMemberAccess(item, **kwargs):
    """Render member access 'a.b' as the CWL-style path 'a/b'."""
    owner = ihandle(item.attr("lhs"), **kwargs)
    member = ihandle(item.attr("rhs"), **kwargs)
    return owner + "/" + member
m = sys.modules[__name__]
# Auto-register every handleXxx function defined above as the handler for
# AST node type 'Xxx' (iterate over a copy: the dict changes during the loop).
for k, v in m.__dict__.copy().items():
    if k.startswith("handle"):
        handlers[k[6:]] = v
# Jinja environment that renders generated CWL files from the 'templates'
# directory shipped next to this module.
env = Environment(
    loader=FileSystemLoader(os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))),
    trim_blocks=True,
    lstrip_blocks=True)
main_template = env.get_template('cwltool.j2')
# Expression tools queued for export: [(file, SUBSTITUTIONS)] where
# SUBSTITUTIONS = {'path/to/substitute': (term, sub)}
expression_tools = []
def printstuff(wdl_code, directory=None, quiet=False):
    """Convert WDL source text to CWL tool/workflow files in *directory*.

    Bug fix: the old default ``directory=os.getcwd()`` was evaluated once at
    import time, so later calls silently wrote into whatever directory the
    process started in. The default is now resolved per call.

    :param wdl_code: WDL source code as a string.
    :param directory: output directory (defaults to the current directory).
    :param quiet: suppress printing of generated tools to stdout.
    """
    if directory is None:
        directory = os.getcwd()
    # Parse source code into an abstract syntax tree.
    ast = wdl.parser.parse(wdl_code).ast()
    tasks = {}
    # Convert every 'Task' AST into a CWL CommandLineTool.
    task_asts = wdl.find_asts(ast, 'Task')
    for task_ast in task_asts:
        tool = ihandle(task_ast)
        export_tool(tool, directory, quiet=quiet)
        tasks[ihandle(task_ast.attr("name"))] = tool
    # Convert every 'Workflow' AST, wiring in the tasks found above.
    workflow_asts = wdl.find_asts(ast, 'Workflow')
    for workflow_ast in workflow_asts:
        wf = ihandle(workflow_ast, tasks=tasks)
        export_tool(wf, directory, quiet)
    for expression_tool in expression_tools:
        export_expression_tool(expression_tool[0], expression_tool[1], directory)
    # NOTE(review): this render() result is discarded -- looks like dead
    # code, kept only to preserve any side effects of template evaluation.
    main_template.render()
def export_tool(tool, directory, quiet=False):
    """Serialize *tool* through the cwltool template into directory/<id>.cwl."""
    serialized = json.dumps(tool, indent=4)
    if not quiet:
        print(serialized)
    rendered = main_template.render(version=__version__, code=serialized)
    path = os.path.join(directory, '{0}.cwl'.format(tool['id']))
    with open(path, 'w') as f:
        f.write(rendered)
    logger.info('Generated file {0}'.format(path))
def export_expression_tool(tool, substitutions, directory):
    """Copy the bundled expression tool *tool* into *directory*, applying the
    term -> replacement substitutions while streaming line by line."""
    replacements = dict(substitutions.values())
    pattern = re.compile('|'.join(replacements.keys()))
    source_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                               'expression-tools', tool)
    target_path = os.path.join(directory, tool)
    with open(source_path) as source, open(target_path, 'w') as target:
        for line in source:
            target.write(pattern.sub(lambda match: replacements[match.group()], line))
def process_file(file, args):
    """Convert one WDL *file* to CWL, honoring args.directory / args.no_folder.

    Side effects: may create args.directory, chdir into it, and rewrite
    args.directory on the args namespace to an absolute path.

    Bug fix: the old ``k.replace('\\n', '')`` discarded its result (str.replace
    returns a new string), so the line was a no-op and has been removed.
    """
    with open(file) as f:
        wdl_code = f.read()
    if args.directory:
        args.directory = os.path.abspath(args.directory)
        if not os.path.isdir(args.directory):
            os.mkdir(args.directory)
        os.chdir(args.directory)
    else:
        args.directory = os.getcwd()
    if not args.no_folder:
        # one sub-folder per WDL file, named after the file (sans extension)
        cwl_directory = os.path.join(args.directory, os.path.basename(os.path.abspath(file)).replace('.wdl', ''))
        os.mkdir(cwl_directory)
        printstuff(wdl_code, cwl_directory, args.quiet)
    else:
        printstuff(wdl_code, directory=args.directory, quiet=args.quiet)
def main():
    """Command-line entry point: convert one WDL file or a directory of them."""
    parser = argparse.ArgumentParser(description='Convert a WDL workflow to CWL')
    parser.add_argument('workflow', help='a WDL workflow or a directory with WDL files')
    parser.add_argument('-d', '--directory', help='Directory to store CWL files')
    parser.add_argument('-q', '--quiet', action='store_true', help='Do not print generated files to stdout')
    parser.add_argument('--no-folder', action='store_true', help='Do not create a separate folder for each toolset')
    args = parser.parse_args()
    args.workflow = os.path.abspath(args.workflow)
    if not os.path.isdir(args.workflow):
        process_file(args.workflow, args)
        return
    for name in os.listdir(args.workflow):
        if not name.endswith('.wdl'):
            continue
        try:
            process_file(os.path.join(args.workflow, name), args)
        except Exception as e:
            # best-effort batch mode: log the failure, continue with next file
            logger.error("Error while processing file {0}: {1}".format(name, e))
if __name__ == '__main__':
    main()
| common-workflow-language/wdl2cwl | wdl2cwl/main.py | Python | apache-2.0 | 25,014 |
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""This module detects whether third-party libraries, utilized by third-party
drivers, are present on the system. If they are not, it mocks them and tinkers
with sys.modules so that the drivers can be loaded by unit tests, and the unit
tests can continue to test the functionality of those drivers without the
respective external libraries' actually being present.
Any external library required by a third-party driver should be mocked here.
Current list of mocked libraries:
- seamicroclient
- ipminative
- proliantutils
- pysnmp
- scciclient
"""
import sys
import mock
from oslo.utils import importutils
from ironic.drivers.modules import ipmitool
# NOTE(review): this module runs entirely at import time (Python 2 -- it uses
# the builtin reload()). Each section follows the same pattern: try to import
# an optional vendor library; if absent, install a Mock in sys.modules and
# reload any driver module that already imported the real name.
# attempt to load the external 'seamicroclient' library, which is
# required by the optional drivers.modules.seamicro module
seamicroclient = importutils.try_import("seamicroclient")
if not seamicroclient:
    smc = mock.Mock()
    smc.client = mock.Mock()
    smc.exceptions = mock.Mock()
    # drivers catch these, so they must be real exception types
    smc.exceptions.ClientException = Exception
    smc.exceptions.UnsupportedVersion = Exception
    sys.modules['seamicroclient'] = smc
    sys.modules['seamicroclient.client'] = smc.client
    sys.modules['seamicroclient.exceptions'] = smc.exceptions
    # if anything has loaded the seamicro driver yet, reload it now that
    # the external library has been mocked
    if 'ironic.drivers.modules.seamicro' in sys.modules:
        reload(sys.modules['ironic.drivers.modules.seamicro'])
# IPMITool driver checks the system for presence of 'ipmitool' binary during
# __init__. We bypass that check in order to run the unit tests, which do not
# depend on 'ipmitool' being on the system.
ipmitool.TIMING_SUPPORT = False
ipmitool.DUAL_BRIDGE_SUPPORT = False
ipmitool.SINGLE_BRIDGE_SUPPORT = False
# attempt to load 'pyghmi', required by the optional ipminative driver
pyghmi = importutils.try_import("pyghmi")
if not pyghmi:
    p = mock.Mock()
    p.exceptions = mock.Mock()
    p.exceptions.IpmiException = Exception
    p.ipmi = mock.Mock()
    p.ipmi.command = mock.Mock()
    p.ipmi.command.Command = mock.Mock()
    sys.modules['pyghmi'] = p
    sys.modules['pyghmi.exceptions'] = p.exceptions
    sys.modules['pyghmi.ipmi'] = p.ipmi
    sys.modules['pyghmi.ipmi.command'] = p.ipmi.command
    # FIXME(deva): the next line is a hack, because several unit tests
    # actually depend on this particular string being present
    # in pyghmi.ipmi.command.boot_devices
    p.ipmi.command.boot_devices = {'pxe': 4}
    if 'ironic.drivers.modules.ipminative' in sys.modules:
        reload(sys.modules['ironic.drivers.modules.ipminative'])
# attempt to load 'proliantutils', required by the optional iLO drivers
proliantutils = importutils.try_import('proliantutils')
if not proliantutils:
    proliantutils = mock.MagicMock()
    sys.modules['proliantutils'] = proliantutils
    sys.modules['proliantutils.ilo'] = proliantutils.ilo
    sys.modules['proliantutils.ilo.ribcl'] = proliantutils.ilo.ribcl
    # real exception types so driver except-clauses keep working
    proliantutils.ilo.ribcl.IloError = type('IloError', (Exception,), {})
    command_exception = type('IloCommandNotSupportedError', (Exception,), {})
    proliantutils.ilo.ribcl.IloCommandNotSupportedError = command_exception
    if 'ironic.drivers.ilo' in sys.modules:
        reload(sys.modules['ironic.drivers.ilo'])
# attempt to load the external 'pywsman' library, which is required by
# the optional drivers.modules.drac module
pywsman = importutils.try_import('pywsman')
if not pywsman:
    pywsman = mock.Mock()
    sys.modules['pywsman'] = pywsman
    # if anything has loaded the drac driver yet, reload it now that the
    # external library has been mocked
    if 'ironic.drivers.modules.drac' in sys.modules:
        reload(sys.modules['ironic.drivers.modules.drac'])
# attempt to load the external 'iboot' library, which is required by
# the optional drivers.modules.iboot module
iboot = importutils.try_import("iboot")
if not iboot:
    ib = mock.Mock()
    ib.iBootInterface = mock.Mock()
    sys.modules['iboot'] = ib
    # if anything has loaded the iboot driver yet, reload it now that the
    # external library has been mocked
    if 'ironic.drivers.modules.iboot' in sys.modules:
        reload(sys.modules['ironic.drivers.modules.iboot'])
# attempt to load the external 'pysnmp' library, which is required by
# the optional drivers.modules.snmp module
pysnmp = importutils.try_import("pysnmp")
if not pysnmp:
    pysnmp = mock.Mock()
    sys.modules["pysnmp"] = pysnmp
    sys.modules["pysnmp.entity"] = pysnmp.entity
    sys.modules["pysnmp.entity.rfc3413"] = pysnmp.entity.rfc3413
    sys.modules["pysnmp.entity.rfc3413.oneliner"] = (
        pysnmp.entity.rfc3413.oneliner)
    sys.modules["pysnmp.entity.rfc3413.oneliner.cmdgen"] = (
        pysnmp.entity.rfc3413.oneliner.cmdgen)
    sys.modules["pysnmp.error"] = pysnmp.error
    pysnmp.error.PySnmpError = Exception
    sys.modules["pysnmp.proto"] = pysnmp.proto
    sys.modules["pysnmp.proto.rfc1902"] = pysnmp.proto.rfc1902
    # Patch the RFC1902 integer class with a python int
    pysnmp.proto.rfc1902.Integer = int
    # if anything has loaded the snmp driver yet, reload it now that the
    # external library has been mocked
    if 'ironic.drivers.modules.snmp' in sys.modules:
        reload(sys.modules['ironic.drivers.modules.snmp'])
# attempt to load the external 'scciclient' library, which is required by
# the optional drivers.modules.irmc module
scciclient = importutils.try_import('scciclient')
if not scciclient:
    mock_scciclient = mock.MagicMock()
    sys.modules['scciclient'] = mock_scciclient
    sys.modules['scciclient.irmc'] = mock_scciclient.irmc
    # sentinels so power-state constants compare by identity in tests
    sys.modules['scciclient.irmc.scci'] = mock.MagicMock(
        POWER_OFF=mock.sentinel.POWER_OFF,
        POWER_ON=mock.sentinel.POWER_ON,
        POWER_RESET=mock.sentinel.POWER_RESET)
    # if anything has loaded the iRMC driver yet, reload it now that the
    # external library has been mocked
    if 'ironic.drivers.modules.irmc' in sys.modules:
        reload(sys.modules['ironic.drivers.modules.irmc'])
# attempt to load 'pyremotevbox', required by the optional VirtualBox driver
pyremotevbox = importutils.try_import('pyremotevbox')
if not pyremotevbox:
    pyremotevbox = mock.MagicMock()
    pyremotevbox.exception = mock.MagicMock()
    pyremotevbox.exception.PyRemoteVBoxException = Exception
    pyremotevbox.exception.VmInWrongPowerState = Exception
    sys.modules['pyremotevbox'] = pyremotevbox
    if 'ironic.drivers.modules.virtualbox' in sys.modules:
        reload(sys.modules['ironic.drivers.modules.virtualbox'])
| ramineni/myironic | ironic/tests/drivers/third_party_driver_mocks.py | Python | apache-2.0 | 6,947 |
from django.http import HttpResponse, Http404
from django.shortcuts import render_to_response
from django.contrib.auth.models import User
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from view.models import Weatherdata, Weatherstation, Cities
def index(request):
    """Front page: the ten most recent weather data rows."""
    latest = Weatherdata.objects.select_related(
        'weatherstation', 'weatherstation__city', 'city__country').all()[:10]
    return render_to_response('view/index.html', {'weatherdata': latest})
def all_wd(request):
    """Paginated listing (10 per page) of all weather data rows."""
    qs = Weatherdata.objects.select_related(
        'weatherstation', 'weatherstation__city', 'city__country').all()
    paginator = Paginator(qs, 10)
    try:
        page_number = int(request.GET.get("page", '1'))
    except ValueError:
        page_number = 1
    try:
        page = paginator.page(page_number)
    except (InvalidPage, EmptyPage):
        # out-of-range page numbers fall back to the last page
        page = paginator.page(paginator.num_pages)
    return render_to_response('view/all_wd.html', {'weatherdata': page})
def detail(request, ws_id):
    """All measurements recorded by one weather station (404 if unknown)."""
    try:
        station = Weatherstation.objects.get(id=ws_id)
    except Weatherstation.DoesNotExist:
        raise Http404
    measurements = Weatherdata.objects.filter(weatherstation_id=station.id).all()
    return render_to_response('view/detail.html', {'weatherdata': measurements})
def user_ws(request, user_id):
    """All weather stations owned by *user_id*, each with its measurements.

    Renders a {station: measurements} mapping; 404s on an unknown user and
    returns a plain message when the user owns no stations.

    Fix: the old existence check re-ran `Weatherstation.objects.filter(...)`
    as a second identical DB query; the already-fetched queryset is used now.
    """
    try:
        user = User.objects.get(id=user_id)
    except User.DoesNotExist:
        raise Http404
    weatherstations = Weatherstation.objects.select_related(
        'user__weatherdata').filter(user_id=user_id).all()
    if not weatherstations:
        # original behavior: a bare (German) message instead of an empty page
        return HttpResponse('Keine Wetterstationen verfügbar')
    weather_dict = {}
    for ws in weatherstations:
        weather_dict[ws] = Weatherdata.objects.select_related(
            'weatherstation__weatherdata').filter(weatherstation_id=ws.id).all()
    context = {'weather_dict': weather_dict, 'user': user}
    return render_to_response('view/user_ws.html', context)
def city_wd(request, city_id):
    """All measurements from stations located in the given city (404 if unknown).

    Bug fix: `Cities.objects.get` raises `Cities.DoesNotExist`, but the old
    handler caught `Weatherstation.DoesNotExist`, so an unknown city produced
    an HTTP 500 instead of a 404.
    """
    try:
        city = Cities.objects.get(id=city_id)
    except Cities.DoesNotExist:
        raise Http404
    wd = Weatherdata.objects.select_related('weatherstation', 'weatherstation__city').\
        filter(weatherstation__city=city_id).all()
    context = {'weatherdata': wd, 'city': city}
    return render_to_response('view/city_wd.html', context)
@method_decorator(csrf_exempt)
def save_wd(request):
    """Persist one weather measurement POSTed by a station.

    Bug fix: the old view returned None for non-POST requests, which makes
    Django raise; it now answers 405 Method Not Allowed instead.
    """
    if request.method != 'POST':
        return HttpResponse(status=405)
    weatherdata = Weatherdata(
        humidity=request.POST.get("humidity"),
        temperature=request.POST.get("temperature"),
        altitude=request.POST.get("altitude"),
        air_pressure=request.POST.get("air_pressure"),
        lightness=request.POST.get("lightness"),
        weatherstation_id=request.POST.get("weatherstation_id"))
    weatherdata.save()
    return HttpResponse(weatherdata)
| wegtam/weather-api | view/views.py | Python | bsd-3-clause | 3,212 |
import mxnet as mx
import numpy as np
from operator_py.nms import py_nms_wrapper
def multiclass_nms(nms, cls_score, bbox_xyxy, min_det_score, max_det_per_image):
    """Per-class score filtering + NMS, keeping the top detections.

    cls_score: (n, 1 + num_class) scores, background at column 0.
    bbox_xyxy: (n, 4) class-agnostic boxes or (n, 4 * (1 + num_class)).
    Returns rows of [x1, y1, x2, y2, score, cls] sorted by descending score,
    truncated to max_det_per_image.
    """
    # drop the background score column and, for class-aware boxes, the
    # background box slice
    cls_score = cls_score[:, 1:]
    bbox_xyxy = bbox_xyxy[:, 4:] if bbox_xyxy.shape[1] != 4 else bbox_xyxy
    num_class = cls_score.shape[1]
    class_agnostic = bbox_xyxy.shape[1] == 4
    per_class_dets = []
    for cid in range(num_class):
        score = cls_score[:, cid]
        box = bbox_xyxy if class_agnostic else bbox_xyxy[:, cid * 4:(cid + 1) * 4]
        keep = np.where(score > min_det_score)[0]
        det = np.concatenate((box[keep], score[keep].reshape(-1, 1)),
                             axis=1).astype(np.float32)
        det = nms(det)
        cls_col = np.full((det.shape[0], 1), cid, dtype=np.float32)
        per_class_dets.append(np.hstack((det, cls_col)))
    all_dets = np.vstack(per_class_dets)
    order = np.argsort(all_dets[:, -2])[::-1][:max_det_per_image]
    return all_dets[order]
class BboxPostProcessingOperator(mx.operator.CustomOp):
    """Custom op: per-image multiclass NMS producing fixed-size outputs."""

    def __init__(self, max_det_per_image, min_det_score, nms_type, nms_thr):
        super().__init__()
        self.max_det_per_image = max_det_per_image
        self.min_det_score = min_det_score
        self.nms_type = nms_type
        self.nms_thr = nms_thr

    def forward(self, is_train, req, in_data, out_data, aux):
        if self.nms_type != 'nms':
            raise NotImplementedError
        nms = py_nms_wrapper(self.nms_thr)
        cls_score = in_data[0].asnumpy()  # (b, n, num_class_withbg)
        bbox_xyxy = in_data[1].asnumpy()  # (b, n, 4) or (b, n, 4 * num_class_withbg)
        batch_image = cls_score.shape[0]
        # fixed-size outputs, padded with zeros and class id -1
        out_score = np.zeros((batch_image, self.max_det_per_image, 1), dtype=np.float32)
        out_bbox = np.zeros((batch_image, self.max_det_per_image, 4), dtype=np.float32)
        out_cls = np.full((batch_image, self.max_det_per_image, 1), -1, dtype=np.float32)
        for i in range(batch_image):
            dets = multiclass_nms(nms, cls_score[i], bbox_xyxy[i],
                                  self.min_det_score, self.max_det_per_image)
            n = dets.shape[0]
            out_bbox[i, :n] = dets[:, :4]
            out_score[i, :n] = dets[:, -2:-1]
            out_cls[i, :n] = dets[:, -1:]
        self.assign(out_data[0], req[0], out_score)
        self.assign(out_data[1], req[1], out_bbox)
        self.assign(out_data[2], req[2], out_cls)

    def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
        # detection selection is not differentiable: propagate zero gradients
        self.assign(in_grad[0], req[0], 0)
        self.assign(in_grad[1], req[1], 0)
@mx.operator.register("BboxPostProcessing")
class BboxPostProcessingProp(mx.operator.CustomOpProp):
    """Operator properties: argument names, output shapes, operator factory."""

    def __init__(self, max_det_per_image, min_det_score, nms_type, nms_thr):
        super().__init__(need_top_grad=False)
        # custom-op parameters arrive as strings; coerce them here
        self.max_det_per_image = int(max_det_per_image)
        self.min_det_score = float(min_det_score)
        self.nms_type = str(nms_type)
        self.nms_thr = float(nms_thr)

    def list_arguments(self):
        return ['cls_score', 'bbox_xyxy']

    def list_outputs(self):
        return ['post_score', 'post_bbox_xyxy', 'post_cls']

    def infer_shape(self, in_shape):
        cls_score_shape = in_shape[0]   # (b, n, num_class_withbg)
        bbox_xyxy_shape = in_shape[1]   # (b, n, 4) or (b, n, 4 * num_class_withbg)
        batch_image = cls_score_shape[0]
        det = self.max_det_per_image
        out_shapes = [(batch_image, det, 1),
                      (batch_image, det, 4),
                      (batch_image, det, 1)]
        return [cls_score_shape, bbox_xyxy_shape], out_shapes

    def create_operator(self, ctx, shapes, dtypes):
        return BboxPostProcessingOperator(self.max_det_per_image, self.min_det_score,
                                          self.nms_type, self.nms_thr)

    def declare_backward_dependency(self, out_grad, in_data, out_data):
        return []
| TuSimple/simpledet | models/maskrcnn/bbox_post_processing.py | Python | apache-2.0 | 4,679 |
from django.conf import settings
# ExactTarget (Salesforce Marketing Cloud) credentials, read from Django
# settings with empty-string fallbacks so importing never fails.
CLIENT_ID = getattr(settings, 'EXACT_TARGET_CLIENT_ID', '')
CLIENT_SECRET = getattr(settings, 'EXACT_TARGET_CLIENT_SECRET', '')
WSDL_URL = getattr(settings, 'EXACT_TARGET_WSDL_URL', '')
| roverdotcom/django-fuelsdk | django_fuelsdk/constants.py | Python | mit | 221 |
from django.conf.urls import *
# Old-style Django URLconf: the string prefix makes 'payments_billing'
# resolve to django_braintree.views.payments_billing.
urlpatterns = patterns('django_braintree.views',
    url(r'^payments-billing/$', 'payments_billing', name='payments_billing'),
)
| Tivix/django-braintree | django_braintree/urls.py | Python | mit | 162 |
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from .models import Post
class LatestEntriesFeed(Feed):
    """RSS feed of the ten most recently published blog posts."""
    title = "Transverbis blog"
    link = "/"
    description = "Transverbis updates"

    def items(self):
        published = Post.objects.filter(published_date__isnull=False)
        return published.order_by('-published_date')[:10]

    def item_title(self, item):
        return item.title

    def item_description(self, item):
        return item.text

    def item_link(self, item):
        # Post has no get_absolute_url, so build the detail URL explicitly.
        return reverse('blog:post_detail', args=[item.pk])
import os
import tornado
from tornado.websocket import WebSocketHandler
from tornado.web import RequestHandler, StaticFileHandler, Application, url
from tornado.escape import json_encode, json_decode
from tornado import ioloop
from rx.subjects import Subject
# JavaScript keyboard keycodes for the Konami code:
# up up down down left right left right B A
UP, DOWN, LEFT, RIGHT, B, A = 38, 40, 37, 39, 66, 65
codes = [UP, UP, DOWN, DOWN, LEFT, RIGHT, LEFT, RIGHT, B, A]
class WSHandler(WebSocketHandler):
    """WebSocket handler that watches the keystroke stream for the Konami code."""

    def open(self):
        print("WebSocket opened")
        # A Subject is both observable and observer: incoming messages are
        # pushed into it, and the Rx query below is built on top of it.
        self.subject = Subject()
        query = (
            self.subject
            # 1. project each message object to its keycode
            .map(lambda obj: obj["keycode"])
            # 2. sliding windows of 10 keycodes, advancing by 1
            .window_with_count(10, 1)
            # 3. compare every window against the Konami sequence
            .select_many(lambda win: win.sequence_equal(codes))
            # 4. keep only the matches (Trues)
            .filter(lambda equal: equal)
        )
        # on any match, notify the client
        query.subscribe(lambda x: self.write_message("Konami!"))

    def on_message(self, message):
        self.subject.on_next(json_decode(message))

    def on_close(self):
        print("WebSocket closed")
class MainHandler(RequestHandler):
    """Serve the single-page UI."""

    def get(self):
        self.render("index.html")
def main():
    """Start the Tornado application (port from $PORT, default 8080)."""
    port = os.environ.get("PORT", 8080)
    routes = [
        url(r"/", MainHandler),
        (r'/ws', WSHandler),
        (r'/static/(.*)', StaticFileHandler, {'path': "."}),
    ]
    app = Application(routes)
    print("Starting server at port: %s" % port)
    app.listen(port)
    ioloop.IOLoop.current().start()


if __name__ == '__main__':
    main()
| dbrattli/RxPY | examples/konamicode/konamicode.py | Python | apache-2.0 | 1,905 |
import numpy as np
import scipy.ndimage
import tensorflow as tf
from homer.page import Page
from homer import util
MAX_SIZE = 4096
def get_rotated_page(page):
    """Return a new Page whose image is deskewed by the detected angle."""
    angle, square_img = get_angle(page)
    # rotate back by -angle via a numpy/scipy op wrapped into the TF graph
    rotated, = tf.py_func(_rotate_image, [page.image, -angle], [square_img.dtype])
    rotated.set_shape(page.image.get_shape())
    result = Page(rotated)
    result.angle = angle
    return result
def get_angle(page):
    """Estimate the skew angle of *page* from the peak of its 2-D FFT.

    Returns (angle, square_image); angle is in radians, or NaN when the
    spectrum peak lies on the x axis (y == 0).

    NOTE(review): uses pre-1.0 TensorFlow APIs (tf.complex_abs,
    tf.concat(axis, values), tf.select, dimension= kwarg) -- confirm the
    pinned TF version before touching this.
    """
    img = tf.cast(page.image, tf.float32)
    square = get_square(img)
    # magnitude spectrum of the top half of the padded square image
    f = tf.complex_abs(tf.fft2d(tf.cast(square, tf.complex64))[:MAX_SIZE//2, :])
    # frequency coordinates: x wraps around, y indexes the kept rows
    x_arr = (
        tf.cast(tf.concat(0,
                          [tf.range(MAX_SIZE // 2),
                           tf.range(1, MAX_SIZE // 2 + 1)[::-1]]),
                tf.float32))[None, :]
    y_arr = tf.cast(tf.range(MAX_SIZE // 2), tf.float32)[:, None]
    # suppress the low-frequency disc (radius 32) around the DC component
    f = tf.select(x_arr * x_arr + y_arr * y_arr < 32 * 32, tf.zeros_like(f), f)
    # flat index of the strongest remaining frequency component
    m = tf.argmax(tf.reshape(f, [-1]), dimension=0)
    x = tf.cast((m + MAX_SIZE // 4) % (MAX_SIZE // 2) - (MAX_SIZE // 4), tf.float32)
    y = tf.cast(tf.floordiv(m, MAX_SIZE // 2), tf.float32)
    return (tf.cond(
        y > 0, lambda: tf.atan(x / y), lambda: tf.constant(np.nan, tf.float32)),
        square)
def get_square(img, max_size=MAX_SIZE):
    """Scale *img* so its longest side is *max_size*, then pad to a square.

    Bug fix: the padding step previously hard-coded MAX_SIZE instead of
    forwarding *max_size*, so any non-default max_size produced a pad that
    did not match the resized image. Default behavior is unchanged.
    """
    old_shape = tf.shape(img)
    new_shape, = tf.py_func(
        _resized_shape, [old_shape, max_size], [old_shape.dtype])
    resized = util.scale(img, new_shape)
    return pad_square(resized, max_size)
def pad_square(img, max_size=MAX_SIZE):
    """Pad *img* on the bottom/right with white (255) up to a max_size square."""
    shape = tf.shape(img)
    padding, = tf.py_func(_get_padding, [shape, max_size], [shape.dtype])
    # pad the inverted image with zeros, then invert back => white padding
    inverted = tf.pad(255 - img, padding)
    return 255 - inverted
def _resized_shape(shape, max_size):
if shape[0] > shape[1]:
return np.array([max_size, shape[1] * float(max_size) / shape[0]],
dtype=shape.dtype)
else:
return np.array([shape[0] * float(max_size) / shape[1], shape[1]],
dtype=shape.dtype)
def _get_padding(shape, max_size):
return np.array([[0, max_size - shape[0]], [0, max_size - shape[1]]],
dtype=shape.dtype)
def _rotate_image(img, angle):
# TODO: Add a GPU rotation op.
if np.isnan(angle):
return img
else:
return scipy.ndimage.interpolation.rotate(
img, np.rad2deg(angle), reshape=False, mode='constant', cval=255.0)
| ringw/MetaOMR | homer/rotate.py | Python | gpl-3.0 | 2,288 |
"""In the pollster approach we no longer see Postalcodes as a separate field
These files are kept for backwards compatability"""
import datetime
import re
from django import forms
from django.utils.safestring import mark_safe
from django.conf import settings
from localflavor.it.forms import ITZipCodeField
from localflavor.nl.forms import NLZipCodeField
from localflavor.gb.forms import GBPostcodeField as fullGBPostcodeField
from localflavor.be.forms import BEPostalCodeField
from localflavor.pt.forms import PTZipCodeField
from localflavor.se.forms import SEPostalCodeField
from .widgets import (AdviseWidget, MonthYearWidget,
DatePickerWidget, DateOrOptionPickerWidget,
TableOptionsSingleWidget, TableOfSelectsWidget, )
__all__ = ['AdviseField', 'MonthYearField', 'PostCodeField', 'DateOrOptionField',
'TableOptionsSingleField', 'TableOfSelectsField', ]
class AdviseField(forms.Field):
    """Display-only pseudo-field: renders advice text and never fails validation."""
    widget = AdviseWidget
    required = False
    def clean(self, value):
        # no user input to validate; always succeed
        return True
class MonthYearField(forms.Field):
    """Field accepting a 'YYYY-MM' value, normalized to the first of that month."""
    widget = MonthYearWidget

    def clean(self, value):
        """Validate month and year values (derived from DateField.clean)."""
        super(MonthYearField, self).clean(value)
        if value in (None, ''):
            return None
        if isinstance(value, datetime.datetime):
            return value.date()
        if isinstance(value, datetime.date):
            return value
        try:
            year_str, month_str = value.split('-')
            return datetime.datetime(int(year_str), int(month_str), 1).date()
        except ValueError:
            # wrong number of parts, non-numeric parts, or invalid month
            raise forms.ValidationError(self.error_messages['invalid'])
class UKPostcodeField(fullGBPostcodeField):
    """Accept and check only the outward code of a UK postcode.

    Privacy measure: the full postcode would locate the participant too
    precisely, so only the outcode (plus the special GIR code) is allowed.
    """
    outcode_pattern = fullGBPostcodeField.outcode_pattern
    postcode_regex = re.compile(r'^(GIR|%s)$' % outcode_pattern)

    def clean(self, value):
        cleaned = super(UKPostcodeField, self).clean(value)
        if cleaned == '':
            return cleaned
        outcode = cleaned.upper().strip()
        if not self.postcode_regex.search(outcode):
            raise forms.ValidationError(self.default_error_messages['invalid'])
        return outcode
class PostCodeField(forms.RegexField):
    """Postcode field delegating validation to the country-specific
    localflavor field selected via settings.COUNTRY (fallback: NL)."""
    country_fields = {
        'be': BEPostalCodeField,
        'it': ITZipCodeField,
        'nl': NLZipCodeField,
        'pt': PTZipCodeField,
        'se': SEPostalCodeField,
        'uk': UKPostcodeField,
    }

    def __init__(self, *args, **kwargs):
        self.country = kwargs.pop('country', settings.COUNTRY)
        super(PostCodeField, self).__init__(self.country, *args, **kwargs)

    def clean(self, value):
        # fall back to the Dutch validator for unknown country codes
        field_class = self.country_fields.get(self.country) or self.country_fields['nl']
        return field_class().clean(value)
class DateOrOptionField(forms.MultiValueField):
    """Composite field: either a free-form date or a single named option.

    clean() returns the chosen option value when the option is selected,
    otherwise the parsed date (or None when empty and not required).
    """
    def __init__(self, *args, **kwargs):
        self.option = kwargs.pop('option', '')
        self.widget = DateOrOptionPickerWidget(choices=[(0, self.option)])
        # accept a wide range of day-first and textual date formats
        datefield = forms.DateField(
            required=False,
            help_text="Date format: day/month/year",
            input_formats=['%Y-%m-%d', '%d/%m/%Y',
                           '%d/%m/%y', '%d-%m-%y',
                           '%d-%m-%Y', '%b %d %Y',
                           '%b %d, %Y', '%d %b %Y',
                           '%d %b, %Y', '%B %d %Y',
                           '%B %d, %Y', '%d %B %Y',
                           '%d %B, %Y'])
        self.datefield = datefield
        self.fields = [datefield, forms.ChoiceField(required=False)]
        super(DateOrOptionField, self).__init__(fields=self.fields,
                                                widget=self.widget,
                                                *args, **kwargs)
    def compress(self, v):
        # sub-values are combined manually in clean(); pass through
        return v
    def clean(self, value):
        date, choice = value
        if len(choice) > 0:  # option was chosen
            return choice[0]
        else:  # use the date
            date = self.datefield.clean(date)
            if date is None:
                if self.required:
                    raise forms.ValidationError(self.error_messages['required'])
                return None
            return date
class TableOfSelectsField(forms.MultiValueField):
    """A rows x columns grid of identical ChoiceFields rendered as one field."""

    def __init__(self, rows, columns, choices, *args, **kwargs):
        grid = [forms.ChoiceField(label=row, choices=choices, required=False)
                for row in rows
                for column in columns]
        kwargs['widget'] = TableOfSelectsWidget(rows, columns, choices)
        super(TableOfSelectsField, self).__init__(grid, *args, **kwargs)

    def compress(self, v):
        return v

    def clean(self, value):
        # validation is delegated to the individual sub-fields; pass through
        return value
class TableOptionsSingleField(forms.MultiValueField):
    """One ChoiceField per row, all sharing the same option set, rendered
    as a table of single-choice rows."""
    def __init__(self, options, rows, required_rows=None, *args, **kwargs):
        self.options = options
        self.rows = rows
        # required_rows: None = all rows required, a list of row indices,
        # or a callable mapping submitted values to the required indices
        self.required_rows = required_rows
        if 'widget' not in kwargs:
            widget = TableOptionsSingleWidget(options=self.options,
                                              rows=self.rows)
            kwargs['widget'] = widget
        if 'fields' not in kwargs:
            fields = []
            for key, label in self.rows:
                field = forms.ChoiceField(label=label,
                                          required=False,
                                          choices=list(self.options))
                fields.append(field)
            kwargs['fields'] = fields
        super(TableOptionsSingleField, self).__init__(**kwargs)
    def compress(self, value):
        return value
    def clean(self, value):
        # per-row validation happens in clean_all(); pass through here
        return value
    def clean_all(self, field, values):
        # Verify that every required row index received a non-None value.
        required = self.required_rows
        if required is None:
            required = range(0, len(self.rows))
        elif callable(required):
            required = required(values)
        filled = []
        if values[field] is not None:
            for index, value in enumerate(values[field]):
                if value is not None:
                    filled.append(index)
        for index in required:
            if index not in filled:
                raise forms.ValidationError('Incomplete answer')
        return values[field]
| hrpt-se/hrpt | apps/survey/forms/fields.py | Python | agpl-3.0 | 6,667 |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from contextlib import contextmanager
from textwrap import dedent
from pants.util.contextutil import temporary_dir
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
class JvmPlatformAnalysisIntegrationTest(PantsRunIntegrationTest):
  """Make sure jvm-platform-analysis runs properly, especially with respect to caching behavior.

  Bug fix: ``test_bad_then_good`` was defined twice verbatim; the second
  definition silently shadowed the first, so the duplicate has been removed.
  """

  class JavaSandbox(object):
    """Testing sandbox for making temporary java_library targets."""

    def __init__(self, test, workdir, javadir):
      self.javadir = javadir
      self.workdir = workdir
      self.test = test
      if not os.path.exists(self.workdir):
        os.makedirs(self.workdir)

    @property
    def build_file_path(self):
      # the BUILD file governing the sandboxed java targets
      return os.path.join(self.javadir, 'BUILD')

    def write_build_file(self, contents):
      with open(self.build_file_path, 'w') as f:
        f.write(contents)

    def spec(self, name):
      return '{}:{}'.format(self.javadir, name)

    def clean_all(self):
      return self.test.run_pants(['clean-all'])

    def jvm_platform_validate(self, *targets):
      return self.test.run_pants_with_workdir(['jvm-platform-validate', '--check=fatal']
                                              + map(self.spec, targets),
                                              workdir=self.workdir)

  @contextmanager
  def setup_sandbox(self):
    """Yield a JavaSandbox rooted in a fresh temporary source tree."""
    with temporary_dir('.') as tempdir:
      workdir = os.path.abspath(tempdir)
      javadir = os.path.join(tempdir, 'src', 'java')
      os.makedirs(javadir)
      with open(os.path.join(workdir, 'BUILD'), 'w') as f:
        f.write(dedent('''
            source_root('src/java', java_library)
        '''))
      yield self.JavaSandbox(self, os.path.join(workdir, '.pants.d'), javadir)

  @property
  def _good_one_two(self):
    # two targets with different platforms and no dependency edge: valid
    return dedent('''
        java_library(name='one',
          platform='1.7',
        )
        java_library(name='two',
          platform='1.8',
        )
    ''')

  @property
  def _bad_one_two(self):
    # 'one' (1.7) depends on 'two' (1.8): an invalid platform dependency
    return dedent('''
        java_library(name='one',
          platform='1.7',
          dependencies=[':two'],
        )
        java_library(name='two',
          platform='1.8',
        )
    ''')

  def test_good_targets_works_fresh(self):
    with self.setup_sandbox() as sandbox:
      sandbox.write_build_file(self._good_one_two)
      self.assert_success(sandbox.clean_all())
      self.assert_success(sandbox.jvm_platform_validate('one', 'two'))

  def test_bad_targets_fails_fresh(self):
    with self.setup_sandbox() as sandbox:
      sandbox.write_build_file(self._bad_one_two)
      self.assert_success(sandbox.clean_all())
      self.assert_failure(sandbox.jvm_platform_validate('one', 'two'))

  def test_good_then_bad(self):
    with self.setup_sandbox() as sandbox:
      sandbox.write_build_file(self._good_one_two)
      self.assert_success(sandbox.clean_all())
      self.assert_success(sandbox.jvm_platform_validate('one', 'two'))
      sandbox.write_build_file(self._bad_one_two)
      self.assert_failure(sandbox.jvm_platform_validate('one', 'two'))

  def test_bad_then_good(self):
    with self.setup_sandbox() as sandbox:
      sandbox.write_build_file(self._bad_one_two)
      self.assert_success(sandbox.clean_all())
      self.assert_failure(sandbox.jvm_platform_validate('one', 'two'))
      sandbox.write_build_file(self._good_one_two)
      self.assert_success(sandbox.jvm_platform_validate('one', 'two'))

  def test_good_caching(self):
    # Make sure targets are cached after a good run.
    with self.setup_sandbox() as sandbox:
      sandbox.write_build_file(self._good_one_two)
      self.assert_success(sandbox.clean_all())
      first_run = sandbox.jvm_platform_validate('one', 'two')
      self.assert_success(first_run)
      self.assertIn('Invalidated 2 targets', first_run.stdout_data)
      second_run = sandbox.jvm_platform_validate('one', 'two')
      self.assert_success(second_run)
      self.assertNotIn('Invalidated 2 targets', second_run.stdout_data)

  def test_bad_caching(self):
    # Make sure targets aren't cached after a bad run.
    with self.setup_sandbox() as sandbox:
      sandbox.write_build_file(self._bad_one_two)
      self.assert_success(sandbox.clean_all())
      first_run = sandbox.jvm_platform_validate('one', 'two')
      self.assert_failure(first_run)
      self.assertIn('Invalidated 2 targets', first_run.stdout_data)
      second_run = sandbox.jvm_platform_validate('one', 'two')
      self.assert_failure(second_run)
      self.assertIn('Invalidated 2 targets', second_run.stdout_data)
| sid-kap/pants | tests/python/pants_test/backend/jvm/tasks/test_jvm_platform_analysis_integration.py | Python | apache-2.0 | 5,205 |
import copy
import operator
import numbers
import numpy as np

# Bug fix: OpBase was imported four times on one line
# ("from .op_base import OpBase, OpBase, OpBase, OpBase");
# one import of the name is sufficient.
from .op_base import OpBase

__all__ = ['add', 'sub', 'mul', 'truediv', 'neg', 'pow', 'sum']
class add(OpBase):
    """Elementwise addition node: a + b."""
    def __init__(self, a, b):
        OpBase.__init__(self, operator.add, (a, b), name="add")
class sub(OpBase):
    """Elementwise subtraction node: a - b."""
    def __init__(self, a, b):
        OpBase.__init__(self, operator.sub, (a, b), name="sub")
class mul(OpBase):
    """Elementwise multiplication node: a * b."""
    def __init__(self, a, b):
        OpBase.__init__(self, operator.mul, (a, b), name="mul")
class truediv(OpBase):
    """Elementwise true division node: a / b."""
    def __init__(self, a, b):
        OpBase.__init__(self, operator.truediv, (a, b), name="div")
class neg(OpBase):
    """Unary negation node wrapping ``operator.neg``."""
    def __init__(self, a):
        OpBase.__init__(self, operator.neg, (a,), name="neg")
class pow(OpBase):
    """Power node wrapping ``operator.pow``.

    NOTE: intentionally shadows the ``pow`` builtin (exported via __all__).
    """
    def __init__(self, a, b):
        OpBase.__init__(self, operator.pow, (a, b), name="pow")
class sum(OpBase):
    """Summation node; reduces over ``axis`` (all axes when None).

    NOTE: intentionally shadows the ``sum`` builtin (exported via __all__).
    """
    def __init__(self, a, axis=None):
        # copy the axis spec so later mutation by the caller cannot change it
        self.axis = copy.copy(axis)
        OpBase.__init__(self, lambda x: x.sum(self.axis),
                        (a,), name='sum')
| qiqi/fds | pascal_lite/operators/arithmetics.py | Python | gpl-3.0 | 1,062 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'/Users/ronmarianetti/nta/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
# _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
# w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
config = {
# Type of model that the rest of these parameters apply to.
'model': "CLA",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': { 'days': 0,
'fields': [ (u'timestamp', 'first'),
(u'gym', 'first'),
(u'consumption', 'mean'),
(u'address', 'first')],
'hours': 0,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalNextStep',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
# at each step 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 0,
# Example:
# dsEncoderSchema = [
# DeferredDictLookup('__field_name_encoder'),
# ],
#
# (value generated from DS_ENCODER_SCHEMA)
'encoders': { 'address': { 'fieldname': u'address',
'n': 100,
'name': u'address',
'type': 'SDRCategoryEncoder',
'w': 7},
'consumption': { 'clipInput': True,
'fieldname': u'consumption',
'maxval': 200,
'minval': 0,
'n': 500,
'name': u'consumption',
'type': 'ScalarEncoder',
'w': 7},
'gym': { 'fieldname': u'gym',
'n': 100,
'name': u'gym',
'type': 'SDRCategoryEncoder',
'w': 7},
'timestamp_dayOfWeek': { 'dayOfWeek': (7, 3),
'fieldname': u'timestamp',
'name': u'timestamp_dayOfWeek',
'type': 'DateEncoder'},
'timestamp_timeOfDay': { 'fieldname': u'timestamp',
'name': u'timestamp_timeOfDay',
'timeOfDay': (7, 8),
'type': 'DateEncoder'}},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
# Valid keys is the desired combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : None,
},
'spEnable': True,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActivePerInhArea': 40,
'seed': 1956,
# coincInputPoolPct
# What percent of the columns's receptive field is available
# for potential synapses. At initialization time, we will
# choose coincInputPoolPct * (2*coincInputRadius+1)^2
'coincInputPoolPct': 0.5,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10. Cells whose activity
# level before inhibition falls below minDutyCycleBeforeInh
# will have their own internal synPermConnectedCell
# threshold set below this default value.
# (This concept applies to both SP and TP and so 'cells'
# is correct here as opposed to 'columns')
'synPermConnected': 0.1,
'synPermActiveInc': 0.1,
'synPermInactiveDec': 0.01,
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tpEnable' : True,
'tpParams': {
# TP diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nta/trunk/py/nupic/research/TP.py and TP10X*.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selector (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
#
# TODO: need better explanation
'newSynapseCount': 15,
# Maximum number of synapses per segment
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSegmentsPerCell': 128,
# Initial Permanence
# TODO: need better explanation
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TP how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
'pamLength': 1,
},
'clParams': {
'regionName' : 'CLAClassifierRegion',
# Classifier diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
'clVerbosity' : 0,
# This controls how fast the classifier learns/forgets. Higher values
# make it adapt faster and forget older patterns faster.
'alpha': 0.001,
# This is set after the call to updateConfigFromSubConfig and is
# computed from the aggregationInfo and predictAheadTime.
'steps': '1',
},
'trainSPNetOnlyIfRequested': False,
},
}
# end of config dictionary
# Adjust base config dictionary for any modifications if imported from a
# sub-experiment
updateConfigFromSubConfig(config)
# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
# Derive the classifier step count (measured in aggregation periods) from
# the requested predict-ahead time.
if config['predictAheadTime'] is not None:
    predictionSteps = int(round(aggregationDivide(
        config['predictAheadTime'], config['aggregationInfo'])))
    # must look ahead at least one aggregation period
    # NOTE(review): assert is stripped under python -O
    assert (predictionSteps >= 1)
    config['modelParams']['clParams']['steps'] = str(predictionSteps)
# Adjust config by applying ValueGetterBase-derived
# futures. NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment (if any)
applyValueGettersToContainer(config)
################################################################################
control = {
# The environment that the current model is being run in
"environment": 'grok',
# Input stream specification per py/grokengine/cluster/database/StreamDef.json.
#
'dataset' : {u'info': u'test_NoProviders',
u'streams': [ { u'columns': [u'*'],
u'info': "test data",
u'source': "file://test_data.csv"}],
u'version': 1},
# Iteration count: maximum number of iterations. Each iteration corresponds
# to one record from the (possibly aggregated) dataset. The task is
# terminated when either number of iterations reaches iterationCount or
# all records in the (possibly aggregated) database have been processed,
# whichever occurs first.
#
# iterationCount of -1 = iterate over the entire dataset
#'iterationCount' : ITERATION_COUNT,
# Metrics: A list of MetricSpecs that instantiate the metrics that are
# computed for this experiment
'metrics':[
MetricSpec(field=u'consumption',inferenceElement=InferenceElement.prediction,
metric='rmse'),
],
# Logged Metrics: A sequence of regular expressions that specify which of
# the metrics from the Inference Specifications section MUST be logged for
# every prediction. The regex's correspond to the automatically generated
# metric labels. This is similar to the way the optimization metric is
# specified in permutations.py.
}
################################################################################
################################################################################
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
control=control)
| Petr-Kovalev/nupic-win32 | tests/integration/py2/nupic/swarming/experiments/dummyV2/description.py | Python | gpl-3.0 | 15,546 |
'''
Simple script to interact with fusion level 02 challenge network daemon,
mkocbayi@gmail.com
'''
from pwn import *
import sys
def doMode(mode): # Either E or Q
    """Send a one-character mode command to the daemon ('E' encrypt, 'Q' quit)."""
    print 'Sending mode call: {}'.format(mode)
    #Specify encryption function
    io.send(mode)
def doEncryption(message, fake_message_size=None):
    """Drive one 'E' (encrypt) transaction against the daemon.

    message -- data to be encrypted by the server
    fake_message_size -- when given, this size is advertised to the server
        instead of len(message) (useful for probing the overflow); the same
        size is also used when reading the reply back
    Returns the encrypted message bytes received from the server.
    """
    doMode('E')
    if fake_message_size is not None:
        message_size = fake_message_size
    else:
        message_size = len(message)
    #Specify message size as little endian 8(d) = \x08\x00\x00\x00
    encryption_size_bytes = p32(message_size) #Use p32, p64, or pack
    print 'Sending message size as bytes\n{}'.format(encryption_size_bytes.encode('hex'))
    print 'Sending message size as bytes\n{}'.format(unpack(encryption_size_bytes))
    #Specify size of message to be encrypted
    io.send(encryption_size_bytes)
    #Generate message and send
    print 'Sending message\n{}'.format(hexdump(message))
    io.send(message)
    # sync up to the server banner before reading the length field
    data = io.recvregex('your file --]\n')
    log.info(data)
    #Server sends message size as 4 bytes little endian
    data = io.recvn(4)
    log.info('Received encrypted message size as bytes\n{}'.format(data.encode('hex')))
    log.info('Size in integer\n{}'.format(unpack(data)))
    encrypted_message = io.recvn(message_size)
    log.info('Received encrypted message\n{}'.format(hexdump(encrypted_message)))
    return encrypted_message
if __name__ == "__main__":
    # usage: script.py <host> <port>
    host = sys.argv[1]
    port = sys.argv[2]
    io = remote(host,int(port))
    #size = 32*4096 # No crash
    # xor key is 32*4 = 128 bytes
    message_size = 32+32
    # request one encryption of a 64-byte buffer, then tell the daemon to quit
    message = 'A'*message_size
    xor_message = doEncryption(message)
    doMode('Q')
| muttiopenbts/fusion | fusion_level02_0.py | Python | gpl-3.0 | 1,689 |
# -*- coding: utf-8 -*-
# Django settings
import os.path
from django.template.defaultfilters import slugify
PROJECT_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# serve media through the staticfiles app.
SERVE_MEDIA = DEBUG
INTERNAL_IPS = [
"127.0.0.1",
]
ADMINS = [
# ("Your Name", "your_email@domain.com"),
]
MANAGERS = ADMINS
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = "US/Eastern"
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en-us"
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, "media")
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = "/media/"
# Absolute path to the directory that holds static files like app media.
# Example: "/home/media/media.lawrence.com/apps/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, "collected_static")
# URL that handles the static files like app media.
# Example: "http://media.lawrence.com"
STATIC_URL = "/static/"
# Additional directories which hold static files
STATICFILES_DIRS = [
os.path.join(PROJECT_ROOT, "static"),
]
# Use the default admin media prefix, which is...
#ADMIN_MEDIA_PREFIX = "/static/admin/"
# List of callables that know how to import templates from various sources.
if DEBUG:
CACHE_BACKEND = 'dummy://'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
else:
CACHE_BACKEND = 'dummy://'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = [
"django.middleware.common.CommonMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"reversion.middleware.RevisionMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"pagination.middleware.PaginationMiddleware",
"django_sorting.middleware.SortingMiddleware",
]
TEMPLATE_DIRS = [
os.path.join(PROJECT_ROOT, "templates"),
]
TEMPLATE_CONTEXT_PROCESSORS = [
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.request",
"django.contrib.messages.context_processors.messages",
"django.core.context_processors.static",
"package.context_processors.used_packages_list",
"grid.context_processors.grid_headers",
"core.context_processors.current_path",
"profiles.context_processors.lazy_profile",
"core.context_processors.core_values",
]
PROJECT_APPS = [
"grid",
'core',
"homepage",
"package",
"profiles",
"apiv1",
"feeds",
"pypi",
"searchv2",
"importer",
]
PREREQ_APPS = [
# Django
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.messages",
"django.contrib.humanize",
"django.contrib.staticfiles",
# external
"uni_form",
"pagination",
"django_extensions",
"south",
"tastypie",
"reversion",
"django_sorting",
#"django_modeler",
# Celery task queue:
#'djcelery',
'social_auth',
]
INSTALLED_APPS = PREREQ_APPS + PROJECT_APPS
FIXTURE_DIRS = [
os.path.join(PROJECT_ROOT, "fixtures"),
]
MESSAGE_STORAGE = "django.contrib.messages.storage.session.SessionStorage"
ABSOLUTE_URL_OVERRIDES = {
"auth.user": lambda o: "/profiles/profile/%s/" % o.username,
}
AUTH_PROFILE_MODULE = "profiles.Profile"
LOGIN_URL = "/login/"
LOGIN_REDIRECT_URLNAME = "home"
EMAIL_CONFIRMATION_DAYS = 2
EMAIL_DEBUG = DEBUG
CACHE_TIMEOUT = 60 * 60
ROOT_URLCONF = "opencomparison.urls"
SECRET_KEY = "CHANGEME"
URCHIN_ID = ""
DEFAULT_FROM_EMAIL = 'Django Packages <djangopackages-noreply@djangopackages.com>'
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_USE_TLS = True
EMAIL_HOST = 'localhost'
EMAIL_HOST_USER = 'djangopackages-noreply@djangopackages.com'
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 25
EMAIL_SUBJECT_PREFIX = '[Django Packages] '
DEBUG_TOOLBAR_CONFIG = {
"INTERCEPT_REDIRECTS": False,
}
# NOTE(review): this block repeats the identical DEBUG branch defined earlier
# in this file (whose if/else twin also sets the same values in both
# branches) -- candidate for consolidation.
if DEBUG:
    CACHE_BACKEND = 'dummy://'
    TEMPLATE_LOADERS = (
        'django.template.loaders.filesystem.Loader',
        'django.template.loaders.app_directories.Loader',
    )
#TEST_RUNNER = 'testrunner.OurTestRunner'
TEST_RUNNER = 'testrunner.OurCoverageRunner'
COVERAGE_MODULE_EXCLUDES = [
'tests$', 'settings$', 'urls$', 'locale$',
'migrations', 'fixtures', 'big_email_send$',
'load_dev_data$', 'fix_grid_element$',
'package_updater$', 'searchv2_build$'
]
COVERAGE_MODULE_EXCLUDES += PREREQ_APPS + ["djkombu", ]
COVERAGE_REPORT_HTML_OUTPUT_DIR = "coverage"
PACKAGINATOR_HELP_TEXT = {
"REPO_URL": "Enter your project repo hosting URL here.<br />Example: https://github.com/opencomparison/opencomparison",
"PYPI_URL": "<strong>Leave this blank if this package does not have a PyPI release.</strong><br />What PyPI uses to index your package. <br />Example: django-uni-form",
}
PACKAGINATOR_SEARCH_PREFIX = "django"
# if set to False any auth user can add/modify packages
# only django admins can delete
RESTRICT_PACKAGE_EDITORS = True
# if set to False any auth user can add/modify grids
# only django admins can delete
RESTRICT_GRID_EDITORS = True
# package extenders are dicts that can include:
# form
# model
# grid_items
# package_displays
PACKAGE_EXTENDERS = []
CELERYD_TASK_TIME_LIMIT = 300
LAUNCHPAD_ACTIVE = False
LOCAL_INSTALLED_APPS = []
SUPPORTED_REPO = []
########################## Site specific stuff
FRAMEWORK_TITLE = "Django"
SITE_TITLE = "Django Packages"
# local_settings.py can be used to override environment-specific settings
# like database and email that differ between development and production.
try:
from local_settings import *
except ImportError:
pass
if LOCAL_INSTALLED_APPS:
INSTALLED_APPS.extend(LOCAL_INSTALLED_APPS)
SUPPORTED_REPO.extend(["bitbucket", "github"])
if LAUNCHPAD_ACTIVE:
SUPPORTED_REPO += ["launchpad"]
# django-social-auth configuration (GitHub login).
AUTHENTICATION_BACKENDS = (
    'social_auth.backends.contrib.github.GithubBackend',
    'django.contrib.auth.backends.ModelBackend',
)
# BUG FIX: ('github') is just the string 'github'; a one-element tuple
# needs the trailing comma.
SOCIAL_AUTH_ENABLED_BACKENDS = ('github',)
SOCIAL_AUTH_COMPLETE_URL_NAME = 'socialauth_complete'
SOCIAL_AUTH_ASSOCIATE_URL_NAME = 'associate_complete'
SOCIAL_AUTH_DEFAULT_USERNAME = lambda u: slugify(u)
SOCIAL_AUTH_EXTRA_DATA = False
SOCIAL_AUTH_CHANGE_SIGNAL_ONLY = True
LOGIN_REDIRECT_URL = '/'
# associate user via email
SOCIAL_AUTH_ASSOCIATE_BY_MAIL = True
DATABASES = {
"default": {
"ENGINE": "postgresql_psycopg2",
"NAME": "oc", # Or path to database file if using sqlite3.
"USER": "", # Not used with sqlite3.
"PASSWORD": "", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
},
}
| audreyr/opencomparison | settings/base.py | Python | mit | 7,994 |
class BaiduSearch(object):
def __init__(self):
pass
def __call__(self, client, api, **kw):
"""
client --
client
api --
"""
client.driver.get("http://www.baidu.com")
input = client.e("#kw")
input.clear()
input.send_keys(kw['keyword'])
submit = client.e("#su")
submit.click()
#path = client.real_path("screen.png")
client.screenshot_as_file("screen.png")
result_list = client.es(".result tr div.f13")
for item in result_list:
print item.text
print "kw:%s" % str(kw)
| emop/webrobot | hello_baidu/libs/actions.py | Python | gpl-2.0 | 725 |
#!/usr/bin/python2
# otl2tags.py
# Convert an OTL file to any tags-based file using config user-
# definable configuration files. HTML, OPML, XML, LATEX and
# many, many others should be easily supportables.
#
# Copyright (c) 2005-2010 Noel Henson All rights reserved
###########################################################################
# Basic function
#
# This program accepts text outline files in Vim Outliners .otl format
# and converts them to a tags-based equivalent
###########################################################################
# include whatever mdules we need
import sys
from ConfigParser import ConfigParser
import re
###########################################################################
# global variables
config = ConfigParser() # configuration
linecount = 0 # outline size in lines
parents = [] # parent stack, (linenum, enum) enum is an order numer
v = {} # variable dictionary for substitution
outline = [] # line tuples (value, indent)
output = [] # output outline
escapeDict = {} # dictionary of character escape codes
debug = 0
inputfile = ""
###########################################################################
# arugment, help and debug functions
# usage
# print debug statements
# input: string
# output: string printed to standard out
def dprint(*vals):
    """Write *vals* to stderr when the module-level debug flag is nonzero."""
    global debug
    if debug:
        print >> sys.stderr, vals
# usage
# print the simplest form of help
# input: none
# output: simple command usage is printed on the console
def showUsage():
print """
Usage:
otl2table.py [options] inputfile
Options
-c config-file
-d debug
--help show help
output filenames are based on the input file name and the config file
"""
# getArgs
# Check for input arguments and set the necessary switches
# input: none
# output: possible console output for help, switch variables may be set
def getArgs():
global inputfile, debug, noTrailing, formatMode, config
if (len(sys.argv) == 1):
showUsage()
sys.exit()()
else:
for i in range(len(sys.argv)):
if (i != 0):
if (sys.argv[i] == "-c"): # test for the type flag
config.read(sys.argv[i + 1]) # read the config
i = i + 1 # increment the pointer
elif (sys.argv[i] == "-d"):
debug = 1
elif (sys.argv[i] == "-?"): # test for help flag
showUsage() # show the help
sys.exit() # exit
elif (sys.argv[i] == "--help"):
showUsage()
sys.exit()
elif (sys.argv[i] == "-h"):
showUsage()
sys.exit()
elif (sys.argv[i][0] == "-"):
print "Error! Unknown option. Aborting"
sys.exit()
else: # get the input file name
inputfile = sys.argv[i]
# printConfig
# Debugging routine to print the parsed configuration file
# input: none
# output: configuration data printed to console
def printConfig():
    """Dump every section/option of the parsed config to stderr (debug aid)."""
    global config
    print >> sys.stderr, "Config ---------------------------------------------"
    list = config.sections()
    for i in range(len(list)):
        print >> sys.stderr
        print >> sys.stderr, list[i]
        for x in config.options(list[i]):
            # skip ConfigParser bookkeeping entries
            if (x != "name") and (x != "__name__"):
                print >> sys.stderr, x, ":", config.get(list[i], x)
    print >> sys.stderr, "----------------------------------------------------"
    print >> sys.stderr
###########################################################################
# low-level outline processing functions
# indentLevel
# get the level of the line specified by linenum
# input: line
# output: returns the level number, 1 is the lowest
def indentLevel(line):
    """Return the 1-based outline level of *line*: one plus the number of
    leading tab and space characters before its text begins."""
    text = line.lstrip()            # the line's content without indent
    start = line.find(text)         # column where the content begins
    depth = line.count("\t", 0, start) + line.count(" ", 0, start)
    return depth + 1
# stripMarker
# return a line without its marker and leading and trailing whitespace
# input: line, marker
# output: stripped line
def stripMarker(line, marker):
    """Return *line* without its leading marker characters and without
    surrounding whitespace."""
    without_marker = line.lstrip(marker)
    return without_marker.strip()
# getLineType
# return the type of the line specified by linenum
# input: line
# output: returns text, usertext, table, preftext, etc.
def getLineType(line):
    """Classify an outline line by its first character.

    Returns one of: 'text', 'preftext', 'usertext', 'userpreftext',
    'table', 'bulletheading', 'numberheading', 'blank' or 'heading'.
    """
    # BUG FIX: the original tested line[0] == '' which can never match a
    # single character, and indexing an empty line raised IndexError;
    # detect blank lines explicitly, as clearly intended.
    if line == '':
        return 'blank'
    marker_types = {
        ':': 'text',
        ';': 'preftext',
        '>': 'usertext',
        '<': 'userpreftext',
        '|': 'table',
        '-': 'bulletheading',
        '+': 'numberheading',
    }
    # any other first character means a plain heading
    return marker_types.get(line[0], 'heading')
# getChildren
# return a list of line numbers for children of the passed line number
# input: linenum
# output: a (possibly) empty list of children
def getChildren(linenum):
    """Return the line numbers of the direct children of outline[linenum]."""
    global outline, linecount
    mylevel = outline[linenum][1]
    childlevel = mylevel + 1
    result = []
    scan = linenum + 1
    # walk forward while still inside this node's subtree
    while scan < linecount and outline[scan][1] > mylevel:
        if outline[scan][1] == childlevel:
            result.append(scan)
        scan = scan + 1
    return result
# subTags
# substitute variables in output expressions
# input: section - section from config
# input: type - object type (to look up in config)
# input: - substitution item (by name) from config array
# output: string - the substitution expression with variables inserted
def subTags(section, type):
    """Fetch config[section][type] and substitute every %-variable from the
    global dictionary v into it; returns the expanded string."""
    global config, v, parents
    # snapshot the variable names before %p is (re)assigned: a freshly-set
    # %p is only picked up on the next call, matching the original
    # evaluation order
    names = list(v.keys())
    pattern = config.get(section, type)
    if len(parents) > 0:
        v["%p"] = str(parents[len(parents) - 1])
    for name in names:
        pattern = re.sub(name, v.get(name), pattern)
    return pattern
#getBlock
#return a list of lines that match a mark (like : or ;)
#input: line number
#output: list of stripped lines
def getBlock(linenum, marker):
    """Collect the consecutive run of lines starting at linenum whose first
    character is *marker*, each returned via stripMarker (marker and
    surrounding whitespace removed)."""
    global outline, linecount
    collected = []
    while True:
        line = outline[linenum][0]
        if line[0] != marker:
            break
        collected.append(stripMarker(line, marker))
        linenum = linenum + 1
        if linenum == linecount:
            break
    return collected
#getUnstrippedBlock
#return a list of lines that match a mark (like : or ;)
#input: line number
#output: list of stripped lines
def getUnstrippedBlock(linenum, marker):
    """Collect the consecutive run of marker lines verbatim.

    NOTE(review): the legacy header comment says 'stripped lines', but the
    lines are returned unmodified (marker and whitespace kept).
    """
    global outline, linecount
    lines = []
    line = outline[linenum][0]
    while line[0] == marker:
        lines.append(line)
        linenum = linenum + 1
        if linenum == linecount:
            break
        line = outline[linenum][0]
    return lines
###########################################################################
# embedded object processing functions
# buildEscapes
# construct the dictionary for escaping special characters
# intput: config:escapes
# output: filled escapes dictionary
def buildEscapes():
    """Fill the global escapeDict from config [Document] 'escapes': a
    space-separated list of 'char,replacement' pairs."""
    escapes = config.get("Document", "escapes")
    if escapes:
        for pair in escapes.split(" "):
            key, value = pair.split(",")
            escapeDict[key] = value
# charEscape
# escape special characters
# input: line
# output: modified line
def charEscape(line):
    """Return *line* with every character mapped through escapeDict
    (characters without an entry pass through unchanged)."""
    out = []
    for ch in line:
        out.append(escapeDict.get(ch, ch))
    return "".join(out)
# getURL
# if there is a url, [url text], return the extracted link, url and value
# input: line
# output: link, url, text
def getURL(line):
    """Find the first '[url text]' construct in *line*.

    Returns (link, url, text) where link is the whole bracketed span
    including brackets, or (None, None, None) when no such construct
    (a bracketed span containing a space) exists.
    """
    pieces = [chunk.split("[") for chunk in line.split("]")]
    for piece in pieces:
        if len(piece) > 1 and " " in piece[1]:
            inner = piece[1]
            url, text = inner.split(" ", 1)
            return "[" + inner + "]", url, text
    return None, None, None
def handleURL(line):
    """Expand the first '[url text]' construct in *line* using the URLs
    config section, and fill (or clear) the '[url]' attribute placeholder.

    Returns the rewritten line.
    """
    # BUG FIX: the original called re.replace(), which does not exist in the
    # re module (it raised AttributeError on every call). These are literal
    # substitutions, so str.replace is the right tool.
    link, url, text = getURL(line)
    if link is None:
        # no URL present: just drop the attribute placeholder
        return line.replace("[url]", "")
    v["%u"] = url
    v["%v"] = text
    text = subTags("URLs", "url")
    line = line.replace(link, text)
    url = subTags("URLs", "url-attr")
    line = line.replace("[url]", url)
    return line
###########################################################################
# outline header processing functions
# all outline object processors accept and output the following:
# input: linenum, enum
# output: print the output for each object
def handleHeading(linenum, enum):
    """Emit output for a plain heading line.

    linenum -- index into the global outline
    enum -- 1-based position of this heading among its siblings
    Recurses into children via handleObjects, pushing onto the global
    parents stack so %p substitutions resolve to the nearest ancestor.
    """
    global outline, parents
    line = outline[linenum][0]
    # url handling
    # extract url data from line
    # replace url object in line
    # subTags line
    # replace url attribute marker
    v["%%"] = line                        # current line text
    v["%l"] = str(outline[linenum][1])    # indent level
    v["%n"] = str(linenum)                # line number
    v["%c"] = str(enum)                   # sibling ordinal
    children = getChildren(linenum)
    if enum == 1:
        # the first sibling opens the heading group
        output.append(subTags("Headings", "before-headings"))
    if children:
        output.append(subTags("Headings", "branch-heading"))
        parents.append([linenum, enum])
        handleObjects(children)
        parents.pop()
        output.append(subTags("Headings", "after-headings"))
    else:
        output.append(subTags("Headings", "leaf-heading"))
def handleBulleted(linenum, enum):
    """Emit output for a '-' bulleted heading; same recursion scheme as
    handleHeading, using the bulleted-* config entries."""
    global outline, parents
    v["%%"] = outline[linenum][0]
    v["%l"] = str(outline[linenum][1])
    v["%n"] = str(linenum)
    v["%c"] = str(enum)
    children = getChildren(linenum)
    if enum == 1:
        output.append(subTags("Headings", "before-bulleted-headings"))
    if children:
        output.append(subTags("Headings", "bulleted-branch-heading"))
        parents.append([linenum, enum])
        handleObjects(children)
        parents.pop()
        output.append(subTags("Headings", "after-bulleted-headings"))
    else:
        output.append(subTags("Headings", "bulleted-leaf-heading"))
def handleNumbered(linenum, enum):
    """Emit output for a '+' numbered heading; same recursion scheme as
    handleHeading, using the numbered-* config entries."""
    global outline, parents
    v["%%"] = outline[linenum][0]
    v["%l"] = str(outline[linenum][1])
    v["%n"] = str(linenum)
    v["%c"] = str(enum)
    children = getChildren(linenum)
    if enum == 1:
        output.append(subTags("Headings", "before-numbered-headings"))
    if children:
        output.append(subTags("Headings", "numbered-branch-heading"))
        parents.append([linenum, enum])
        handleObjects(children)
        parents.pop()
        output.append(subTags("Headings", "after-numbered-headings"))
    else:
        output.append(subTags("Headings", "numbered-leaf-heading"))
###########################################################################
# outline text block processing functions
# all outline object processors accept and output the following:
# input: linenum, enum
# output: print the output for each object
def _handleTextBlock(section, marker, linenum, enum, strip_result):
    # Shared driver for the four text-block flavors (Text, UserText,
    # PrefText, UserPrefText): emit before/text/after tags for the whole
    # block on the first call only.
    # section: config section name; marker: outline block marker character.
    global outline, parents
    if enum != 1:
        return  # only execute for first call
    v["%l"] = str(outline[linenum][1])
    v["%n"] = str(linenum)
    v["%c"] = str(enum)
    block = getBlock(linenum, marker)
    output.append(subTags(section, "before"))
    lines = ""
    for line in block:
        if line == "":
            # a blank line separates paragraphs
            lines = lines + config.get(section, "paragraph-sep")
        else:
            lines = lines + line + config.get(section, "line-sep")
    if strip_result:
        lines = lines.strip()  # remove a possible extra separator
    v["%%"] = lines
    output.append(subTags(section, "text"))
    output.append(subTags(section, "after"))
def handleText(linenum, enum):
    # body text (':' blocks)
    # NOTE(review): unlike the other three flavors, the original did not
    # strip a possible trailing separator here; that asymmetry is kept.
    _handleTextBlock("Text", ':', linenum, enum, False)
def handleUserText(linenum, enum):
    # user text ('>' blocks)
    _handleTextBlock("UserText", '>', linenum, enum, True)
def handlePrefText(linenum, enum):
    # preformatted text (';' blocks)
    _handleTextBlock("PrefText", ';', linenum, enum, True)
def handleUserPrefText(linenum, enum):
    # user preformatted text ('<' blocks)
    _handleTextBlock("UserPrefText", '<', linenum, enum, True)
###########################################################################
# outline table processing functions
# column alignment detection: a table cell is marked for alignment by its
# padding -- two leading spaces push it right, two trailing spaces push it
# left, both center it.
# NOTE(review): the original compared the two-character slices against a
# single space (" "), which can never equal a 2-char slice, so right/left
# detection always returned 0.  The two-space sentinel below restores the
# intended behavior -- confirm against the vimoutliner table docs.
# isAlignRight
# return flag (1/0)
# input: col, a string (unstripped table cell)
def isAlignRight(col):
    l = len(col)
    if (col[0:2] == "  ") and (col[l - 2:l] != "  "):
        return 1
    else:
        return 0
# isAlignLeft
# return flag (1/0)
# input: col, a string (unstripped table cell)
def isAlignLeft(col):
    l = len(col)
    if (col[0:2] != "  ") and (col[l - 2:l] == "  "):
        return 1
    else:
        return 0
# isAlignCenter
# return flag (1/0)
# input: col, a string (unstripped table cell)
def isAlignCenter(col):
    l = len(col)
    if (col[0:2] == "  ") and (col[l - 2:l] == "  "):
        return 1
    else:
        return 0
# handleHeaderRow
# process a header table row (a row starting with "||")
# input: row
# output: append the output for each header cell
def handleHeaderRow(row):
    global outline, parents
    row = row.rstrip("|").lstrip("|")
    columns = row.split("|")
    output.append(subTags("Tables", "before-table-header"))
    for col in columns:
        v["%%"] = col.strip()
        # NOTE(review): the original tested the function objects themselves
        # (always truthy), so every cell took the first branch, and all
        # three branches emitted the "center" tag -- a copy-paste slip.
        # The predicates are now called, and left/right mirror handleRow's
        # key naming; confirm those keys exist in the config sections.
        if isAlignCenter(col):
            output.append(subTags("Tables", "table-header-column-center"))
        elif isAlignLeft(col):
            output.append(subTags("Tables", "table-header-column-left"))
        elif isAlignRight(col):
            output.append(subTags("Tables", "table-header-column-right"))
        else:
            output.append(subTags("Tables", "table-header-column"))
    output.append(subTags("Tables", "after-table-header"))
# handleRow
# process a non-header table row; rows starting with "||" are delegated
# to handleHeaderRow
# input: row
# output: append the output for each cell
def handleRow(row):
    global outline, parents
    if row[0:2] == "||":
        handleHeaderRow(row)
        return
    row = row.rstrip("|").lstrip("|")
    columns = row.split("|")
    output.append(subTags("Tables", "before-table-row"))
    for col in columns:
        v["%%"] = col.strip()
        # BUG FIX: the original tested the bare function objects
        # (`if isAlignCenter:`), which are always truthy, so every cell
        # was emitted as centered.  The predicates must be called on the
        # unstripped cell text.
        if isAlignCenter(col):
            output.append(subTags("Tables", "table-column-center"))
        elif isAlignLeft(col):
            output.append(subTags("Tables", "table-column-left"))
        elif isAlignRight(col):
            output.append(subTags("Tables", "table-column-right"))
        else:
            output.append(subTags("Tables", "table-column"))
    output.append(subTags("Tables", "after-table-row"))
# handleTable
# process a whole table: emit the "before" tag, each row in order, then
# the "after" tag
# input: linenum, enum
# output: append the output for each object
def handleTable(linenum, enum):
    global outline, parents
    if enum != 1:
        return  # the entire table is emitted on the first call
    v["%l"] = str(outline[linenum][1])
    v["%n"] = str(linenum)
    v["%c"] = str(enum)
    rows = getUnstrippedBlock(linenum, '|')
    output.append(subTags("Tables", "before"))
    for row in rows:
        handleRow(row)
    output.append(subTags("Tables", "after"))
###########################################################################
# outline wrapper processing functions
# addPreamble
# create the 'header' for the output document
# input: globals (v, output, config via subTags)
# output: appends the expanded preamble tag to output[]
def addPreamble():
    global outline, v
    # no current line: clear the substitution payload before expanding tags
    v["%%"] = ""
    output.append(subTags("Document", "preamble"))
# addPostamble
# create the 'footer' for the output document
# input: globals (v, output, config via subTags)
# output: appends the expanded postamble tag to output[]
def addPostamble():
    global outline, v
    # no current line: clear the substitution payload before expanding tags
    v["%%"] = ""
    output.append(subTags("Document", "postamble"))
###########################################################################
# outline tree functions
# handleObject
# take an object and invoke the appropriate function to process it
# input: linenum, enum (enum is the child order number of a parent)
# output: print the output of a object
def handleObject(linenum, enum):
    global outline, linecount
    # classify the outline line, then dispatch to the matching handler
    obj = getLineType(outline[linenum][0])
    if obj == 'heading':
        handleHeading(linenum, enum)
    elif obj == 'bulled':
        # NOTE(review): 'bulled' looks like a typo for 'bulleted', but it
        # must match whatever tag getLineType() returns -- confirm there
        # before renaming either side.
        handleBulleted(linenum, enum)
    elif obj == 'numbered':
        handleNumbered(linenum, enum)
    elif obj == 'text':
        handleText(linenum, enum)
    elif obj == 'usertext':
        handleUserText(linenum, enum)
    elif obj == 'preftext':
        handlePrefText(linenum, enum)
    elif obj == 'userpreftext':
        handleUserPrefText(linenum, enum)
    elif obj == 'table':
        handleTable(linenum, enum)
    else:
        # unknown line type: report the offending line number and abort
        print
        print "Error: unknown line type @ ", linenum
        sys.exit(1)
# handleObjects
# process a list of outline objects in order, passing each handler the
# object's 1-based position among its siblings
# input: objs, a list of outline line numbers
# output: print the output of each object
def handleObjects(objs):
    for position, obj in enumerate(objs, 1):
        handleObject(obj, position)
###########################################################################
# file functions
# readFile
# read the selected file into outline[]
# input: filename to be loaded
# output: a loaded-up outline[] and linecount
def readFile(inputfile):
    global outline, linecount, config
    # Build one [escaped-text, indent-level] entry per input line.
    # BUG FIX: the original called `file.close` without parentheses -- a
    # no-op that leaked the handle -- and shadowed the `file` builtin.
    # `with` guarantees the handle is closed even on error.
    with open(inputfile, "r") as infile:
        for linein in infile:
            indent = indentLevel(linein)
            line = charEscape(linein.strip())
            outline.append([line, indent])
    outline[0][1] = 0  # set the first line to level 0
    linecount = len(outline)
###########################################################################
# Main Program Loop
def main():
    global outline, inputfile, linecount
    # get the arguments
    getArgs()
    # construct the escapes dictionary
    buildEscapes()
    # read the input file
    readFile(inputfile)
    # get the title (the outline's first line)
    v["%t"] = outline[0][0].strip()
    # construct the initial data
    # parsing headings, text and tables
    # but not parsing links or images
    addPreamble()
    if config.get("Document", "first-is-node") == "true":
        # treat the first line itself as the root node
        objs = [0]
    else:
        objs = getChildren(0)
    handleObjects(objs)
    addPostamble()
    # handle embedded objects
    # parsing and constructing links, images and other embedded objects
    for i in range(len(output)):
        output[i] = handleURL(output[i])
    # output the final data, skipping blank lines
    for line in output:
        if line.strip() != "":
            print line.strip()
main()
| Kriegslustig/dotfiles | .vim/vimoutliner/scripts/otl2tags.py | Python | unlicense | 20,128 |
#!/usr/bin/env python
import random
from IPython.display import Image, HTML, display
import webbrowser
def showStudents(groups):
    # Display each group's student photos, captioned with member names,
    # inline in an IPython notebook.
    # NB: requires that image file names are the same as student names in
    # the input list (spaces removed, '.jpg' appended).
    photoDir = './photodir/'
    for studentGroup in groups:
        # derive one image path per student from the student's name
        listOfImageNames = [photoDir + name.replace(" ", "") + '.jpg'
                            for name in studentGroup]
        # caption: comma-separated member names (the leading space of the
        # original implementation is kept)
        captionNames = ' ' + ', '.join(studentGroup)
        # assemble the <figure> markup: images followed by an <h1> caption
        imageTags = ["<img style='width: 200px; border: 1px solid black;' src='%s' />" % str(s)
                     for s in listOfImageNames]
        pieces = (["<figure>"] + imageTags + ["<figcaption><h1>"] +
                  [captionNames] + ["</figcaption>"] + ["</figure>"])
        display(HTML(' '.join(pieces)))
def showStudentsInBrowser(groups):
    # Render each group's student photos (one table row of images, one of
    # name captions) into groups.html and open it in a local browser.
    # NB: requires that image file names are the same as student names in
    # the input list (spaces removed, '.jpg' appended).
    # Default browser is Chrome on macOS; change browser_path for another.
    browser_path = 'open -a /Applications/Google\ Chrome.app %s'
    photoDir = './photodir/'
    # html to go before and after the rows generated for student groups
    htmlPreamble = "<!DOCTYPE html><html><head><style>table { font-family: arial, sans-serif; border-collapse: collapse; } td, th { border: 1px solid #dddddd; text-align: center; padding: 0px;} tr:nth-child(even) { background-color: #dddddd;}</style></head><body><table>"
    htmlClosing = "</table></body></html>"
    # `with` guarantees the file is flushed/closed even if a group fails,
    # so the browser never opens a half-written page
    with open("groups.html", "w") as outFile:
        outFile.write(htmlPreamble)
        for studentGroup in groups:
            # parse student names into image file names (strip space, add .jpg)
            listOfImageNames = [photoDir + name.replace(" ", "") + '.jpg'
                                for name in studentGroup]
            imageCells = ["<td><img style='width: 200px; border: 1px solid black;' src='%s' /><td>" % str(s)
                          for s in listOfImageNames]
            captionCells = ["<td><h1> %s </h1><td>" % str(s) for s in studentGroup]
            # one <tr> of images followed by one <tr> of captions
            rowPieces = ["<tr>"] + imageCells + ["</tr><tr>"] + captionCells + ["</tr>"]
            outFile.write(' '.join(rowPieces))
        outFile.write(htmlClosing)
    # display the generated page in the configured browser
    brwsr = webbrowser.get(browser_path)
    brwsr.open_new('groups.html')
def nameCounter(pars):
    # pars[i] gives the number of groups of size i+1; return how many
    # students those groups hold in total.
    # e.g. [0, 8, 1] -> no singles, 8 pairs, 1 triple -> 19 students.
    return sum(size * count for size, count in enumerate(pars, 1))
def createGroups(studentFile, Parameters):
    # Read student names (one per line) from studentFile, shuffle them, and
    # split them into random groups as described by Parameters (see
    # nameCounter for the encoding).  Returns a list of name lists; if the
    # group plan does not cover the roster exactly, prints a complaint and
    # returns an empty list.
    listOfGroups = []
    # read in student names from a file
    studentNames = [line.rstrip() for line in open(studentFile)]
    # refuse to run if the planned group sizes don't add up to the roster
    total = nameCounter(Parameters)
    if total != len(studentNames):
        numStudents = len(studentNames)
        print('There are ' + str(numStudents) + ' students in total. The total number of students included in groups not equal to total number of students! Check input pars.')
    else:
        # shuffle student names and assemble into groups
        random.shuffle(studentNames)
        num = 1  # current group size for this bucket of Parameters
        for item in Parameters:
            if item != 0:
                for _ in range(0, item):
                    # peel the next `num` names off the shuffled roster
                    temp_group = studentNames[0:num]
                    listOfGroups.append(temp_group)
                    # BUG FIX: list.__delslice__ was removed in Python 3;
                    # slice deletion is the portable equivalent
                    del studentNames[0:num]
            num += 1
    return listOfGroups
return listOfGroups | lmwalkowicz/StudentGroupGenerator | student_group_creator.py | Python | mit | 5,414 |
# STAB: subglacial till advection and bedforms
# Thomas E. Barchyn - University of Calgary, Calgary, AB, Canada
# Copyright 2014-2016 Thomas E. Barchyn
# Contact: Thomas E. Barchyn [tbarchyn@gmail.com]
# This project was developed with input from Thomas P.F. Dowling,
# Chris R. Stokes, and Chris H. Hugenholtz. We would appreciate
# citation of the relavent publications.
# Barchyn, T. E., T. P. F. Dowling, C. R. Stokes, and C. H. Hugenholtz (2016),
# Subglacial bed form morphology controlled by ice speed and sediment thickness,
# Geophys. Res. Lett., 43, doi:10.1002/2016GL069558
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# Please familiarize yourself with the license of this tool, available
# in the distribution with the filename: /docs/license.txt
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Generic setup and operations console for the STAB model
# This code is called from a file called 'stab_*.py', which is placed where the model
# runs are to take place. The * represents the version number.
# set directories
# NOTE(review): `repository` (and the os import) are expected to be
# provided by the calling stab_*.py script that exec's this file --
# confirm against that wrapper.
this_dir = os.getcwd()
operations_repository = os.path.join (repository, 'operations')
core_repository = os.path.join (repository, 'bin')
imager_repository = os.path.join (repository, 'imager')
# local working copies of the repository code, under ./stab
program_localdir = os.path.join (this_dir, 'stab')
operations_localdir = os.path.join (program_localdir, 'operations')
core_localdir = os.path.join (program_localdir, 'bin')
imager_localdir = os.path.join (program_localdir, 'imager')
# processing directories (simfiles move pending -> processing -> finished)
pending_dir = os.path.join (this_dir, '1_pending')
processing_dir = os.path.join (this_dir, '2_processing')
finished_dir = os.path.join (this_dir, '3_finished')
static_dir = os.path.join (this_dir, 'static')
# set the imager lockfile name (guards concurrent imager runs)
imager_lockfile_name = os.path.join (imager_localdir, 'LOCKED')
# set the viewer to hires (or not)
view_hires = True
# set the globalmapper viewer (automatic image creation off by default)
make_gm_images = False
def recompile ():
    """
    Recompile the model using the python makefile in the repository.

    Changes into <repository>/src, runs make.py there, and always
    restores the caller's working directory afterwards.
    """
    python_makefile = os.path.join(repository, 'src', 'make.py')
    current_working_dir = os.getcwd()
    os.chdir (os.path.join (repository, 'src'))
    try:
        # try to recompile the program
        execfile(python_makefile)
    except Exception:
        # a bare except here would also swallow KeyboardInterrupt/SystemExit
        print ('ERROR: compile failure')
    finally:
        # always restore the working directory, even on failure
        os.chdir (current_working_dir)
    return
def get_fresh_code ():
    """
    Download a fresh copy of the program from the repository to run
    locally: create any missing working directories, then copy the
    operations, core, and imager code into the local tree.
    """
    print ('Downloading a fresh version of the program . . '),
    # make any directories that are missing; program_localdir must come
    # first because the others nest inside it
    for needed_dir in (program_localdir, operations_localdir, core_localdir,
                       imager_localdir, pending_dir, processing_dir,
                       finished_dir, static_dir):
        if not os.path.isdir (needed_dir):
            os.mkdir (needed_dir)
    # copy each repository directory's files into its local counterpart
    copy_pairs = ((operations_repository, operations_localdir),
                  (core_repository, core_localdir),
                  (imager_repository, imager_localdir))
    for src_dir, dst_dir in copy_pairs:
        for name in os.listdir (src_dir):
            shutil.copy2 (os.path.join (src_dir, name), os.path.join (dst_dir, name))
    print ('complete')
    return
# MAIN
# Interactive control panel (or silent batch worker) for the STAB model.
if __name__ == '__main__':
    # try to make a bin directory if it doesn't exist
    try:
        if not os.path.isdir (core_repository):
            os.mkdir (core_repository)
            print ('made a core repository bin directory')
    except:
        # best-effort: another worker may have created it concurrently
        pass
    # bootstrap: operations.py must exist locally before it can be exec'd;
    # on a first run, download the code and retry
    try:
        execfile (os.path.join (operations_localdir, 'operations.py'))
    except:
        get_fresh_code()
        execfile (os.path.join (operations_localdir, 'operations.py'))
    # load the other functions
    execfile (os.path.join (operations_localdir, 'execute_.py'))
    execfile (os.path.join (operations_localdir, 'simfile.py'))
    # see if we are going to batch run
    if batch_run:
        # initialize a batchfile worker silently. There is a problem occasionally
        # where two workers simultaneously grab a file, and the simfile copy fails
        # this attempts to try again 0.1 second later if there is a failure.
        tries = 0
        while tries < 3:
            try:
                # try to init_batchfile worker, but sometimes this fails
                init_batchfile_worker (run_gm_imager = make_gm_images)
                break
            except:
                time.sleep (0.1) # sleep 0.1 of a second
                tries = tries + 1 # try again
    else:
        # push the user prompt (loops until the user enters a non-number)
        while True:
            print ('------------------------------------------')
            print ('Hi! Welcome to the STAB model operations control panel!')
            print ('Settings:')
            if view_hires:
                print (' > globalmapper viewer mode: high resolution')
            else:
                print (' > globalmapper viewer mode: low resolution')
            if make_gm_images:
                print (' > globalmapper auto image: yes')
            else:
                print (' > globalmapper auto image: no')
            print ('What would you like me to do?')
            print (' 1: Start a batchfile worker')
            print (' 2: Execute a specific simfile')
            print (' 3: Execute a specific simfile in verbose mode')
            print (' 4: Create gm images for finished simulations (requires globalmapper)')
            print (' 5: Interactively view specific simulation (requires globalmapper)')
            print (' 6: Houseclean')
            print (' 7: Get fresh code')
            print (' 8: Recompile and get fresh code (requires g++)')
            print (' 9: Toggle globalmapper viewer resolution')
            print (' 10: Toggle globalmapper auto imager')
            print (' anything else: Quit')
            print ('------------------------------------------')
            # get the response from the user
            res = raw_input('Enter number:\n')
            try:
                res = int(res)
            except:
                print ('I do not understand . . goodbye for now')
                sys.exit()
            # NOTE(review): res is already an int at this point, so the
            # int(res) calls below are redundant (but harmless).
            # execute the simfiles pending
            if int(res) == 1:
                init_batchfile_worker(run_gm_imager = make_gm_images)
            # execute specific simfile
            elif int(res) == 2:
                res = raw_input ('Enter simfile name:\n')
                execute_simfile (res, verbose = False, run_gm_imager = make_gm_images)
            # execute specific simfile in verbose mode
            elif int(res) == 3:
                res = raw_input ('Enter simfile name:\n')
                execute_simfile (res, verbose = True, run_gm_imager = make_gm_images)
            # run imaging script
            elif int(res) == 4:
                gm_image_finalized_sims()
            # interactively image specific simulation
            elif int(res) == 5:
                gm_image_interactive_sim()
            # houseclean
            elif int(res) == 6:
                houseclean()
            # get fresh code (wipe the local copy first)
            elif int(res) == 7:
                shutil.rmtree (program_localdir)
                get_fresh_code()
            # recompile and get fresh code
            elif int(res) == 8:
                shutil.rmtree (program_localdir)
                recompile()
                get_fresh_code()
            # toggle gm viewer resolution
            elif int(res) == 9:
                if view_hires:
                    view_hires = False
                else:
                    view_hires = True
            # toggle gm viewer auto imager
            elif int(res) == 10:
                if make_gm_images:
                    make_gm_images = False
                else:
                    make_gm_images = True
            else:
                break
| tbarchyn/STAB_1.0 | operations/control_panel.py | Python | gpl-3.0 | 9,285 |
"""
Palindrome REST Web Service
Copyright (C) 2015 Pierre Jodouin
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
from django.shortcuts import render
from django.contrib.auth.models import User
from rest_framework import viewsets, mixins
from rest_framework import permissions
from rest.permissions import IsOwnerOrReadOnly
from rest.models import *
from rest.serializers import *
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
class UserViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows users to be viewed or edited.
    """
    queryset = User.objects.all()
    serializer_class = UserSerializer
    permission_classes = (permissions.DjangoModelPermissions,)
    @list_route()
    def recent_users(self, request):
        """List users ordered by most recent login, paginated when enabled."""
        ordered = User.objects.all().order_by('-last_login')
        page = self.paginate_queryset(ordered)
        if page is None:
            # pagination disabled: serialize the full queryset
            return Response(self.get_serializer(ordered, many=True).data)
        return self.get_paginated_response(self.get_serializer(page, many=True).data)
class GroupViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows groups to be viewed or edited.

    Provides the standard list/create/retrieve/update/destroy actions for
    auth Groups; access is gated by Django model permissions.
    """
    queryset = Group.objects.all()
    serializer_class = GroupSerializer
    permission_classes = (permissions.DjangoModelPermissions,)
class MessageViewSet(mixins.CreateModelMixin,
                     mixins.UpdateModelMixin,  # optional
                     mixins.ListModelMixin,
                     mixins.RetrieveModelMixin,
                     mixins.DestroyModelMixin,
                     viewsets.GenericViewSet):
    """
    API endpoint that allows messages to be created, listed, viewed,
    updated or deleted.

    All standard actions (list, create, retrieve, update, partial_update,
    destroy) come from the mixins above.  The original class carried a
    large block of commented-out pass-through overrides that only called
    super() and added no behavior; that dead code has been removed.
    Write access requires authentication, and only a message's owner may
    modify it (IsOwnerOrReadOnly).
    """
    queryset = Message.objects.all()
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly)
    def perform_create(self, serializer):
        # automatically save the logged-in user with the message
        serializer.save(user=self.request.user)
| pjodouin/palindrome | rest/views.py | Python | gpl-2.0 | 4,201 |
#!/usr/bin/python
import sys
from week1 import PatternCount
from week1 import FrequentWords
from week1 import ReverseComplement
from week1 import PatternMatching
from week1 import ComputingFrequencies
from week1 import PatternToNumber, NumberToPattern
from week1 import ClumpFinding, BetterClumpFinding
def main():
    # Dispatch a bioinformatics week-1 exercise by method id.
    # argv[1]: method id (see branches below); argv[2]: input file whose
    # lines carry that method's arguments.
    # All prints use the parenthesized single-argument form, which behaves
    # identically under Python 2 and Python 3.
    methodId = int(sys.argv[1])
    lines = None
    with open(sys.argv[2]) as f:
        lines = f.readlines()
    # Pattern Count
    if methodId == 1:
        text = lines[0].strip()
        pattern = lines[1].strip()
        print(PatternCount(text, pattern))
    # Frequent Words
    elif methodId == 2:
        text = lines[0].strip()
        count = int(lines[1].strip())
        print(' '.join(FrequentWords(text, count)))
    # Reverse Complement
    elif methodId == 3:
        pattern = lines[0].strip()
        print(ReverseComplement(pattern))
    # Pattern Matching
    elif methodId == 4:
        pattern = lines[0].strip()
        # BUG FIX: the genome is on the second input line; the original
        # re-read line 0, matching the pattern against itself.
        genome = lines[1].strip()
        print(' '.join(map(str, PatternMatching(pattern, genome))))
    # Computing Frequencies
    elif methodId == 5:
        text = lines[0].strip()
        k = int(lines[1].strip())
        print(' '.join(map(str, ComputingFrequencies(text, k))))
    # Computing Frequencies - PatternToNumber
    elif methodId == 501:
        pattern = lines[0].strip()
        print(PatternToNumber(pattern))
    # Computing Frequencies - NumberToPattern
    elif methodId == 502:
        pattern = int(lines[0].strip())
        length = int(lines[1].strip())
        print(NumberToPattern(pattern, length))
    # Clump Finding
    elif methodId == 6:
        genome = lines[0].strip()
        numbers = lines[1].strip()
        k, L, t = numbers.split()
        print(' '.join(map(str, BetterClumpFinding(genome, int(k), int(L), int(t)))))
if __name__ == '__main__':
    main()
| abhyasi/coursera | bioinformatics_i/week1/week1_client.py | Python | mit | 1,853 |
#!/usr/bin/env python
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unittest for helpers module."""
import sys
import unittest
import _helpers
from gflags.flags_modules_for_testing import module_bar
from gflags.flags_modules_for_testing import module_foo
class FlagSuggestionTest(unittest.TestCase):
  """Tests for _DamerauLevenshtein and GetFlagSuggestions."""

  def setUp(self):
    # gcc-style long options used as the candidate flag corpus; a trailing
    # '=' marks flags that take a value
    self.longopts = [
        'fsplit-ivs-in-unroller=',
        'fsplit-wide-types=',
        'fstack-protector=',
        'fstack-protector-all=',
        'fstrict-aliasing=',
        'fstrict-overflow=',
        'fthread-jumps=',
        'ftracer',
        'ftree-bit-ccp',
        'ftree-builtin-call-dce',
        'ftree-ccp',
        'ftree-ch']

  def testDamerauLevenshteinId(self):
    # identical strings have distance 0
    self.assertEqual(0, _helpers._DamerauLevenshtein('asdf', 'asdf'))

  def testDamerauLevenshteinEmpty(self):
    # distance to the empty string is the other string's length
    self.assertEqual(5, _helpers._DamerauLevenshtein('', 'kites'))
    self.assertEqual(6, _helpers._DamerauLevenshtein('kitten', ''))

  def testDamerauLevenshteinCommutative(self):
    # distance is symmetric in its arguments
    self.assertEqual(2, _helpers._DamerauLevenshtein('kitten', 'kites'))
    self.assertEqual(2, _helpers._DamerauLevenshtein('kites', 'kitten'))

  def testDamerauLevenshteinTransposition(self):
    # a single adjacent transposition counts as one edit
    self.assertEqual(1, _helpers._DamerauLevenshtein('kitten', 'ktiten'))

  def testMispelledSuggestions(self):
    # underscores instead of dashes (and no '=') still finds the flag
    suggestions = _helpers.GetFlagSuggestions('fstack_protector_all',
                                              self.longopts)
    self.assertEqual(['fstack-protector-all'], suggestions)

  def testAmbiguousPrefixSuggestion(self):
    # an ambiguous prefix yields every matching flag
    suggestions = _helpers.GetFlagSuggestions('fstack', self.longopts)
    self.assertEqual(['fstack-protector', 'fstack-protector-all'], suggestions)

  def testMisspelledAmbiguousPrefixSuggestion(self):
    # a misspelled ambiguous prefix still yields the close matches
    suggestions = _helpers.GetFlagSuggestions('stack', self.longopts)
    self.assertEqual(['fstack-protector', 'fstack-protector-all'], suggestions)

  def testCrazySuggestion(self):
    # nothing close enough: no suggestions at all
    suggestions = _helpers.GetFlagSuggestions('asdfasdgasdfa', self.longopts)
    self.assertEqual([], suggestions)
class GetCallingModuleTest(unittest.TestCase):
  """Test whether we correctly determine the module which defines the flag."""

  def testGetCallingModule(self):
    # direct callers: this script and the two helper test modules
    self.assertEqual(_helpers.GetCallingModule(), sys.argv[0])
    self.assertEqual(
        module_foo.GetModuleName(),
        'gflags.flags_modules_for_testing.module_foo')
    self.assertEqual(
        module_bar.GetModuleName(),
        'gflags.flags_modules_for_testing.module_bar')
    # We execute the following exec statements for their side-effect
    # (i.e., not raising an error). They emphasize the case that not
    # all code resides in one of the imported modules: Python is a
    # really dynamic language, where we can dynamically construct some
    # code and execute it.
    code = ('import _helpers\n'
            'module_name = _helpers.GetCallingModule()')
    exec(code)  # pylint: disable=exec-used
    # Next two exec statements executes code with a global environment
    # that is different from the global environment of any imported
    # module.
    exec(code, {})  # pylint: disable=exec-used
    # vars(self) returns a dictionary corresponding to the symbol
    # table of the self object. dict(...) makes a distinct copy of
    # this dictionary, such that any new symbol definition by the
    # exec-ed code (e.g., import flags, module_name = ...) does not
    # affect the symbol table of self.
    exec(code, dict(vars(self)))  # pylint: disable=exec-used
    # Next test is actually more involved: it checks not only that
    # GetCallingModule does not crash inside exec code, it also checks
    # that it returns the expected value: the code executed via exec
    # code is treated as being executed by the current module. We
    # check it twice: first time by executing exec from the main
    # module, second time by executing it from module_bar.
    global_dict = {}
    exec(code, global_dict)  # pylint: disable=exec-used
    self.assertEqual(global_dict['module_name'],
                     sys.argv[0])
    global_dict = {}
    module_bar.ExecuteCode(code, global_dict)
    self.assertEqual(
        global_dict['module_name'],
        'gflags.flags_modules_for_testing.module_bar')

  def testGetCallingModuleWithIteritemsError(self):
    # This test checks that GetCallingModule is using
    # sys.modules.items(), instead of .iteritems().
    orig_sys_modules = sys.modules

    # Mock sys.modules: simulates error produced by importing a module
    # in parallel with our iteration over sys.modules.iteritems().
    class SysModulesMock(dict):

      def __init__(self, original_content):
        dict.__init__(self, original_content)

      def iteritems(self):
        # Any dictionary method is fine, but not .iteritems().
        raise RuntimeError('dictionary changed size during iteration')

    sys.modules = SysModulesMock(orig_sys_modules)
    try:
      # _GetCallingModule should still work as expected:
      self.assertEqual(_helpers.GetCallingModule(), sys.argv[0])
      self.assertEqual(
          module_foo.GetModuleName(),
          'gflags.flags_modules_for_testing.module_foo')
    finally:
      # always restore the real sys.modules, even if an assertion failed
      sys.modules = orig_sys_modules
class IsRunningTestTest(unittest.TestCase):
  # Sanity check: while running under unittest, IsRunningTest() is true.

  def testUnderTest(self):
    self.assertTrue(_helpers.IsRunningTest())


def main():
  # Delegate to unittest's command-line runner.
  unittest.main()


if __name__ == '__main__':
  main()
| damienmg/bazel | third_party/py/gflags/gflags/_helpers_test.py | Python | apache-2.0 | 6,904 |
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Defines operators used in SQL expressions."""
from operator import and_, or_, inv, add, mul, sub, div, mod, truediv, \
lt, le, ne, gt, ge, eq
from sqlalchemy.util import Set, symbol
def from_():
    # Marker for the SQL FROM clause; used only as a _PRECEDENCE key below.
    # Calling it is an error.
    raise NotImplementedError()

def as_():
    # Marker for SQL AS (aliasing); precedence key only.
    raise NotImplementedError()

def exists():
    # Marker for SQL EXISTS; precedence key only.
    raise NotImplementedError()

def is_():
    # Marker for SQL IS; precedence key only.
    raise NotImplementedError()

def isnot():
    # Marker for SQL IS NOT; precedence key only.
    raise NotImplementedError()

def collate():
    # Marker for SQL COLLATE; precedence key only.
    raise NotImplementedError()
def op(a, opstring, b):
    """Apply the custom operator named ``opstring``: ``a <opstring> b``."""
    return a.op(opstring)(b)

def like_op(a, b, escape=None):
    """SQL LIKE, delegated to the left operand."""
    return a.like(b, escape=escape)

def notlike_op(a, b, escape=None):
    # Marker for NOT LIKE; precedence key only.
    raise NotImplementedError()

def ilike_op(a, b, escape=None):
    """Case-insensitive LIKE (ILIKE), delegated to the left operand."""
    return a.ilike(b, escape=escape)

def notilike_op(a, b, escape=None):
    # Marker for NOT ILIKE; precedence key only.
    raise NotImplementedError()

def between_op(a, b, c):
    """SQL BETWEEN: b <= a <= c."""
    return a.between(b, c)

def in_op(a, b):
    """SQL IN; the collection ``b`` is unpacked into the IN list."""
    return a.in_(*b)

def notin_op(a, b):
    # Marker for NOT IN; precedence key only.
    raise NotImplementedError()

def distinct_op(a):
    """SQL DISTINCT, delegated to the operand."""
    return a.distinct()

def startswith_op(a, b, escape=None):
    """Prefix match, delegated to the left operand."""
    return a.startswith(b, escape=escape)

def endswith_op(a, b, escape=None):
    """Suffix match, delegated to the left operand."""
    return a.endswith(b, escape=escape)

def contains_op(a, b, escape=None):
    """Substring match, delegated to the left operand."""
    return a.contains(b, escape=escape)

def comma_op(a, b):
    # Marker for comma-separated lists; precedence key only.
    raise NotImplementedError()

def concat_op(a, b):
    """String concatenation, delegated to the left operand."""
    return a.concat(b)

def desc_op(a):
    """ORDER BY ... DESC, delegated to the operand."""
    return a.desc()

def asc_op(a):
    """ORDER BY ... ASC, delegated to the operand."""
    return a.asc()
# Operators for which "a OP b" is equivalent to "b OP a".
_commutative = Set([eq, ne, add, mul])

def is_commutative(op):
    """Return True if ``op`` is a commutative operator."""
    return op in _commutative
# Sentinels bounding the precedence scale; operators missing from the table
# fall back to one of these in is_precedent() below.
_smallest = symbol('_smallest')
_largest = symbol('_largest')

# Relative binding strength of each operator (higher binds more tightly).
# Consulted by is_precedent() when deciding how expressions nest.
_PRECEDENCE = {
    from_:15,
    mul:7,
    div:7,
    mod:7,
    add:6,
    sub:6,
    concat_op:6,
    ilike_op:5,
    notilike_op:5,
    like_op:5,
    notlike_op:5,
    in_op:5,
    notin_op:5,
    is_:5,
    isnot:5,
    eq:5,
    ne:5,
    gt:5,
    lt:5,
    ge:5,
    le:5,
    between_op:5,
    distinct_op:5,
    inv:5,
    and_:3,
    or_:2,
    comma_op:-1,
    collate: -2,
    as_:-1,
    exists:0,
    _smallest: -1000,
    _largest: 1000
}
def is_precedent(operator, against):
    """Return True if ``operator`` binds no more tightly than ``against``.

    Operators absent from the precedence table fall back to the sentinels:
    an unknown ``operator`` counts as lowest precedence, an unknown
    ``against`` as highest.
    """
    operator_precedence = _PRECEDENCE.get(operator, _PRECEDENCE[_smallest])
    against_precedence = _PRECEDENCE.get(against, _PRECEDENCE[_largest])
    return operator_precedence <= against_precedence
| chirpradio/chirpradio-volunteers | site-packages/sqlalchemy/sql/operators.py | Python | mit | 2,286 |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from parlai.utils.testing import AutoTeacherTest
class TestDefaultTeacher(AutoTeacherTest):
    # Exercises the default "hotpotqa" task.
    task = "hotpotqa"


class TestDistractorTeacher(AutoTeacherTest):
    # Exercises the "hotpotqa:distractor" task variant.
    task = "hotpotqa:distractor"


class TestFullwikiTeacher(AutoTeacherTest):
    # Exercises the "hotpotqa:fullwiki" task variant.
    task = "hotpotqa:fullwiki"
| facebookresearch/ParlAI | parlai/tasks/hotpotqa/test.py | Python | mit | 474 |
"""
"""
import widget
class Form(widget.Widget):
    """A form that automatically will contain all named widgets.

    After a form is created, all named widgets that are subsequently created
    are added to that form. You may use dict style access to access named
    widgets.

    Example:
        f = gui.Form()
        w = gui.Input("Phil", name="firstname")
        w = gui.Input("Hassey", name="lastname")

        print(f.results())
        print(f.items())
        print(f['firstname'].value)
        print(f['lastname'].value)
    """

    # The current form instance; newly created named widgets register here.
    form = None

    # The list of PGU widgets that are tracked by this form
    _elist = None

    # A mapping of PGU widgets tracked by this form (name -> instance)
    _emap = None

    # The dirty flag is set when a new widget is added to the form
    _dirty = 0

    def __init__(self):
        widget.Widget.__init__(self, decorate=False)
        self._elist = []
        self._emap = {}
        self._dirty = 0
        # Register this form as the one used by new widgets
        Form.form = self

    def add(self, e, name=None, value=None):
        """Add a PGU widget to this form, optionally (re)naming it.

        Identity comparison with None ('is not None') replaces the previous
        '!= None', which could misfire on widgets overriding __ne__.
        """
        if name is not None:
            e.name = name
        if value is not None:
            e.value = value
        self._elist.append(e)
        self._dirty = 1

    def _clean(self):
        # Remove elements from our list if they no longer have an assigned
        # name; iterate over a copy so in-place removal is safe.
        for e in self._elist[:]:
            if not hasattr(e, 'name') or e.name is None:
                self._elist.remove(e)
        # Rebuild the name-to-widget mapping
        self._emap = {}
        for e in self._elist:
            self._emap[e.name] = e
        self._dirty = 0

    def __getitem__(self, k):
        """Return the widget instance given the name of the widget."""
        if self._dirty:
            self._clean()
        return self._emap[k]

    def __contains__(self, k):
        """Return True if this form contains the named widget."""
        if self._dirty:
            self._clean()
        return k in self._emap

    def results(self):
        """Return a dict of name, widget-value pairs."""
        if self._dirty:
            self._clean()
        r = {}
        for e in self._elist:
            # Make sure the widget has a 'value' (eg tables do not)
            if hasattr(e, "value"):
                r[e.name] = e.value
            else:
                r[e.name] = None
        return r

    def items(self):
        """Return the name, widget-value pairs of results()."""
        return self.results().items()
| bendmorris/jython-pgu | pgu/gui/form.py | Python | lgpl-2.1 | 2,629 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from dashboard import email_template
class EmailTemplateTest(unittest.TestCase):
    """Tests for dashboard.email_template report-link generation."""

    def testURLEncoding(self):
        # Test-path components and the "%" characters must be percent-encoded
        # when the full report URL (protocol + host) is built.
        actual_output = email_template.GetReportPageLink(
            'ABC/bot-name/abc-perf-test/passed%', '1415919839')
        self.assertEquals(('https://chromeperf.appspot.com/report?masters=ABC&'
                           'bots=bot-name&tests=abc-perf-test%2Fpassed%25'
                           '&checked=passed%25%2Cpassed%25_ref%2Cref&'
                           'rev=1415919839'), actual_output)
        # With add_protocol_and_host=False only path + query string is
        # returned.
        actual_output_no_host = email_template.GetReportPageLink(
            'ABC/bot-name/abc-perf-test/passed%', '1415919839',
            add_protocol_and_host=False)
        self.assertEquals(('/report?masters=ABC&bots=bot-name&tests='
                           'abc-perf-test%2Fpassed%25&checked=passed%25%2C'
                           'passed%25_ref%2Cref&rev=1415919839'),
                          actual_output_no_host)
if __name__ == '__main__':
unittest.main()
| modulexcite/catapult | dashboard/dashboard/email_template_test.py | Python | bsd-3-clause | 1,132 |
# -*- coding: utf-8 -*-
# Copyright 2014 Jean-Francois Paris
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from zopaclient.api import *
__version__ = "2.0.0" | jfparis/ZopaClient | zopaclient/__init__.py | Python | lgpl-3.0 | 811 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from shrls import app
if __name__ == '__main__':
    # Development server: debug mode enabled, reachable on all interfaces.
    app.run(debug=True, host='0.0.0.0')
| demophoon/shrls | app.py | Python | mit | 137 |
'''
author : "George Profenza"
url : ("disturb", "disturbmedia.com/blog","My blog, http://tomaterial.blogspot.com")
Export meshes to the three.js 3D engine by mr.doob et al.
More details on the engine here:
https://github.com/mrdoob/three.js
Currently supports UVs. If the model doesn't display correctly
you might need to reverse some normals/do some cleanup.
Also, if you use Selection Tags and basic ColorMaterials,
the colours will be picked up as face colors. Call autoColor() on the
model you use for this.
The mesh transformations(position, rotation, scale) are saved
and you can get them using: getPosition(), getRotation() and getScale()
each returning a THREE.Vector3
In short
var myGeom = new myC4DGeom();
var myModel = new THREE.Mesh( myGeom, new THREE.MeshFaceMaterial());
//set transforms
model.position = myGeom.getPosition()
model.rotation = myGeom.getRotation()
model.scale = myGeom.getScale()
//set selection tags colours
myGeom.autoColor()
More details on this exporter and more js examples here:
https://github.com/orgicus/three.js
Have fun!
This script requires Cinema 4D R11.5 minimum and the Py4D Plugin:
http://www.py4d.com/get-py4d/
'''
import c4d
from c4d import documents, UVWTag, storage, plugins, gui, modules, bitmaps, utils
from c4d.utils import *
''' from c4d import symbols as sy, plugins, utils, bitmaps, gui '''
import math
import re
# utils
# Sanitize an arbitrary object name into a valid JS identifier: any
# non-word character, or a leading digit, becomes an underscore.
clean = lambda varStr: re.sub('\W|^(?=\d)','_', varStr)
# from Active State's Python recipies: http://code.activestate.com/recipes/266466-html-colors-tofrom-rgb-tuples/
def RGBToHTMLColor(rgb_tuple):
    """Convert an (r, g, b) tuple into a '0xRRGGBB' hex colour string."""
    red, green, blue = rgb_tuple
    return '0x%02x%02x%02x' % (red, green, blue)
def Export():
if not op: return
if op.GetType() != 5100:
print 'Selected Object is not an editable mesh'
return
unit = 0.001#for scale
fps = doc.GetFps()
bd = doc.GetRenderBaseDraw()
scr = bd.GetFrameScreen()
rd = doc.GetActiveRenderData()
name = op.GetName()
classname = clean(name)
c4dPath = c4d.storage.GeGetC4DPath(c4d.C4D_PATH_LIBRARY)
jsFile = open(c4dPath+'/scripts/Three.js','r')
js = jsFile.read()
htmlFile = open(c4dPath+'/scripts/template.html','r')
html = htmlFile.read()
html = html.replace('%s',classname)
code = 'var %s = function () {\n\n\tvar scope = this;\n\n\tTHREE.Geometry.call(this);\n\n' % classname
def GetMesh(code):
# goto 0
doc.SetTime(c4d.BaseTime(0, fps))
c4d.DrawViews( c4d.DA_ONLY_ACTIVE_VIEW|c4d.DA_NO_THREAD|c4d.DA_NO_REDUCTION|c4d.DA_STATICBREAK )
c4d.GeSyncMessage(c4d.EVMSG_TIMECHANGED)
doc.SetTime(doc.GetTime())
c4d.EventAdd(c4d.EVENT_ANIMATE)
SendModelingCommand(command = c4d.MCOMMAND_REVERSENORMALS, list = [op], mode = c4d.MODIFY_ALL, bc = c4d.BaseContainer(), doc = doc)
verts = op.GetAllPoints()
for v in verts:
code += '\tv( %.6f, %.6f, %.6f );\n' % (v.x, -v.y, v.z)
code += '\n'
ncount = 0
uvcount = 0
faces = op.GetAllPolygons()
normals = op.CreatePhongNormals()
ndirection = 1
hasUV = False
for tag in op.GetTags():
if tag.GetName() == "UVW":
uvw = tag
hasUV = True
for f in faces:
if(f.d == f.c):
if(normals):
code += '\tf3( %d, %d, %d, %.6f, %.6f, %.6f );\n' % (f.a, f.b, f.c, normals[ncount].x*ndirection, normals[ncount].y*ndirection, normals[ncount].z*ndirection)
else:
code += '\tf3( %d, %d, %d );\n' % (f.a, f.b, f.c)
else:
if(normals):
code += '\tf4( %d, %d, %d, %d, %.6f, %.6f, %.6f );\n' % (f.a, f.b, f.c, f.d, normals[ncount].x*ndirection, normals[ncount].y*ndirection, normals[ncount].z*ndirection)
else:
code += '\tf4( %d, %d, %d, %d );\n' % (f.a, f.b, f.c, f.d)
if hasUV:
uv = uvw.GetSlow(uvcount);
# uvs += '[Vector('+str(uv[0].x)+','+str(1.0-uv[0].y)+'),Vector('+str(uv[1].x)+','+str(1.0-uv[1].y)+'),Vector('+str(uv[2].x)+','+str(1.0-uv[2].y)+')],'
if len(uv) == 4:
# {'a': Vector(1, 1, 0), 'c': Vector(0, 0, 0), 'b': Vector(1, 0, 0), 'd': Vector(0, 1, 0)}
code += '\tuv( %.6f, %.6f, %.6f, %.6f, %.6f, %.6f, %.6f, %.6f);\n' % (uv['a'].x, uv['a'].y, uv['b'].x, uv['b'].y, uv['b'].x, uv['b'].y, uv['c'].x, uv['c'].y)
else:
code += '\tuv( %.6f, %.6f, %.6f, %.6f, %.6f, %.6f);\n' % (uv['a'].x, uv['a'].y, uv['b'].x, uv['b'].y, uv['c'].x, uv['c'].y)
ncount += 1
uvcount += 1
code +='\n\tthis.computeCentroids();\n\tthis.computeNormals(true);\n'
#selection color
code +='\n\tscope.colors = {};\n'
code +='\tscope.selections = {};\n'
selName = ''
for tag in op.GetTags():
if(tag.GetType() == 5616): #texture tag
material = tag.GetMaterial()
color = material[c4d.MATERIAL_COLOR_COLOR]
tag.SetBit(c4d.BIT_ACTIVE)
selName = clean(tag[c4d.TEXTURETAG_RESTRICTION])
if len(selName) == 0: print "*** WARNING! *** Missing selection name for material: " + material.GetName()
code += '\tscope.colors["'+selName+'"] = '+str(RGBToHTMLColor((color.x*255,color.y*255,color.z*255)))+';\n'
if tag.GetType() == 5673: #selection tag
print 'selection: ' + tag.GetName()
print 'selection object: ' + tag
sel = tag.GetSelection()
selName = clean(tag.GetName())
ids = sel.GetAll(op.GetPointCount())
indices = [i for i, e in enumerate(ids) if e != 0]
code += '\tscope.selections["'+selName+'"] = '+str(indices)+';\n'
code += '\n\tscope.autoColor = function(){\n'
code += '\t\tfor(var s in this.selections){\n'
code += '\t\t\tfor(var i = 0 ; i < this.selections[s].length; i++) this.faces[this.selections[s][i]].material = [new THREE.MeshBasicMaterial({color:this.colors[s]})];\n'
code += '\t\t}\n\t}\n'
# model position, rotation, scale rotation x,y,z = H,P,B => three.js x,y,z is P,H,B => y,x,z
p = op.GetPos()
r = op.GetRot()
s = op.GetScale()
code += '\n\tscope.getPosition = function(){\treturn new THREE.Vector3'+str((p.x,p.y,p.z))+';\t}\n'
code += '\n\tscope.getRotation = function(){\treturn new THREE.Vector3'+str((r.y,r.x,r.z))+';\t}\n'
code += '\n\tscope.getScale = function(){\treturn new THREE.Vector3'+str((s.x,s.y,s.z))+';\t}\n'
code += '\n'
code += '\tfunction v( x, y, z ) {\n\n'
code += '\t\tscope.vertices.push( new THREE.Vertex( new THREE.Vector3( x, y, z ) ) );\n\n'
code += '\t}\n\n'
code += '\tfunction f3( a, b, c, nx, ny, nz ) {\n\n'
code += '\t\tscope.faces.push( new THREE.Face3( a, b, c, nx && ny && nz ? new THREE.Vector3( nx, ny, nz ) : null ) );\n\n'
code += '\t}\n\n'
code += '\tfunction f4( a, b, c, d, nx, ny, nz ) {\n\n'
code += '\t\tscope.faces.push( new THREE.Face4( a, b, c, d, nx && ny && nz ? new THREE.Vector3( nx, ny, nz ) : null ) );\n\n'
code += '\t}\n\n'
code += '\tfunction uv( u1, v1, u2, v2, u3, v3, u4, v4 ) {\n\n'
code += '\t\tvar uv = [];\n'
code += '\t\tuv.push( new THREE.UV( u1, v1 ) );\n'
code += '\t\tuv.push( new THREE.UV( u2, v2 ) );\n'
code += '\t\tuv.push( new THREE.UV( u3, v3 ) );\n'
code += '\t\tif ( u4 && v4 ) uv.push( new THREE.UV( u4, v4 ) );\n'
code += '\t\tscope.uvs.push( uv );\n'
code += '\t}\n\n'
code += '}\n\n'
code += '%s.prototype = new THREE.Geometry();\n' % classname
code += '%s.prototype.constructor = %s;' % (classname, classname)
SendModelingCommand(command = MCOMMAND_REVERSENORMALS, list = [op], mode = MODIFY_ALL, bc = c4d.BaseContainer(), doc = doc)
return code
code = GetMesh(code)
docPath = doc.GetDocumentPath()
jspath = docPath+'/'+classname+'.js'
htmlpath = docPath+'/'+classname+'.html'
file = open(jspath,'w')
file.write(code)
file.close()
file = open(htmlpath,'w')
file.write(html)
file.close()
file = open(docPath+'/Three.js','w')
file.write(js)
file.close()
print 'Export Complete!'
Export() | flyingoctopus/three.js | utils/exporters/cinema4d/export_to_three.js.py | Python | mit | 8,574 |
"""
Write a program that will help you play poker by telling you what kind of hand you have.
Input:
The first line of input contains the number of test cases (no more than 20). Each test case consists of one line - five
space separated cards. Each card is represented by a two-letter (or digit) word. The first character is the rank
(A,K,Q,J,T,9,8,7,6,5,4,3 or 2), the second character is the suit (S,H,D,C standing for spades, hearts, diamonds and
clubs). The cards can be in any order (but they will not repeat).
Output:
For each test case output one line describing the type of a hand, exactly like in the list above.
"""
# Card ranks in descending order of value, and the four suit letters.
rank = ['A', 'K', 'Q', 'J', 'T', '9', '8', '7', '6', '5', '4', '3', '2']
suit = ['S', 'H', 'D', 'C']


def validate(val):
    """Return True if ``val`` is a well-formed hand of five cards.

    Each card must be exactly two characters: a rank from ``rank``
    followed by a suit from ``suit``.  (The previous version accepted
    cards with trailing junk such as 'ASX' and raised IndexError for
    one-character cards.)
    """
    if len(val) != 5:
        return False
    return all(len(v) == 2 and v[0] in rank and v[1] in suit for v in val)
def deck_sort(inp):
    """Return the cards of ``inp`` sorted from highest rank (A) to lowest (2)."""
    order = {'A': 0, 'K': 1, 'Q': 2, 'J': 3, 'T': 4, '9': 5, '8': 6, '7': 7,
             '6': 8, '5': 9, '4': 10, '3': 11, '2': 12}
    return sorted(inp, key=lambda card: order[card[0]])
def same_suit(inp):
    """Return True when every card in ``inp`` shares the first card's suit."""
    return all(card[1] == inp[0][1] for card in inp)
def same_rank(inp):
    """Return True when every card in ``inp`` shares the first card's rank."""
    return all(card[0] == inp[0][0] for card in inp)
def consecutive(inp):
    """Return True when ``inp`` is an unbroken descending run of ranks.

    Assumes ``inp`` is already sorted high-to-low (see deck_sort); each
    card's rank must be exactly one position below its predecessor in the
    module-level ``rank`` list.
    """
    expected_index = None
    for card in inp:
        if expected_index is not None and rank[expected_index] != card[0]:
            return False
        expected_index = rank.index(card[0]) + 1
    return True
def test(inp):
    """Classify the hand ``inp`` (sorted high-to-low) and print its name.

    Hand types are checked from strongest to weakest, so the first match
    is the best possible description of the hand.
    """
    if royal_flush(inp):
        print('Royal Flush')
    elif straight_flush(inp):
        print('Straight Flush')
    elif four_of_a_kind(inp):
        print('Four of a Kind')
    elif full_house(inp):
        print('Full House')
    elif flush(inp):
        print('Flush')
    elif straight(inp):
        print('Straight')
    elif three_of_a_kind(inp):
        print('Three of a Kind')
    elif two_pair(inp):
        print('Two Pair')
    elif one_pair(inp):
        print('One Pair')
    else:
        print('"High" Card')
def straight_flush(inp):
    # Five consecutive ranks, all in one suit.
    return same_suit(inp) and consecutive(inp)

def royal_flush(inp):
    # A straight flush whose highest card is the ace.
    return straight_flush(inp) and inp[0][0] == 'A'

def four_of_a_kind(inp):
    # Four equal ranks: in a sorted hand they occupy the first or last four.
    return (same_rank(inp[:4])) or \
           (same_rank(inp[1:]))

def full_house(inp):
    # Three of a kind plus a pair, in either order within the sorted hand.
    return (same_rank(inp[:3]) and same_rank(inp[3:])) or \
           (same_rank(inp[:2]) and same_rank(inp[2:]))

def flush(inp):
    # All five cards share a suit.
    return same_suit(inp)

def straight(inp):
    # Five consecutive ranks (suits irrelevant).
    return consecutive(inp)

def three_of_a_kind(inp):
    # A run of three equal ranks anywhere in the sorted hand.
    return (same_rank(inp[0:3])) or \
           (same_rank(inp[1:4])) or \
           (same_rank(inp[2:5]))

def two_pair(inp):
    # Two pairs; the odd card sits before, between, or after them.
    return (same_rank(inp[0:2]) and same_rank(inp[2:4])) or \
           (same_rank(inp[0:2]) and same_rank(inp[3:5])) or \
           (same_rank(inp[1:3]) and same_rank(inp[3:5]))

def one_pair(inp):
    # A single pair in any of the four adjacent positions of the sorted hand.
    return (same_rank(inp[0:2])) or \
           (same_rank(inp[1:3])) or \
           (same_rank(inp[2:4])) or \
           (same_rank(inp[3:5]))
if __name__ == '__main__':
    # Read the number of hands, then read, sort and classify each hand.
    number = int(input('Number of inputs: '))
    print('Please enter combinations: ')
    for i in range(number):
        cards = input('> ').upper().split()
        if validate(cards):
            test(deck_sort(cards))
        else:
            print('invalid input')
| DayGitH/Python-Challenges | DailyProgrammer/20120330B.py | Python | mit | 3,384 |
# Exercise 43: Basic Object-Oriented Analysis and Design
# Process to build something to evolve problems
# 1. Write or draw about the problem.
# 2. Extract key concepts from 1 and research them.
# 3. Create a class hierarchy and object map for the concepts.
# 4. Code the classes and a test to run them.
# 5. Repeat and refine.
# The Analysis of a Simple Game Engine
# Write or Draw About the Problem
"""
Aliens have invaded a space ship and our hero has to go through a maze of rooms
defeating them so he can escape into an escape pod to the planet below. The game
will be more like a Zork or Adventure type game with text outputs and funny ways
to die. The game will involve an engine that runs a map full of rooms or scenes.
Each room will print its own description when the player enters it and then tell
the engine what room to run next out of the map.
"""
# At this point I have a good idea for the game and how it would run, so now I want
# to describe each scene:
"""
Death
This is when the player dies and should be something funny.
Central Corridor
This is the starting point and has a Gothon already standing there.
They have to defeat with a joke before continuing.
Laser Weapon Armory
This is where the hero gets a neutron bomb to blow up the ship before
getting to the escape pod. It has a keypad the hero has to gues the
number for.
The Bridge
Another battle scene with a Gothon where the hero places the bomb.
Escape Pod
Where the hero escapes but only after guessing the right escape pod.
"""
# Extract Key Concepts and Research Them
# First I make a list of all the nouns:
# Alien, Player, Ship, Maze, Room, Scene, Gothon, Escape Pod, Planet, Map, Engine, Death,
# Central Corridor, Laser Weapon Armory, The Bridge
# Create a Class Hierarchy and Object Map for the Concepts
"""
Right away I see that "Room" and "Scene" are basically the same thing depending on how
I want to do things. I'm going to pick "Scene" for this game. Then I see that all the
specific rooms like "Central Corridor" are basically just Scenes. I see also that Death
is basically a Scene, which confirms my choice of "Scene" over "Room" since you can have
a death scene, but a death room is kind of odd. "Maze" and "Map" are basically the same
so I'm going to go with "Map" since I used it more often. I don't want to do a battle
system so I'm going to ignore "Alien" and "Player" and save that for later. The "Planet"
could also just be another scene instead of something specific
"""
# After all of that thought process I start to make a class hierarchy that looks
# like this in my text editor:
# * Map
# * Engine
# * Scene
# * Death
# * Central Corridor
# * Laser Weapon Armory
# * The Bridge
# * Escape Pod
"""
I would then go through and figure out what actions are needed on each thing based on
verbs in the description. For example, I know from the description I'm going to need a
way to "run" the engine, "get the next scene" from the map, get the "opening scene" and
"enter" a scene. I'll add those like this:
"""
# * Map
# - next_scene
# - opening_scene
# * Engine
# - play
# * Scene
# - enter
# * Death
# * Central Corridor
# * Laser Weapon Armory
# * The Bridge
# * Escape Pod
"""
Notice how I just put -enter under Scene since I know that all the scenes under it will
inherit it and have to override it later.
"""
# Code the Classes and a Test to Run Them
# The Code for "Gothons from Planet Percal #25"
from sys import exit
from random import randint
class Scene(object):
    """Base class for all game scenes; subclasses must override enter()."""

    def enter(self):
        # Reaching this means a subclass forgot to implement enter().
        print "This scene is not yet configured. Subclass it and implement enter()."
        exit(1)
class Engine(object):
    """Drives the game: runs scenes in a loop until the final one."""

    def __init__(self, scene_map):
        # scene_map: a Map resolving scene names to Scene instances.
        self.scene_map = scene_map

    def play(self):
        """Enter scenes repeatedly; each enter() returns the next scene's name."""
        current_scene = self.scene_map.opening_scene()
        last_scene = self.scene_map.next_scene('finished')

        while current_scene != last_scene:
            next_scene_name = current_scene.enter()
            current_scene = self.scene_map.next_scene(next_scene_name)

        # be sure to print out the last scene
        current_scene.enter()
class Death(Scene):
    """Terminal losing scene: prints a random taunt and exits the program."""

    quips = [
        "You died. You kinda suck at this.",
        "Your mom would be proud...if she were smarter.",
        "Such a luser.",
        "I have a small puppy that's better at this."
    ]

    def enter(self):
        # randint bounds are inclusive, hence len(...) - 1.
        print Death.quips[randint(0, len(self.quips)-1)]
        exit(1)
class CentralCorridor(Scene):
    """Opening scene: one Gothon blocks the way to the Weapon Armory.

    Only "tell a joke" advances the game; "shoot!" and "dodge!" lead to
    death, and any other input replays this scene.
    """

    def enter(self):
        print "The Gothons of Planet Percal #25 have invaded your ship and destroyed"
        print "your entire crew. You are the last surviving member and your last"
        print "mission is to get the neutron destruct bomb from the Weapons Armory,"
        print "put it in the bridge, and blow the ship up after getting into an "
        print "escape pod."
        print "\n"
        print "You're running down the central corridor to the Weapons Armory when"
        print "a Gothon jumps out, red scaly skin, dark grimy teeth, and evil clown costume"
        print "flowing around his hate filled body. He's blocking the door to the"
        print "Armory and about to pull a weapon to blast you."
        print "What will you do?"
        print ">> shoot!"
        print ">> dodge!"
        print ">>tell a joke"

        action = raw_input("> ")

        if action == "shoot!":
            print "Quick on the draw you yank out your blaster and fire it at the Gothon."
            print "His clown costume is flowing and moving around his body, which throws"
            print "off your aim. Your laser hits his costume but misses him entirely. This"
            print "completely ruins his brand new costume his mother bought him, which"
            print "makes him fly into an insane rage and blast you repeatedly in the face until"
            print "you are dead. Then he eats you."
            return 'death'
        elif action == "dodge!":
            print "Like a world class boxer you dodge, weave, slip and slide right"
            print "as the Gothon's blaster cranks a laser past your head."
            print "In the middle of your artful dodge your foot slips and you"
            print "bang your head on the metal wall and pass out."
            print "You wake up shortly after only to die as the Gothon stomps on"
            print "your head and eats you."
            return 'death'
        elif action == "tell a joke":
            print "Lucky for you they made you learn Gothon insults in the academy."
            print "You tell the one Gothon joke you know: "
            print "Lbhe zbgure vf fb sng, jura fur fvgf nebhaq gur ubhfr, fur fvgf nebhaq gur ubhfr."
            print "The Gothon stops, tries not to laugh, then busts out laughing and can't move."
            print "While he's laughing you run up and shoot him square in the head"
            print "putting him down, then jump through the Weapon Armory door."
            return 'laser_weapon_armory'
        else:
            print "DOES NOT COMPUTE!"
            # Unrecognized input: replay this scene.
            return 'central_corridor'
class LaserWeaponArmory(Scene):
def enter(self):
print "You do a dive roll into the Weapon Armory, crouch and scan the room"
print "for more Gothons that might be hiding. It's dead quiet, too quiet."
print "You stand up and run to the far side of the room and find the"
print "neutron bomb in its container. There's a keypad lock on the box"
print "and you need the code to get the bomb out. If you get the code"
print "wrong 10 times then the lock closes forever and you can't"
print "get the bomb. The code is 3 digits."
code = "%d%d%d" % (randint(1,9), randint(1,9), randint(1,9))
print "This is the code: %s." % code
guess = raw_input("[keypad]> ")
guesses = 0
while guess != code and guesses < 10:
print "BZZZZEDDD!"
guesses += 1
guess = raw_input("[keypad]> ")
if guess == code:
print "The container clicks open and the seal breaks, letting gas out."
print "You grab the neutron bomb and run as fast as you can to the"
print "bridge where you must place it in the right spot."
return 'the_bridge'
else:
print "The lock buzzes one last time and then you hear a sickening"
print "melting sound as the mechanism is fused together."
print "You decide to sit there, and finally the Gothons blow up the"
print "ship from their ship and you die."
return 'death'
class TheBridge(Scene):
    """The hero must plant the bomb on the bridge without getting shot.

    "slowly place the bomb" advances the game; "throw the bomb" is fatal,
    and any other input replays this scene.
    """

    def enter(self):
        print "You burst onto the Bridge with the netron destruct bomb"
        print "under your arm and surprise 5 Gothons who are trying to"
        print "take control of the ship. Each of them has an even uglier"
        print "clown costume than the last. They haven't pulled their"
        print "weapons out yet, as they see the active bomb under your"
        print "arm and don't want to set it off."
        print "What will you do?"
        print ">> throw the bomb"
        print ">>slowly place the bomb"

        action = raw_input("> ")

        if action == "throw the bomb":
            print "In a panic you throw the bomb at the group of Gothons"
            print "and make a leap for the door. Right as you drop it a"
            print "Gothon shoots you right in the back killing you."
            print "As you die you see another Gothon frantically try to disarm"
            print "the bomb. You die knowing they will probably blow up when"
            print "it goes off."
            return 'death'
        elif action == "slowly place the bomb":
            print "You point your blaster at the bomb under your arm"
            print "and the Gothons put their hands up and start to sweat."
            print "You inch backward to the door, open it, and then carefully"
            print "place the bomb on the floor, pointing your blaster at it."
            print "You then jump back through the door, punch the close button"
            print "and blast the lock so the Gothons can't get out."
            print "Now that the bomb is placed you run to the escape pod to"
            print "get off this tin can."
            return 'escape_pod'
        else:
            print "DOES NOT COMPUTE!"
            # Unrecognized input: replay this scene.
            return "the_bridge"
class EscapePod(Scene):
    """Final challenge: pick the one working escape pod out of five."""

    def enter(self):
        print "You rush through the ship desperately trying to make it to"
        print "the escape pod before the whole ship explodes. It seems like"
        print "hardly any Gothons are on the ship, so your run is clear of"
        print "interference. You get to the chamber with the escape pods, and"
        print "now need to pick one to take. Some of them could be damaged"
        print "but you don't have time to look. There's 5 pods, which one"
        print "do you take?"

        # The winning pod is chosen at random each playthrough.
        good_pod = randint(1,5)
        # Debug/cheat aid left in by the author: reveals the good pod.
        print "Fast look tells you %s is good." % good_pod
        guess = raw_input("[pod #]> ")

        if int(guess) != good_pod:
            print "You jump into pod %s and hit the eject button." % guess
            print "The pod escapes out into the void of space, then"
            print "implodes as the hull ruptures, crushing your body"
            print "into jam jelly."
            return 'death'
        else:
            print "You jump into pod %s and hit the eject button." % guess
            print "The pod easily slides out into space heading to"
            print "the planet below. As it flies to the planet, you look"
            print "back and see your ship implode then explode like a"
            print "bright star, taking out the Gothon ship at the same"
            print "time. You won!"
            return 'finished'
class Finished(Scene):
    """Terminal winning scene; returns 'finished' so the Engine loop ends."""

    def enter(self):
        print "You won! Good job."
        return 'finished'
class Map(object):
    """Name-to-Scene lookup table plus the configured starting scene."""

    # Registry of every scene in the game, keyed by its name.
    scenes = {
        'central_corridor': CentralCorridor(),
        'laser_weapon_armory': LaserWeaponArmory(),
        'the_bridge': TheBridge(),
        'escape_pod': EscapePod(),
        'death': Death(),
        'finished': Finished(),
    }

    def __init__(self, start_scene):
        # Only the *name* is stored; resolution happens on demand.
        self.start_scene = start_scene

    def next_scene(self, scene_name):
        """Return the Scene registered under ``scene_name`` (None if unknown)."""
        return Map.scenes.get(scene_name)

    def opening_scene(self):
        """Return the Scene the game starts in."""
        return self.next_scene(self.start_scene)
# Wire everything together: build the map, hand it to the engine, and run.
a_map = Map('central_corridor')
a_game = Engine(a_map)
a_game.play()
# Top Down vs Bottom Up
# Steps to do Bottom Up:
# 1. Take a small piece of the problem; hack on some code and get it to run barely.
# 2. Refine the code into something more formal with classes and automated tests.
# 3. Extract the key concepts you're using and try to find research for them.
# 4. Write a description of what's really going on.
# 5. Go back and refine the code, possibly throwing it out and starting over.
# 6. Repeat, moving on to some other piece of the problem.
# Study Drills:
# 1. Change it! Maybe you hate this game. Could be to violent, you aren't into sci-fi. Get the game
# working, then change it to what you like. This is your computer, you make it do what you want.
# 2. I have a bug in this code. Why is the door lock guessing 11 times?
# 3. Explain how returning the next room works.
# 4. Add cheat codes to the game so you can get past the more difficult rooms. I can do this with
# two words on one line.
# 5. Go back to my description and analysis, then try to build a small combat system for the hero
# and the various Gothons he encounters.
# 6. This is actually a small version of something called a "finite state machine". Read about them.
# They might not make sense but try anyway.
| Valka7a/python-playground | python-the-hard-way/43-basic-object-oriented-analysis-and-design.py | Python | mit | 13,790 |
#!/usr/bin/python
#
# Copyright (C) 2012 Michael Spreitzenbarth, Sven Schmitt
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import subprocess
import datetime
import sys
import hashlib
import _adel_log
import _exif
def get_exif_information(backup_dir, outputFile):
    """Collect GPS positions from the EXIF data of all backed-up pictures.

    Scans <backup_dir>/pictures/, reads each file's EXIF tags and, for every
    picture that carries both a latitude and a longitude, writes a formatted
    line to ``outputFile``.

    Returns a list of [filename, latitude, longitude, "500", readtime]
    entries with latitude/longitude as decimal-degree strings.
    """
    picture_position_list = []
    picture_dir = backup_dir + "/pictures/"
    listing = os.listdir(picture_dir)
    for picture in listing:
        latitude = ""
        longitude = ""
        readtime = ""
        try:
            f = open(picture_dir + picture, 'rb')
            try:
                tags = _exif.process_file(f)
            finally:
                # Always release the handle (it was leaked before).
                f.close()
            for tag in tags.keys():
                # Bug fix: the original used "tag in ('GPS GPSLatitude')",
                # which is a *substring* test against a plain string (the
                # parentheses do not make a tuple), so e.g. the tag 'GPS'
                # matched too.  Exact comparison is intended.
                if tag == 'GPS GPSLatitude':
                    # Value looks like "[51, 13, 1234/100]" (deg, min, sec);
                    # the seconds part may be a rational "num/den".
                    lat = str(tags[tag])
                    lat1 = lat.split(",")[2].split("]")[0].split(" ")[-1]
                    if "/" in lat1:
                        lat1_1 = float(lat1.split("/")[0])
                        lat1_2 = float(lat1.split("/")[1])
                        lat1 = lat1_1 / lat1_2
                    else:
                        lat1 = float(lat1)
                    lat2 = float(lat.split(",")[1].split(" ")[-1])
                    lat3 = float(lat.split(",")[0].split("[")[1])
                    # Degrees/minutes/seconds -> decimal degrees.
                    latitude = ((((lat1 / 60) + lat2) / 60) + lat3)
                if tag == 'GPS GPSLongitude':
                    lon = str(tags[tag])
                    lon1 = lon.split(",")[2].split("]")[0].split(" ")[-1]
                    if "/" in lon1:
                        lon1_1 = float(lon1.split("/")[0])
                        lon1_2 = float(lon1.split("/")[1])
                        lon1 = lon1_1 / lon1_2
                    else:
                        lon1 = float(lon1)
                    lon2 = float(lon.split(",")[1].split(" ")[-1])
                    lon3 = float(lon.split(",")[0].split("[")[1])
                    longitude = ((((lon1 / 60) + lon2) / 60) + lon3)
                if tag == 'EXIF DateTimeOriginal':
                    # "YYYY:MM:DD hh:mm:ss" -> "MM/DD/YY hh:mm:ss".
                    # Bug fix: the year was previously shortened with
                    # .split("0")[1], which breaks for years such as 2009;
                    # slicing the last two digits is what was intended.
                    readtime = str(tags[tag])
                    readtime = (readtime.split(":")[1] + "/"
                                + readtime.split(":")[2].split(" ")[0] + "/"
                                + readtime.split(":")[0][2:] + " "
                                + readtime.split(" ")[1])
        except Exception:
            # Unreadable file or malformed EXIF data: skip this picture.
            continue
        if latitude != "" and longitude != "":
            picture_position_list.append([picture, str(latitude), str(longitude), "500", readtime])
            outputFile.write('%25s %7d %5d %10s %10s %s \n' % ("JPEG", 500, 0, latitude, longitude, str(readtime)))
    return picture_position_list
| draekko/ADEL | _getEXIF.py | Python | gpl-3.0 | 3,136 |
"""Get a clinical report, either with extended information or not.
Example usages:
python get_clinical_report.py 1801
python get_clinical_report.py 1801 --e true
"""
import os
import requests
from requests.auth import HTTPBasicAuth
import sys
import simplejson as json
import argparse
# Load environment variables for request authentication parameters.
# Fail fast with a clear message when required credentials are absent.
if "FABRIC_API_PASSWORD" not in os.environ:
    sys.exit("FABRIC_API_PASSWORD environment variable missing")

if "FABRIC_API_LOGIN" not in os.environ:
    sys.exit("FABRIC_API_LOGIN environment variable missing")

FABRIC_API_LOGIN = os.environ['FABRIC_API_LOGIN']
FABRIC_API_PASSWORD = os.environ['FABRIC_API_PASSWORD']
# The API host may be overridden (e.g. for a staging environment).
FABRIC_API_URL = os.environ.get('FABRIC_API_URL', 'https://api.fabricgenomics.com')
auth = HTTPBasicAuth(FABRIC_API_LOGIN, FABRIC_API_PASSWORD)
def get_clinical_report(cr_id, extended=False):
    """Fetch one clinical report from the Fabric API.

    Args:
        cr_id: ID of the clinical report to retrieve.
        extended: When True, request extended variant and field info.

    Returns:
        The decoded JSON body of the API response.
    """
    # Build the URL template first; the extended flag is appended before
    # the placeholders are filled in (str.format leaves the suffix alone).
    url = "{}/reports/{}/"
    if extended:
        url += "?extended=True"
    url = url.format(FABRIC_API_URL, cr_id)
    sys.stdout.flush()
    response = requests.get(url, auth=auth)
    return response.json()
def main():
"""Main function. Get a clinical report by ID.
"""
parser = argparse.ArgumentParser(description='Fetch a clinical report')
parser.add_argument('c', metavar='clinical_report_id', type=int)
parser.add_argument('--e', metavar='extended', type=bool, default=False)
args = parser.parse_args()
cr_id = args.c
extended = args.e
json_response = get_clinical_report(cr_id, extended=extended)
sys.stdout.write(json.dumps(json_response, indent=4))
if __name__ == "__main__":
main()
| Omicia/omicia_api_examples | python/ClinicalReportLaunchers/get_clinical_report.py | Python | mit | 1,764 |
# RUN: %python -m artiq.compiler.testbench.jit %s >%t
# RUN: OutputCheck %s --file-to-check=%t
def f():
    # Impure callee: advances the timeline cursor by 2 machine units.
    delay_mu(2)
def g():
    with interleave:
        # One branch is a call to an impure function, the other a pure
        # delay; interleaving must tie both to the same time cursor, so
        # the total advance is 2 mu, not 4 (checked below).
        f()
        delay_mu(2)
    print(now_mu())
# CHECK-L: 2
g()
| JQIamo/artiq | artiq/test/lit/interleaving/pure_impure_tie.py | Python | lgpl-3.0 | 222 |
# coding: utf-8
from ..security import passwd, passwd_check, salt_len
import nose.tools as nt
def test_passwd_structure():
    """A stored passphrase has the form 'algorithm:salt:hash'."""
    stored = passwd('passphrase')
    algorithm, salt, digest = stored.split(':')
    nt.assert_equal(algorithm, 'sha1')
    nt.assert_equal(len(salt), salt_len)
    nt.assert_equal(len(digest), 40)
def test_roundtrip():
    """A passphrase verifies against its own freshly computed hash."""
    stored = passwd('passphrase')
    nt.assert_equal(passwd_check(stored, 'passphrase'), True)
def test_bad():
    """Wrong or malformed passphrases must fail verification."""
    stored = passwd('passphrase')
    for attempt in (stored, 'a:b:c:d', 'a:b'):
        nt.assert_equal(passwd_check(stored, attempt), False)
def test_passwd_check_unicode():
    # GH issue #4524
    # Regression test: passwd_check must accept a unicode passphrase
    # (non-ASCII characters) without raising and verify it correctly.
    phash = u'sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f'
    assert passwd_check(phash, u"łe¶ŧ←↓→")
import timeit
import sys
import numpy as np
from Lab1 import *
# Input file format (path given as argv[1]):
#   line 1: whitespace-separated matrix sizes to benchmark
#   line 2: number of runs per size
fin = open(sys.argv[1])
sizes = [int(i) for i in fin.readline().split()]
runs = int(fin.readline())
RANGE = 1000000 # Max size of an element (+ or -)
ITERATIONS = 100
functions = ['numpy', 'multIter', 'multDCWrapper', 'strassenWrapper'] # Name of functions to test
# Echo the benchmark configuration before timing starts.
print("Sizes:", sizes)
print("Runs:", runs)
print("Iterations:", ITERATIONS)
print("Range: -"+ str(RANGE) +" to "+ str(RANGE) )
def setup(runs):
    """Build `runs` pairs of random matrices with a fixed seed.

    NOTE(review): the matrix size comes from the module-level N, which the
    benchmark loop rebinds before each timeit call -- confirm that ordering
    if this helper is reused elsewhere.
    """
    np.random.seed(12357890)
    return [(randMatr(N), randMatr(N)) for _ in range(runs)]
def numpy(n, a, b):
    """Baseline multiply using NumPy's native implementation.

    The size argument `n` is unused; it exists so every benchmarked
    function shares the same call signature.
    """
    return np.matmul(a, b)
def multDCWrapper(n, A, B):
    """Run the divide-and-conquer multiply over the full n x n range."""
    full = (0, n)
    return multDC(n, full, full, full, full, A, B)
def strassenWrapper(n, A, B):
    """Run Strassen's multiply over the full n x n range."""
    full = (0, n)
    return multStrassen(n, full, full, full, full, A, B)
# Time every function at every size. Note that `setup(runs)` is re-run
# inside each timeit setup string and reads the global N bound by this
# loop, so the arrays match the current size.
for N in sizes:
    #Heading Stuff
    print("\n\n" + "="*60)
    print("Array Size = ", N)
    print("="*60)
    for func in functions:
        print(func)
        total=0
        for i in range(runs):
            #print("from __main__ import {} \narrays=setup({})".format(setup", ".join(functions), runs ))
            # Each timeit call multiplies one pre-built pair of matrices;
            # the statement is built as a string because timeit requires it.
            total += timeit.timeit("{}({}, arrays[{}][0], arrays[{}][1])".format(func,N, i,i),
                    setup="from __main__ import setup, {} \narrays=setup({})".format(", ".join(functions), runs), number=runs)
        print(total)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Rename commoninfo.date, add commoninfo.doc_date, relax result.description."""

    dependencies = [
        ('p311', '0003_auto_20150323_1217'),
    ]
    operations = [
        # 'date' becomes 'mod_date' to distinguish it from the new document date.
        migrations.RenameField(
            model_name='commoninfo',
            old_name='date',
            new_name='mod_date',
        ),
        # Nullable so existing rows need no backfill.
        migrations.AddField(
            model_name='commoninfo',
            name='doc_date',
            field=models.DateField(null=True),
            preserve_default=True,
        ),
        # Description is now optional (null allowed).
        migrations.AlterField(
            model_name='result',
            name='description',
            field=models.CharField(null=True, max_length=250),
            preserve_default=True,
        ),
    ]
| max1k/cbs | p311/migrations/0004_auto_20150325_1306.py | Python | gpl-2.0 | 781 |
#
# (c) 2015 Peter Sprygada, <psprygada@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
Adds shared module support for connecting to and configuring Cisco
IOS devices. This shared module builds on module_utils/ssh.py and
implements the Shell object.
** Note: The order of the import statements does matter. **
from ansible.module_utils.basic import *
from ansible.module_utils.ssh import *
from ansible.module_utils.ios import *
This module provides the following common argument spec for creating
ios connections:
* enable_mode (bool) - Forces the shell connection into IOS enable mode
* enable_password (str) - Configures the IOS enable mode password to be
send to the device to authorize the session
* device (dict) - Accepts the set of configuration parameters as a
dict object
Note: These shared arguments are in addition to the arguments provided by
the module_utils/ssh.py shared module
"""
import socket
# Prompts an IOS device can present; matching one of these tells the
# shell layer that the output of a command is complete.
IOS_PROMPTS_RE = [
    re.compile(r'[\r\n]?[a-zA-Z]{1}[a-zA-Z0-9-]*[>|#](?:\s*)$'),
    re.compile(r'[\r\n]?[a-zA-Z]{1}[a-zA-Z0-9-]*\(.+\)#$'),
    re.compile(r'\x1b.*$')
]
# Patterns identifying an error response emitted by the device.
IOS_ERRORS_RE = [
    re.compile(r"% ?Error"),
    re.compile(r"^% \w+", re.M),
    re.compile(r"% ?Bad secret"),
    re.compile(r"invalid input", re.I),
    re.compile(r"(?:incomplete|ambiguous) command", re.I),
    re.compile(r"connection timed out", re.I),
    re.compile(r"[^\r\n]+ not found", re.I),
    re.compile(r"'[^']' +returned error code: ?\d+"),
]
# Prompt shown when the device asks for the enable-mode password.
IOS_PASSWD_RE = re.compile(r"[\r\n]?password: $", re.I)
# Argument spec shared by every module that opens an IOS connection.
# Each value may also be supplied nested inside the 'device' dict.
IOS_COMMON_ARGS = dict(
    host=dict(),
    port=dict(type='int', default=22),
    username=dict(),
    password=dict(),
    enable_mode=dict(default=False, type='bool'),
    enable_password=dict(),
    connect_timeout=dict(type='int', default=10),
    device=dict()
)
def ios_module(**kwargs):
    """Build an AnsibleModule whose argument_spec includes the common
    IOS connection arguments.

    Keyword Args:
        argument_spec: Optional dict of module-specific arguments merged
            on top of the shared url/IOS argument spec.

    Returns:
        An initialized AnsibleModule with values nested under the 'device'
        parameter (and any raw complex args) promoted to top-level params.
    """
    # NOTE(review): removed an unused local (`spec`) that was assigned
    # from kwargs but never read.
    argument_spec = url_argument_spec()
    argument_spec.update(IOS_COMMON_ARGS)
    if kwargs.get('argument_spec'):
        argument_spec.update(kwargs['argument_spec'])
    kwargs['argument_spec'] = argument_spec

    module = AnsibleModule(**kwargs)

    # Values supplied inside the 'device' dict win over unset top-level args.
    device = module.params.get('device') or dict()
    for key, value in device.iteritems():
        if key in IOS_COMMON_ARGS:
            module.params[key] = value

    # MODULE_COMPLEX_ARGS is injected by Ansible's module replacer at run
    # time; re-apply every raw arg except the 'device' blob itself.
    params = json_dict_unicode_to_bytes(json.loads(MODULE_COMPLEX_ARGS))
    for key, value in params.iteritems():
        if key != 'device':
            module.params[key] = value

    return module
def to_list(arg):
    """Coerce |arg| into a list.

    None becomes an empty list, lists/tuples are copied into a new list,
    and any other value is wrapped as a single-element list.
    """
    if arg is None:
        return []
    if isinstance(arg, (list, tuple)):
        return list(arg)
    return [arg]
class IosShell(object):
    """Wrapper around the shared Shell (module_utils/ssh.py) that adds
    IOS-specific prompt/error matching and enable-mode handling."""
    def __init__(self):
        # The underlying Shell object; created lazily by connect().
        self.connection = None
    def connect(self, host, username, password, **kwargs):
        """Open an SSH shell session to the device.

        Keyword Args:
            port: SSH port, default 22.
            timeout: Connect timeout in seconds, default 10.
        """
        port = kwargs.get('port') or 22
        timeout = kwargs.get('timeout') or 10
        self.connection = Shell()
        self.connection.prompts.extend(IOS_PROMPTS_RE)
        self.connection.errors.extend(IOS_ERRORS_RE)
        self.connection.open(host, port=port, username=username,
                             password=password, timeout=timeout)
    def authorize(self, passwd=None):
        """Enter enable mode, answering the password prompt with |passwd|."""
        command = Command('enable', prompt=IOS_PASSWD_RE, response=passwd)
        self.send(command)
    def configure(self, commands):
        """Run |commands| inside a configure-terminal session.

        The enter/exit commands are added here, and their responses are
        stripped from the returned list so it lines up with |commands|.
        """
        commands = to_list(commands)
        commands.insert(0, 'configure terminal')
        commands.append('end')
        resp = self.send(commands)
        resp.pop(0)
        resp.pop()
        return resp
    def send(self, commands):
        """Send each command in turn, returning the list of responses."""
        responses = list()
        for cmd in to_list(commands):
            response = self.connection.send(cmd)
            responses.append(response)
        return responses
def ios_from_args(module):
    """Extracts the set of arguments to build a valid IOS connection.

    Precedence for each common arg: 'device' dict entry, then top-level
    module param, then the spec's declared default. Missing required
    args abort the module.
    """
    params = dict()
    for arg, attrs in IOS_COMMON_ARGS.iteritems():
        if module.params['device']:
            params[arg] = module.params['device'].get(arg)
        if arg not in params or module.params[arg]:
            params[arg] = module.params[arg]
        if params[arg] is None:
            if attrs.get('required'):
                module.fail_json(msg='argument %s is required' % arg)
            params[arg] = attrs.get('default')
    return params
def ios_connection(module):
    """Creates a connection to an IOS device based on the module arguments.

    Fails the module on authentication or socket errors. On success the
    shell has pagination disabled and, if requested, enable mode entered.
    """
    host = module.params['host']
    port = module.params['port']
    username = module.params['username']
    password = module.params['password']
    timeout = module.params['connect_timeout']
    try:
        shell = IosShell()
        shell.connect(host, port=port, username=username, password=password,
                      timeout=timeout)
    # NOTE: Python 2 'except X, e' syntax, consistent with this module.
    except paramiko.ssh_exception.AuthenticationException, exc:
        module.fail_json(msg=exc.message)
    except socket.error, exc:
        module.fail_json(msg=exc.strerror, errno=exc.errno)
    # Disable output pagination so command responses are not interrupted
    # by --More-- prompts.
    shell.send('terminal length 0')
    if module.params['enable_mode']:
        shell.authorize(module.params['enable_password'])
    return shell
| atsaki/ansible | lib/ansible/module_utils/ios.py | Python | gpl-3.0 | 5,894 |
"""Test the Vultr binary sensor platform."""
import pytest
import voluptuous as vol
from homeassistant.components import vultr as base_vultr
from homeassistant.components.vultr import (
ATTR_ALLOWED_BANDWIDTH,
ATTR_AUTO_BACKUPS,
ATTR_COST_PER_MONTH,
ATTR_CREATED_AT,
ATTR_IPV4_ADDRESS,
ATTR_SUBSCRIPTION_ID,
CONF_SUBSCRIPTION,
binary_sensor as vultr,
)
from homeassistant.const import CONF_NAME, CONF_PLATFORM
from homeassistant.core import HomeAssistant
# Three platform configs exercised by the tests below: a healthy server,
# a stopped one, and one using the platform's default name template.
CONFIGS = [
    {CONF_SUBSCRIPTION: "576965", CONF_NAME: "A Server"},
    {CONF_SUBSCRIPTION: "123456", CONF_NAME: "Failed Server"},
    {CONF_SUBSCRIPTION: "555555", CONF_NAME: vultr.DEFAULT_NAME},
]
@pytest.mark.usefixtures("valid_config")
def test_binary_sensor(hass: HomeAssistant):
    """Test successful instance."""
    hass_devices = []
    def add_entities(devices, action):
        """Mock add devices."""
        for device in devices:
            device.hass = hass
            hass_devices.append(device)
    # Setup each of our test configs
    for config in CONFIGS:
        vultr.setup_platform(hass, config, add_entities, None)
    assert len(hass_devices) == 3
    for device in hass_devices:
        # Test pre data retrieval
        # Before update() the default-name config still shows the raw
        # "Vultr {}" template; after update() it is filled in.
        if device.subscription == "555555":
            assert device.name == "Vultr {}"
        device.update()
        device_attrs = device.extra_state_attributes
        if device.subscription == "555555":
            assert device.name == "Vultr Another Server"
        if device.name == "A Server":
            # Running server: reported as "on" with its fixture attributes.
            assert device.is_on is True
            assert device.device_class == "power"
            assert device.state == "on"
            assert device.icon == "mdi:server"
            assert device_attrs[ATTR_ALLOWED_BANDWIDTH] == "1000"
            assert device_attrs[ATTR_AUTO_BACKUPS] == "yes"
            assert device_attrs[ATTR_IPV4_ADDRESS] == "123.123.123.123"
            assert device_attrs[ATTR_COST_PER_MONTH] == "10.05"
            assert device_attrs[ATTR_CREATED_AT] == "2013-12-19 14:45:41"
            assert device_attrs[ATTR_SUBSCRIPTION_ID] == "576965"
        elif device.name == "Failed Server":
            # Stopped server: reported as "off" with the off icon.
            assert device.is_on is False
            assert device.state == "off"
            assert device.icon == "mdi:server-off"
            assert device_attrs[ATTR_ALLOWED_BANDWIDTH] == "1000"
            assert device_attrs[ATTR_AUTO_BACKUPS] == "no"
            assert device_attrs[ATTR_IPV4_ADDRESS] == "192.168.100.50"
            assert device_attrs[ATTR_COST_PER_MONTH] == "73.25"
            assert device_attrs[ATTR_CREATED_AT] == "2014-10-13 14:45:41"
            assert device_attrs[ATTR_SUBSCRIPTION_ID] == "123456"
def test_invalid_sensor_config():
    """Test config type failures."""
    # The schema must reject a platform config with no subscription key.
    with pytest.raises(vol.Invalid):  # No subs
        vultr.PLATFORM_SCHEMA({CONF_PLATFORM: base_vultr.DOMAIN})
@pytest.mark.usefixtures("valid_config")
def test_invalid_sensors(hass: HomeAssistant):
    """Test the VultrBinarySensor fails."""
    hass_devices = []
    def add_entities(devices, action):
        """Mock add devices."""
        for device in devices:
            device.hass = hass
            hass_devices.append(device)
    # setup_platform must tolerate a config with no subscription at all
    # (no entity is added, no exception raised).
    bad_conf = {}  # No subscription
    vultr.setup_platform(hass, bad_conf, add_entities, None)
    # And a subscription the API key does not know about.
    bad_conf = {
        CONF_NAME: "Missing Server",
        CONF_SUBSCRIPTION: "555555",
    }  # Sub not associated with API key (not in server_list)
    vultr.setup_platform(hass, bad_conf, add_entities, None)
| rohitranjan1991/home-assistant | tests/components/vultr/test_binary_sensor.py | Python | mit | 3,539 |
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provide a class for collecting info on one builder run.
There are two public classes, BuilderRun and ChildBuilderRun, that serve
this function. The first is for most situations, the second is for "child"
configs within a builder config that has entries in "child_configs".
Almost all functionality is within the common _BuilderRunBase class. The
only thing the BuilderRun and ChildBuilderRun classes are responsible for
is overriding the self.config value in the _BuilderRunBase object whenever
it is accessed.
It is important to note that for one overall run, there will be one
BuilderRun object and zero or more ChildBuilderRun objects, but they
will all share the same _BuilderRunBase *object*. This means, for example,
that run attributes (e.g. self.attrs.release_tag) are shared between them
all, as intended.
"""
import cPickle
import functools
import os
try:
import Queue
except ImportError:
# Python-3 renamed to "queue". We still use Queue to avoid collisions
# with naming variables as "queue". Maybe we'll transition at some point.
# pylint: disable=F0401
import queue as Queue
import types
from chromite.cbuildbot import archive_lib
from chromite.cbuildbot import metadata_lib
from chromite.cbuildbot import constants
from chromite.cbuildbot import manifest_version
from chromite.cbuildbot import portage_utilities
from chromite.cbuildbot import validation_pool
class RunAttributesError(Exception):
  """Base class for exceptions related to RunAttributes behavior."""
  def __str__(self):
    """Handle stringify because base class will just spit out self.args."""
    # NOTE(review): relies on subclasses assigning self.msg in __init__;
    # stringifying the bare base class would raise AttributeError.
    return self.msg
class ParallelAttributeError(AttributeError):
  """AttributeError variant raised for unknown parallel run attributes."""
  def __init__(self, attr, board=None, target=None, *args):
    # Board-specific lookups get a message naming the board/target pair.
    if board or target:
      template = 'No such board-specific parallel run attribute %r for %s/%s'
      self.msg = template % (attr, board, target)
    else:
      self.msg = 'No such parallel run attribute %r' % attr
    super(ParallelAttributeError, self).__init__(self.msg, *args)
    # Keep the raw identifying details in args (e.g. for pickling).
    self.args = (attr, board, target) + tuple(args)
  def __str__(self):
    # Show the human-readable message rather than the args tuple.
    return self.msg
class AttrSepCountError(ValueError):
  """Custom version of ValueError for when BOARD_ATTR_SEP is misused."""
  def __init__(self, attr, *args):
    # Raised when an attribute name contains the wrong number of
    # RunAttributes.BOARD_ATTR_SEP ('||') separators.
    self.msg = ('Attribute name has an unexpected number of "%s" occurrences'
                ' in it: %s' % (RunAttributes.BOARD_ATTR_SEP, attr))
    super(AttrSepCountError, self).__init__(self.msg, *args)
    self.args = (attr, ) + tuple(args)
  def __str__(self):
    return self.msg
class AttrNotPickleableError(RunAttributesError):
  """For when attribute value to queue is not pickleable."""
  def __init__(self, attr, value, *args):
    # Parallel attributes travel through a multiprocessing queue, so
    # any value that cPickle cannot serialize is rejected with this.
    self.msg = 'Run attribute "%s" value cannot be pickled: %r' % (attr, value)
    super(AttrNotPickleableError, self).__init__(self.msg, *args)
    self.args = (attr, value) + tuple(args)
class AttrTimeoutError(RunAttributesError):
  """For when timeout is reached while waiting for attribute value."""
  def __init__(self, attr, *args):
    # Raised by GetParallel when a nonzero timeout expires with no value.
    self.msg = 'Timed out waiting for value for run attribute "%s".' % attr
    super(AttrTimeoutError, self).__init__(self.msg, *args)
    self.args = (attr, ) + tuple(args)
class LockableQueue(object):
  """Multiprocessing queue with associated recursive lock.
  Objects of this class function just like a regular multiprocessing Queue,
  except that there is also an rlock attribute for getting a multiprocessing
  RLock associated with this queue.  Actual locking must still be handled by
  the calling code.  Example usage:
  with queue.rlock:
    ... process the queue in some way.
  """
  def __init__(self, manager):
    # Both the queue and the lock are manager proxies, so they can be
    # shared across processes.
    self._queue = manager.Queue()
    self.rlock = manager.RLock()
  def __getattr__(self, attr):
    """Relay everything to the underlying Queue object at self._queue."""
    # Only called for names not found on LockableQueue itself (e.g. get,
    # put, empty), so rlock and _queue are unaffected.
    return getattr(self._queue, attr)
class RunAttributes(object):
  """Hold all run attributes for a particular builder run.
  There are two supported flavors of run attributes: REGULAR attributes are
  available only to stages that are run sequentially as part of the main (top)
  process and PARALLEL attributes are available to all stages, no matter what
  process they are in.  REGULAR attributes are accessed directly as normal
  attributes on a RunAttributes object, while PARALLEL attributes are accessed
  through the {Set|Has|Get}Parallel methods.  PARALLEL attributes also have the
  restriction that their values must be pickle-able (in order to be sent
  through multiprocessing queue).
  The currently supported attributes of each kind are listed in REGULAR_ATTRS
  and PARALLEL_ATTRS below.  To add support for a new run attribute simply
  add it to one of those sets.
  A subset of PARALLEL_ATTRS is BOARD_ATTRS.  These attributes only have meaning
  in the context of a specific board and config target.  The attributes become
  available once a board/config is registered for a run, and then they can be
  accessed through the {Set|Has|Get}BoardParallel methods or through the
  {Get|Set|Has}Parallel methods of a BoardRunAttributes object.  The latter is
  encouraged.
  To add a new BOARD attribute simply add it to the BOARD_ATTRS set below, which
  will also add it to PARALLEL_ATTRS (all BOARD attributes are assumed to need
  PARALLEL support).
  """
  REGULAR_ATTRS = frozenset((
      'chrome_version',   # Set by SyncChromeStage, if it runs.
      'manifest_manager', # Set by ManifestVersionedSyncStage.
      'release_tag',      # Set by cbuildbot after sync stage.
      'metadata',         # Used by various build stages to record metadata.
  ))
  # TODO(mtennant): It might be useful to have additional info for each board
  # attribute: 1) a log-friendly pretty name, 2) a rough upper bound timeout
  # value for consumers of the attribute to use when waiting for it.
  BOARD_ATTRS = frozenset((
      'breakpad_symbols_generated',   # Set by DebugSymbolsStage.
      'debug_tarball_generated',      # Set by DebugSymbolsStage.
      'images_generated',             # Set by BuildImageStage.
      'instruction_urls_per_channel', # Set by ArchiveStage
  ))
  # Attributes that need to be set by stages that can run in parallel
  # (i.e. in a subprocess) must be included here.  All BOARD_ATTRS are
  # assumed to fit into this category.
  PARALLEL_ATTRS = BOARD_ATTRS | frozenset((
      'unittest_value',   # For unittests.  An example of a PARALLEL attribute
                          # that is not also a BOARD attribute.
  ))
  # This separator is used to create a unique attribute name for any
  # board-specific attribute.  For example:
  #   breakpad_symbols_generated||stumpy||stumpy-full-config
  BOARD_ATTR_SEP = '||'
  # Sanity check, make sure there is no overlap between the attr groups.
  assert not REGULAR_ATTRS & PARALLEL_ATTRS
  # REGULAR_ATTRS show up as attributes directly on the RunAttributes object.
  __slots__ = tuple(REGULAR_ATTRS) + (
      '_board_targets', # Set of registered board/target combinations.
      '_manager',       # The multiprocessing.Manager to use.
      '_queues',        # Dict of parallel attribute names to LockableQueues.
  )
  def __init__(self, multiprocess_manager):
    # Create queues for all non-board-specific parallel attributes now.
    # Parallel board attributes must wait for the board to be registered.
    self._manager = multiprocess_manager
    self._queues = {}
    for attr in RunAttributes.PARALLEL_ATTRS:
      if attr not in RunAttributes.BOARD_ATTRS:
        # pylint: disable=E1101
        self._queues[attr] = LockableQueue(self._manager)
    # Set of known <board>||<target> combinations.
    self._board_targets = set()
  def RegisterBoardAttrs(self, board, target):
    """Register a new valid board/target combination.  Safe to repeat.
    Args:
      board: Board name to register.
      target: Build config name to register.
    Returns:
      A new BoardRunAttributes object for more convenient access to the newly
      registered attributes specific to this board/target combination.
    """
    board_target = RunAttributes.BOARD_ATTR_SEP.join((board, target))
    if not board_target in self._board_targets:
      # Register board/target as a known board/target.
      self._board_targets.add(board_target)
      # For each board attribute that should be queue-able, create its queue
      # now.  Queues are kept by the uniquified run attribute name.
      for attr in RunAttributes.BOARD_ATTRS:
        # Every attr in BOARD_ATTRS is in PARALLEL_ATTRS, by construction.
        # pylint: disable=E1101
        uniquified_attr = self._GetBoardAttrName(attr, board, target)
        self._queues[uniquified_attr] = LockableQueue(self._manager)
    return BoardRunAttributes(self, board, target)
  # TODO(mtennant): Complain if a child process attempts to set a non-parallel
  # run attribute?  It could be done something like this:
  #def __setattr__(self, attr, value):
  #  """Override __setattr__ to prevent misuse of run attributes."""
  #  if attr in self.REGULAR_ATTRS:
  #    assert not self._IsChildProcess()
  #  super(RunAttributes, self).__setattr__(attr, value)
  @staticmethod
  def _GetBoardAttrName(attr, board, target):
    """Translate plain |attr| to uniquified board attribute name.
    Args:
      attr: Plain run attribute name.
      board: Board name.
      target: Build config name.
    Returns:
      The uniquified board-specific attribute name.
    """
    # Translate to the unique attribute name for attr/board/target.
    return RunAttributes.BOARD_ATTR_SEP.join((attr, board, target))
  def SetBoardParallel(self, attr, value, board, target):
    """Set board-specific parallel run attribute value.
    Args:
      attr: Plain board run attribute name.
      value: Value to set.
      board: Board name.
      target: Build config name.
    """
    unique_attr = self._GetBoardAttrName(attr, board, target)
    try:
      self.SetParallel(unique_attr, value)
    except ParallelAttributeError:
      # Clarify the AttributeError.
      raise ParallelAttributeError(attr, board=board, target=target)
  def HasBoardParallel(self, attr, board, target):
    """Return True if board-specific parallel run attribute is known and set.
    Args:
      attr: Plain board run attribute name.
      board: Board name.
      target: Build config name.
    """
    unique_attr = self._GetBoardAttrName(attr, board, target)
    return self.HasParallel(unique_attr)
  def SetBoardParallelDefault(self, attr, default_value, board, target):
    """Set board-specific parallel run attribute value, if not already set.
    Args:
      attr: Plain board run attribute name.
      default_value: Value to set.
      board: Board name.
      target: Build config name.
    """
    if not self.HasBoardParallel(attr, board, target):
      self.SetBoardParallel(attr, default_value, board, target)
  def GetBoardParallel(self, attr, board, target, timeout=0):
    """Get board-specific parallel run attribute value.
    Args:
      attr: Plain board run attribute name.
      board: Board name.
      target: Build config name.
      timeout: See GetParallel for description.
    Returns:
      The value found.
    """
    unique_attr = self._GetBoardAttrName(attr, board, target)
    try:
      return self.GetParallel(unique_attr, timeout=timeout)
    except ParallelAttributeError:
      # Clarify the AttributeError.
      raise ParallelAttributeError(attr, board=board, target=target)
  def _GetQueue(self, attr, strict=False):
    """Return the queue for the given attribute, if it exists.
    Args:
      attr: The run attribute name.
      strict: If True, then complain if queue for |attr| is not found.
    Returns:
      The LockableQueue for this attribute, if it has one, or None
      (assuming strict is False).
    Raises:
      ParallelAttributeError if no queue for this attribute is registered,
      meaning no parallel attribute by this name is known.
    """
    queue = self._queues.get(attr)
    if queue is None and strict:
      raise ParallelAttributeError(attr)
    return queue
  def SetParallel(self, attr, value):
    """Set the given parallel run attribute value.
    Called to set the value of any parallel run attribute.  The value is
    saved onto a multiprocessing queue for that attribute.
    Args:
      attr: Name of the attribute.
      value: Value to give the attribute.  This value must be pickleable.
    Raises:
      ParallelAttributeError if attribute is not a valid parallel attribute.
      AttrNotPickleableError if value cannot be pickled, meaning it cannot
        go through the queue system.
    """
    # Confirm that value can be pickled, because otherwise it will fail
    # in the queue.
    try:
      cPickle.dumps(value, cPickle.HIGHEST_PROTOCOL)
    except cPickle.PicklingError:
      raise AttrNotPickleableError(attr, value)
    queue = self._GetQueue(attr, strict=True)
    with queue.rlock:
      # First empty the queue.  Any value already on the queue is now stale.
      # The queue therefore holds at most one value: the latest one set.
      while True:
        try:
          queue.get(False)
        except Queue.Empty:
          break
      queue.put(value)
  def HasParallel(self, attr):
    """Return True if the given parallel run attribute is known and set.
    Args:
      attr: Name of the attribute.
    """
    try:
      queue = self._GetQueue(attr, strict=True)
      with queue.rlock:
        return not queue.empty()
    except ParallelAttributeError:
      return False
  def SetParallelDefault(self, attr, default_value):
    """Set the given parallel run attribute only if it is not already set.
    This leverages HasParallel and SetParallel in a convenient pattern.
    Args:
      attr: Name of the attribute.
      default_value: Value to give the attribute if it is not set.  This value
        must be pickleable.
    Raises:
      ParallelAttributeError if attribute is not a valid parallel attribute.
      AttrNotPickleableError if value cannot be pickled, meaning it cannot
        go through the queue system.
    """
    if not self.HasParallel(attr):
      self.SetParallel(attr, default_value)
  # TODO(mtennant): Add an option to log access, including the time to wait
  # or waited.  It could be enabled with an optional announce=False argument.
  # See GetParallel helper on BoardSpecificBuilderStage class for ideas.
  def GetParallel(self, attr, timeout=0):
    """Get value for the given parallel run attribute, optionally waiting.
    If the given parallel run attr already has a value in the queue it will
    return that value right away.  Otherwise, it will wait for a value to
    appear in the queue up to the timeout specified (timeout of None means
    wait forever) before returning the value found or raising AttrTimeoutError
    if a timeout was reached.
    Args:
      attr: The name of the run attribute.
      timeout: Timeout, in seconds.  A None value means wait forever,
        which is probably never a good idea.  A value of 0 does not wait at all.
    Raises:
      ParallelAttributeError if attribute is not set and timeout was 0.
      AttrTimeoutError if timeout is greater than 0 and timeout is reached
        before a value is available on the queue.
    """
    got_value = False
    queue = self._GetQueue(attr, strict=True)
    # First attempt to get a value off the queue, without the lock.  This
    # allows a blocking get to wait for a value to appear.
    try:
      value = queue.get(True, timeout)
      got_value = True
    except Queue.Empty:
      # This means there is nothing on the queue.  Let this fall through to
      # the locked code block to see if another process is in the process
      # of re-queuing a value.  Any process doing that will have a lock.
      pass
    # Now grab the queue lock and flush any other values that are on the queue.
    # This should only happen if another process put a value in after our first
    # queue.get above.  If so, accept the updated value.
    with queue.rlock:
      while True:
        try:
          value = queue.get(False)
          got_value = True
        except Queue.Empty:
          break
      if got_value:
        # First re-queue the value, then return it.  Re-queuing is what
        # lets other readers (and repeat reads) see the value again.
        queue.put(value)
        return value
      else:
        # Handle no value differently depending on whether timeout is 0.
        if timeout == 0:
          raise ParallelAttributeError(attr)
        else:
          raise AttrTimeoutError(attr)
class BoardRunAttributes(object):
  """Convenience class for accessing board-specific run attributes.
  Board-specific run attributes (actually board/target-specific) are saved in
  the RunAttributes object but under uniquified names.  A BoardRunAttributes
  object provides access to these attributes using their plain names by
  providing the board/target information where needed.
  For example, to access the breakpad_symbols_generated board run attribute on
  a regular RunAttributes object requires this:
  value = attrs.GetBoardParallel('breakpad_symbols_generated', board, target)
  But on a BoardRunAttributes object:
  boardattrs = BoardRunAttributes(attrs, board, target)
  ...
  value = boardattrs.GetParallel('breakpad_symbols_generated')
  The same goes for setting values.
  """
  __slots__ = ('_attrs', '_board', '_target')
  def __init__(self, attrs, board, target):
    """Initialize.
    Args:
      attrs: The main RunAttributes object.
      board: The board name this is specific to.
      target: The build config name this is specific to.
    """
    # All state is captured here; every method below simply forwards to
    # the corresponding *BoardParallel method with board/target filled in.
    self._attrs = attrs
    self._board = board
    self._target = target
  def SetParallel(self, attr, value, *args, **kwargs):
    """Set the value of parallel board attribute |attr| to |value|.
    Relay to SetBoardParallel on self._attrs, supplying board and target.
    See documentation on RunAttributes.SetBoardParallel for more details.
    """
    self._attrs.SetBoardParallel(attr, value, self._board, self._target,
                                 *args, **kwargs)
  def HasParallel(self, attr, *args, **kwargs):
    """Return True if parallel board attribute |attr| exists.
    Relay to HasBoardParallel on self._attrs, supplying board and target.
    See documentation on RunAttributes.HasBoardParallel for more details.
    """
    return self._attrs.HasBoardParallel(attr, self._board, self._target,
                                        *args, **kwargs)
  def SetParallelDefault(self, attr, default_value, *args, **kwargs):
    """Set the value of parallel board attribute |attr| to |value|, if not set.
    Relay to SetBoardParallelDefault on self._attrs, supplying board and target.
    See documentation on RunAttributes.SetBoardParallelDefault for more details.
    """
    self._attrs.SetBoardParallelDefault(attr, default_value, self._board,
                                        self._target, *args, **kwargs)
  def GetParallel(self, attr, *args, **kwargs):
    """Get the value of parallel board attribute |attr|.
    Relay to GetBoardParallel on self._attrs, supplying board and target.
    See documentation on RunAttributes.GetBoardParallel for more details.
    """
    return self._attrs.GetBoardParallel(attr, self._board, self._target,
                                        *args, **kwargs)
# TODO(mtennant): Consider renaming this _BuilderRunState, then renaming
# _RealBuilderRun to _BuilderRunBase.
class _BuilderRunBase(object):
  """Class to represent one run of a builder.

  Holds the run-wide state (options, pre-computed config values, run
  attributes) shared by a top-level run and all of its child runs.
  This class should never be instantiated directly, but instead be
  instantiated as part of a BuilderRun object.
  """
  # Class-level dict of RunAttributes objects to make it less
  # problematic to send BuilderRun objects between processes through
  # pickle. The 'attrs' attribute on a BuilderRun object will look
  # up the RunAttributes for that particular BuilderRun here.
  _ATTRS = {}
  __slots__ = (
      'config', # BuildConfig for this run.
      'options', # The cbuildbot options object for this run.
      # Run attributes set/accessed by stages during the run. To add support
      # for a new run attribute add it to the RunAttributes class above.
      '_attrs_id', # Object ID for looking up self.attrs.
      # Some pre-computed run configuration values.
      'buildnumber', # The build number for this run.
      'buildroot', # The build root path for this run.
      'debug', # Boolean, represents "dry run" concept, really.
      'manifest_branch', # The manifest branch to build and test for this run.
      # Some attributes are available as properties. In particular, attributes
      # that use self.config must be determined after __init__.
      # self.bot_id # Effective name of builder for this run.
      # TODO(mtennant): Other candidates here include:
      # trybot, buildbot, remote_trybot, chrome_root,
      # test = (config build_tests AND option tests)
  )
  def __init__(self, options, multiprocess_manager):
    """Initialize run-wide state from |options|.

    Args:
      options: The cbuildbot options object for this run.
      multiprocess_manager: A multiprocessing.Manager for shared state.
    """
    self.options = options
    # Note that self.config is filled in dynamically by either of the classes
    # that are actually instantiated: BuilderRun and ChildBuilderRun. In other
    # words, self.config can be counted on anywhere except in this __init__.
    # The implication is that any plain attributes that are calculated from
    # self.config contents must be provided as properties (or methods).
    # See the _RealBuilderRun class and its __getattr__ method for details.
    self.config = None
    # Create the RunAttributes object for this BuilderRun and save
    # the id number for it in order to look it up via attrs property.
    attrs = RunAttributes(multiprocess_manager)
    self._ATTRS[id(attrs)] = attrs
    self._attrs_id = id(attrs)
    # Fill in values for all pre-computed "run configs" now, which are frozen
    # by this time.
    # TODO(mtennant): Should this use os.path.abspath like builderstage does?
    self.buildroot = self.options.buildroot
    self.buildnumber = self.options.buildnumber
    self.manifest_branch = self.options.branch
    # For remote_trybot runs, options.debug is implied, but we want true dryrun
    # mode only if --debug was actually specified (i.e. options.debug_forced).
    # TODO(mtennant): Get rid of confusing debug and debug_forced, if at all
    # possible. Also, eventually use "dry_run" and "verbose" options instead to
    # represent two distinct concepts.
    self.debug = self.options.debug
    if self.options.remote_trybot:
      self.debug = self.options.debug_forced
    # Certain run attributes have sensible defaults which can be set here.
    # This allows all code to safely assume that the run attribute exists.
    attrs.chrome_version = None
    attrs.metadata = metadata_lib.CBuildbotMetadata(
        multiprocess_manager=multiprocess_manager)
  @property
  def bot_id(self):
    """Return the bot_id for this run."""
    return self.config.GetBotId(remote_trybot=self.options.remote_trybot)
  @property
  def attrs(self):
    """Look up the RunAttributes object for this BuilderRun object."""
    return self._ATTRS[self._attrs_id]
  def IsToTBuild(self):
    """Returns True if Builder is running on ToT."""
    return self.manifest_branch == 'master'
  def GetArchive(self):
    """Create an Archive object for this BuilderRun object."""
    # The Archive class is very lightweight, and is read-only, so it
    # is ok to generate a new one on demand. This also avoids worrying
    # about whether it can go through pickle.
    # Almost everything the Archive class does requires GetVersion(),
    # which means it cannot be used until the version has been settled on.
    # However, because it does have some use before then we provide
    # the GetVersion function itself to be called when needed later.
    return archive_lib.Archive(self.bot_id, self.GetVersion, self.options,
                               self.config)
  def GetBoardRunAttrs(self, board):
    """Create a BoardRunAttributes object for this run and given |board|."""
    return BoardRunAttributes(self.attrs, board, self.config.name)
  def ConstructDashboardURL(self, stage=None):
    """Return the dashboard URL.
    This is the direct link to buildbot logs as seen in build.chromium.org
    Args:
      stage: Link to a specific |stage|, otherwise the general buildbot log
    Returns:
      The fully formed URL
    """
    return validation_pool.ValidationPool.ConstructDashboardURL(
        self.config.overlays, self.options.remote_trybot,
        os.environ.get('BUILDBOT_BUILDERNAME', self.config.name),
        self.options.buildnumber, stage=stage)
  def ShouldBuildAutotest(self):
    """Return True if this run should build autotest and artifacts."""
    return self.config.build_tests and self.options.tests
  def ShouldUploadPrebuilts(self):
    """Return True if this run should upload prebuilts."""
    return self.options.prebuilts and self.config.prebuilts
  def ShouldReexecAfterSync(self):
    """Return True if this run should re-exec itself after sync stage."""
    if self.options.postsync_reexec and self.config.postsync_reexec:
      # Return True if this source is not in designated buildroot.
      abs_buildroot = os.path.abspath(self.buildroot)
      return not os.path.abspath(__file__).startswith(abs_buildroot)
    return False
  def ShouldPatchAfterSync(self):
    """Return True if this run should patch changes after sync stage."""
    return self.options.postsync_patch and self.config.postsync_patch
  @classmethod
  def GetVersionInfo(cls, buildroot):
    """Helper for picking apart various version bits.
    This method only exists so that tests can override it.
    """
    return manifest_version.VersionInfo.from_repo(buildroot)
  def GetVersion(self):
    """Calculate full R<chrome_version>-<chromeos_version> version string.
    It is required that the sync stage be run before this method is called.
    Returns:
      The version string for this run.
    Raises:
      AssertionError if the sync stage has not been run first.
    """
    # This method should never be called before the sync stage has run, or
    # it would return a confusing value.
    assert hasattr(self.attrs, 'release_tag'), 'Sync stage must run first.'
    verinfo = self.GetVersionInfo(self.buildroot)
    release_tag = self.attrs.release_tag
    if release_tag:
      calc_version = 'R%s-%s' % (verinfo.chrome_branch, release_tag)
    else:
      # Non-versioned builds need the build number to uniquify the image.
      calc_version = 'R%s-%s-b%s' % (verinfo.chrome_branch,
                                     verinfo.VersionString(),
                                     self.buildnumber)
    return calc_version
  def DetermineChromeVersion(self):
    """Determine the current Chrome version in buildroot now and return it.
    This uses the typical portage logic to determine which version of Chrome
    is active right now in the buildroot.
    Returns:
      The new value of attrs.chrome_version (e.g. "35.0.1863.0").
    """
    cpv = portage_utilities.BestVisible(constants.CHROME_CP,
                                        buildroot=self.buildroot)
    return cpv.version_no_rev.partition('_')[0]
class _RealBuilderRun(object):
  """Base BuilderRun class that manages self.config access.
  For any builder run, sometimes the build config is the top-level config and
  sometimes it is a "child" config. In either case, the config to use should
  override self.config for all cases. This class provides a mechanism for
  overriding self.config access generally.
  Also, methods that do more than access state for a BuilderRun should
  live here. In particular, any method that uses 'self' as an object
  directly should be here rather than _BuilderRunBase.
  """
  __slots__ = _BuilderRunBase.__slots__ + (
      '_run_base', # The _BuilderRunBase object where most functionality is.
      '_config', # Config to use for dynamically overriding self.config.
  )
  def __init__(self, run_base, build_config):
    """Bind |run_base| to |build_config| for this run.

    Args:
      run_base: The shared _BuilderRunBase for this run.
      build_config: BuildConfig that should appear as self.config.
    """
    self._run_base = run_base
    self._config = build_config
    # Make sure self.attrs has board-specific attributes for each board
    # in build_config.
    for board in build_config.boards:
      self.attrs.RegisterBoardAttrs(board, build_config.name)
  def __getattr__(self, attr):
    """Relay attribute access to _run_base with self.config overridden."""
    # Remember, __getattr__ only called if attribute was not found normally.
    # In normal usage, the __init__ guarantees that self._run_base and
    # self._config will be present. However, the unpickle process bypasses
    # __init__, and this object must be pickle-able. That is why we access
    # self._run_base and self._config through __getattribute__ here, otherwise
    # unpickling results in infinite recursion.
    # TODO(mtennant): Revisit this if pickling support is changed to go through
    # the __init__ method, such as by supplying __reduce__ method.
    run_base = self.__getattribute__('_run_base')
    config = self.__getattribute__('_config')
    try:
      # run_base.config should always be None except when accessed through
      # this routine. Override the value here, then undo later.
      run_base.config = config
      result = getattr(run_base, attr)
      if isinstance(result, types.MethodType):
        # Make sure run_base.config is also managed when the method is called.
        @functools.wraps(result)
        def FuncWrapper(*args, **kwargs):
          run_base.config = config
          try:
            return result(*args, **kwargs)
          finally:
            run_base.config = None
        # TODO(mtennant): Find a way to make the following actually work. It
        # makes pickling more complicated, unfortunately.
        # Cache this function wrapper to re-use next time without going through
        # __getattr__ again. This ensures that the same wrapper object is used
        # each time, which is nice for identity and equality checks. Subtle
        # gotcha that we accept: if the function itself on run_base is replaced
        # then this will continue to provide the behavior of the previous one.
        #setattr(self, attr, FuncWrapper)
        return FuncWrapper
      else:
        return result
    finally:
      run_base.config = None
  def _GetChildren(self):
    """Get ChildBuilderRun objects for child configs, if they exist.
    Returns:
      List of ChildBuilderRun objects if self.config has child_configs. None
      otherwise.
    """
    # If there are child configs, construct a list of ChildBuilderRun objects
    # for those child configs and return that.
    if self.config.child_configs:
      return [ChildBuilderRun(self, ix)
              for ix in range(len(self.config.child_configs))]
  def GetUngroupedBuilderRuns(self):
    """Same as _GetChildren, but defaults to [self] if no children exist.
    Returns:
      Result of self._GetChildren, if children exist, otherwise [self].
    """
    return self._GetChildren() or [self]
  def GetBuilderIds(self):
    """Return a list of builder names for this config and the child configs."""
    bot_ids = [self.config.name]
    for config in self.config.child_configs:
      if config.name:
        bot_ids.append(config.name)
    return bot_ids
class BuilderRun(_RealBuilderRun):
  """BuilderRun for a top-level (parent) build configuration.

  Creates its own _BuilderRunBase and binds it to |build_config|.
  """
  def __init__(self, options, build_config, multiprocess_manager):
    """Set up the run state for a top-level config.

    Args:
      options: Command line options from this cbuildbot run.
      build_config: Build config for this cbuildbot run.
      multiprocess_manager: A multiprocessing.Manager.
    """
    base = _BuilderRunBase(options, multiprocess_manager)
    super(BuilderRun, self).__init__(base, build_config)
class ChildBuilderRun(_RealBuilderRun):
  """BuilderRun for a "child" build config of a parent run."""
  def __init__(self, builder_run, child_index):
    """Share the parent's run base while binding the child's config.

    Args:
      builder_run: BuilderRun for the parent (main) cbuildbot run. Extract
        the _BuilderRunBase from it to make sure the same base is used for
        both the main cbuildbot run and any child runs.
      child_index: The child index of this child run, used to index into
        the main run's config.child_configs.
    """
    # pylint: disable=W0212
    shared_base = builder_run._run_base
    child_config = builder_run.config.child_configs[child_index]
    super(ChildBuilderRun, self).__init__(shared_base, child_config)
| chadversary/chromiumos.chromite | cbuildbot/cbuildbot_run.py | Python | bsd-3-clause | 32,661 |
"""
*Scratch_n_sketch scripting
* Matrix demo
"""
from libs.board import *
# Initialize the board wrapper.
mbd = scratch_n_sketch()
# Connect to the board (serial link managed by libs.board).
mbd.connect()
# Start of the demo.
print('Draw circles demo')
# Black background.
mbd.backGroundColor(0, 0, 0)
# Rotate the display by 90 degrees.
mbd.rotateDisplay(mbd.rotate_90)
wait(5)
# Starting coordinates for the first circle.
xloc = 20
yloc = 10
# Draw a 15 x 12 grid of circles, stepping 20 units between centers.
for x in range(0, 15):
    for y in range(0, 12):
        # Select a random pen color for each circle.
        mbd.penColor(randomNumber(0, 255),
                     randomNumber(0, 255),
                     randomNumber(0, 255))
        mbd.drawCircle(xloc, yloc, 5)
        yloc += 20
        # Pause 50 time units between circles (the old comment said 10ms,
        # which did not match the wait(50) call).
        wait(50)
    xloc += 20
    yloc = 10
# Disconnect the serial port when done.
mbd.disconnect()
| warefab/scratch-n-sketch | python/circles.py | Python | mit | 688 |
import os
import stat
from datetime import datetime
from ._compat import open_stream, text_type, filename_to_ui, \
get_filesystem_encoding, get_streerror, _get_argv_encoding, PY2
from .exceptions import BadParameter
from .utils import safecall, LazyFile
class ParamType(object):
    """Base helper used to convert parameter values through types.

    A valid type must:

    * have a name
    * pass ``None`` through unchanged
    * convert from a string
    * pass an already-converted result through unchanged
      (i.e. conversion is idempotent)
    * tolerate ``param`` and ``ctx`` being `None`, which happens when
      the type is used with prompt inputs.
    """
    is_composite = False
    #: the descriptive name of this type
    name = None
    #: splitter used when a list of this type is pulled from a string
    #: environment variable.  `None` means any whitespace.  Paths and
    #: files split on ``os.path.pathsep`` instead (":" on Unix and ";"
    #: on Windows).
    envvar_list_splitter = None
    def __call__(self, value, param=None, ctx=None):
        if value is None:
            return None
        return self.convert(value, param, ctx)
    def get_metavar(self, param):
        """Return the metavar default for this param if it provides one."""
    def get_missing_message(self, param):
        """Optionally return extra information about a missing parameter.
        .. versionadded:: 2.0
        """
    def convert(self, value, param, ctx):
        """Convert ``value``; never invoked for missing (`None`) values."""
        return value
    def split_envvar_value(self, rv):
        """Split an environment variable value into individual items.

        With the default `None` splitter (whitespace), leading and
        trailing whitespace is ignored; otherwise leading/trailing
        splitters usually produce empty items.
        """
        if rv is None:
            rv = ''
        return rv.split(self.envvar_list_splitter)
    def fail(self, message, param=None, ctx=None):
        """Raise a :exc:`BadParameter` carrying ``message``."""
        raise BadParameter(message, ctx=ctx, param=param)
class CompositeParamType(ParamType):
    """Base class for types that consume a fixed number of values (see Tuple)."""
    is_composite = True
    @property
    def arity(self):
        """Number of values this composite type consumes; subclasses must override."""
        raise NotImplementedError()
class FuncParamType(ParamType):
    """Adapt an arbitrary callable into a parameter type.

    The callable's ``__name__`` becomes the type name, and any
    ``ValueError`` it raises is reported as a bad parameter.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__
    def convert(self, value, param, ctx):
        """Run ``value`` through the wrapped callable."""
        try:
            return self.func(value)
        except ValueError:
            # Build a printable form of the offending value for the error
            # message; on Python 2 fall back to lenient UTF-8 decoding.
            try:
                shown = text_type(value)
            except UnicodeError:
                shown = str(value).decode('utf-8', 'replace')
            self.fail(shown, param, ctx)
class UnprocessedParamType(ParamType):
    """Type that performs no conversion at all (exposed as ``UNPROCESSED``)."""
    name = 'text'
    def convert(self, value, param, ctx):
        """Return ``value`` untouched (bytes stay bytes, text stays text)."""
        return value
    def __repr__(self):
        return 'UNPROCESSED'
class StringParamType(ParamType):
    """Unicode string parameter type (the implicit default).

    Bytes input is decoded using the argv encoding, then the filesystem
    encoding, and finally UTF-8 with replacement characters, so callers
    always receive text.
    """
    name = 'text'
    def convert(self, value, param, ctx):
        """Decode bytes to text; pass all other values through unchanged."""
        if isinstance(value, bytes):
            enc = _get_argv_encoding()
            try:
                value = value.decode(enc)
            except UnicodeError:
                fs_enc = get_filesystem_encoding()
                if fs_enc != enc:
                    try:
                        value = value.decode(fs_enc)
                    except UnicodeError:
                        value = value.decode('utf-8', 'replace')
                else:
                    # BUGFIX: previously, when the filesystem encoding was the
                    # same as the argv encoding, a failed decode fell through
                    # and raw bytes were returned from a 'text' type.  Always
                    # fall back to lenient UTF-8 so the result is a string.
                    value = value.decode('utf-8', 'replace')
            return value
        return value
    def __repr__(self):
        return 'STRING'
class Choice(ParamType):
    """A type restricting values to a fixed set of supported strings.

    You should only pass a list or tuple of choices.  Other iterables
    (like generators) may lead to surprising results.

    See :ref:`choice-opts` for an example.

    :param case_sensitive: Set to false to make choices case
                           insensitive. Defaults to true.
    """
    name = 'choice'
    def __init__(self, choices, case_sensitive=True):
        self.choices = choices
        self.case_sensitive = case_sensitive
    def get_metavar(self, param):
        return '[%s]' % '|'.join(self.choices)
    def get_missing_message(self, param):
        return 'Choose from:\n\t%s.' % ',\n\t'.join(self.choices)
    def convert(self, value, param, ctx):
        """Match ``value`` against the choices, exactly first, then normalized."""
        # Fast path: the raw value is an exact match.
        if value in self.choices:
            return value
        # Otherwise normalize (context token_normalize_func first, then
        # case folding) and retry.  The original `value` is kept around
        # so the failure message reports what the user actually typed.
        candidate = value
        pool = self.choices
        if ctx is not None and ctx.token_normalize_func is not None:
            candidate = ctx.token_normalize_func(value)
            pool = [ctx.token_normalize_func(choice) for choice in self.choices]
        if not self.case_sensitive:
            candidate = candidate.lower()
            pool = [choice.lower() for choice in pool]
        if candidate in pool:
            return candidate
        self.fail('invalid choice: %s. (choose from %s)' %
                  (value, ', '.join(self.choices)), param, ctx)
    def __repr__(self):
        return 'Choice(%r)' % list(self.choices)
class DateTime(ParamType):
    """The DateTime type converts date strings into `datetime` objects.
    The format strings which are checked are configurable, but default to some
    common (non-timezone aware) ISO 8601 formats.
    When specifying *DateTime* formats, you should only pass a list or a tuple.
    Other iterables, like generators, may lead to surprising results.
    The format strings are processed using ``datetime.strptime``, and this
    consequently defines the format strings which are allowed.
    Parsing is tried using each format, in order, and the first format which
    parses successfully is used.
    :param formats: A list or tuple of date format strings, in the order in
                    which they should be tried. Defaults to
                    ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``,
                    ``'%Y-%m-%d %H:%M:%S'``.
    """
    name = 'datetime'
    def __init__(self, formats=None):
        self.formats = formats or [
            '%Y-%m-%d',
            '%Y-%m-%dT%H:%M:%S',
            '%Y-%m-%d %H:%M:%S'
        ]
    def get_metavar(self, param):
        return '[{}]'.format('|'.join(self.formats))
    def _try_to_convert_date(self, value, format):
        """Parse ``value`` with ``format``; return None if it does not match."""
        try:
            return datetime.strptime(value, format)
        except ValueError:
            return None
    def convert(self, value, param, ctx):
        """Try each configured format in order; the first success wins."""
        for format in self.formats:
            dtime = self._try_to_convert_date(value, format)
            if dtime:
                return dtime
        # BUGFIX: forward `param` and `ctx` to fail() -- every other type in
        # this module does so -- so the error is attributed to the offending
        # parameter instead of being raised as a bare, context-free
        # BadParameter.
        self.fail(
            'invalid datetime format: {}. (choose from {})'.format(
                value, ', '.join(self.formats)), param, ctx)
    def __repr__(self):
        return 'DateTime'
class IntParamType(ParamType):
    """Integer parameter type (exposed as ``INT``)."""
    name = 'integer'
    def convert(self, value, param, ctx):
        """Convert ``value`` with ``int()``, failing on malformed input."""
        try:
            return int(value)
        except (ValueError, UnicodeError):
            self.fail('%s is not a valid integer' % value, param, ctx)
    def __repr__(self):
        return 'INT'
class IntRange(IntParamType):
    """Like :data:`click.INT`, but restricted to a range.

    By default a value outside the range fails; with ``clamp`` it is
    silently pinned to the nearest edge instead.

    See :ref:`ranges` for an example.
    """
    name = 'integer range'
    def __init__(self, min=None, max=None, clamp=False):
        self.min = min
        self.max = max
        self.clamp = clamp
    def convert(self, value, param, ctx):
        """Convert to int, then enforce (or clamp to) the configured range."""
        rv = IntParamType.convert(self, value, param, ctx)
        below = self.min is not None and rv < self.min
        above = self.max is not None and rv > self.max
        if self.clamp:
            if below:
                return self.min
            if above:
                return self.max
        if below or above:
            if self.min is None:
                self.fail('%s is bigger than the maximum valid value '
                          '%s.' % (rv, self.max), param, ctx)
            elif self.max is None:
                self.fail('%s is smaller than the minimum valid value '
                          '%s.' % (rv, self.min), param, ctx)
            else:
                self.fail('%s is not in the valid range of %s to %s.'
                          % (rv, self.min, self.max), param, ctx)
        return rv
    def __repr__(self):
        return 'IntRange(%r, %r)' % (self.min, self.max)
class FloatParamType(ParamType):
    """Floating point parameter type (exposed as ``FLOAT``)."""
    name = 'float'
    def convert(self, value, param, ctx):
        """Convert ``value`` with ``float()``, failing on malformed input."""
        try:
            return float(value)
        except (UnicodeError, ValueError):
            self.fail('%s is not a valid floating point value' %
                      value, param, ctx)
    def __repr__(self):
        return 'FLOAT'
class FloatRange(FloatParamType):
    """Like :data:`click.FLOAT`, but restricted to a range.

    By default a value outside the range fails; with ``clamp`` it is
    silently pinned to the nearest edge instead.

    See :ref:`ranges` for an example.
    """
    name = 'float range'
    def __init__(self, min=None, max=None, clamp=False):
        self.min = min
        self.max = max
        self.clamp = clamp
    def convert(self, value, param, ctx):
        """Convert to float, then enforce (or clamp to) the configured range."""
        rv = FloatParamType.convert(self, value, param, ctx)
        below = self.min is not None and rv < self.min
        above = self.max is not None and rv > self.max
        if self.clamp:
            if below:
                return self.min
            if above:
                return self.max
        if below or above:
            if self.min is None:
                self.fail('%s is bigger than the maximum valid value '
                          '%s.' % (rv, self.max), param, ctx)
            elif self.max is None:
                self.fail('%s is smaller than the minimum valid value '
                          '%s.' % (rv, self.min), param, ctx)
            else:
                self.fail('%s is not in the valid range of %s to %s.'
                          % (rv, self.min, self.max), param, ctx)
        return rv
    def __repr__(self):
        return 'FloatRange(%r, %r)' % (self.min, self.max)
class BoolParamType(ParamType):
    """Boolean parameter type accepting common true/false spellings."""
    name = 'boolean'
    def convert(self, value, param, ctx):
        """Map bools through unchanged; parse strings case-insensitively."""
        if isinstance(value, bool):
            return value
        normalized = value.lower()
        if normalized in ('true', 't', '1', 'yes', 'y'):
            return True
        if normalized in ('false', 'f', '0', 'no', 'n'):
            return False
        # Note: the message reports the lowercased value, matching the
        # original behavior (the input was rebound before failing).
        self.fail('%s is not a valid boolean' % normalized, param, ctx)
    def __repr__(self):
        return 'BOOL'
class UUIDParameterType(ParamType):
    """Parameter type that parses values into :class:`uuid.UUID` objects."""
    name = 'uuid'
    def convert(self, value, param, ctx):
        """Parse ``value`` as a UUID, failing on malformed input."""
        # Imported lazily so importing this module stays cheap.
        import uuid
        try:
            if PY2 and isinstance(value, text_type):
                # uuid.UUID on Python 2 expects a native (byte) string.
                value = value.encode('ascii')
            return uuid.UUID(value)
        except (UnicodeError, ValueError):
            self.fail('%s is not a valid UUID value' % value, param, ctx)
    def __repr__(self):
        return 'UUID'
class File(ParamType):
    """Declares a parameter to be a file for reading or writing.  The file
    is automatically closed once the context tears down (after the command
    finished working).
    Files can be opened for reading or writing. The special value ``-``
    indicates stdin or stdout depending on the mode.
    By default, the file is opened for reading text data, but it can also be
    opened in binary mode or for writing. The encoding parameter can be used
    to force a specific encoding.
    The `lazy` flag controls if the file should be opened immediately or upon
    first IO. The default is to be non-lazy for standard input and output
    streams as well as files opened for reading, `lazy` otherwise. When opening a
    file lazily for reading, it is still opened temporarily for validation, but
    will not be held open until first IO. lazy is mainly useful when opening
    for writing to avoid creating the file until it is needed.
    Starting with Click 2.0, files can also be opened atomically in which
    case all writes go into a separate file in the same folder and upon
    completion the file will be moved over to the original location.  This
    is useful if a file regularly read by other users is modified.
    See :ref:`file-args` for more information.
    """
    name = 'filename'
    envvar_list_splitter = os.path.pathsep
    def __init__(self, mode='r', encoding=None, errors='strict', lazy=None,
                 atomic=False):
        self.mode = mode
        self.encoding = encoding
        self.errors = errors
        self.lazy = lazy
        self.atomic = atomic
    def resolve_lazy_flag(self, value):
        """Decide laziness for |value|: explicit flag wins; stdio is eager;
        write modes default to lazy, read modes to eager."""
        if self.lazy is not None:
            return self.lazy
        if value == '-':
            return False
        elif 'w' in self.mode:
            return True
        return False
    def convert(self, value, param, ctx):
        """Open |value| (or pass through an already-open file object)."""
        try:
            # Already a file-like object: hand it back untouched.
            if hasattr(value, 'read') or hasattr(value, 'write'):
                return value
            lazy = self.resolve_lazy_flag(value)
            if lazy:
                f = LazyFile(value, self.mode, self.encoding, self.errors,
                             atomic=self.atomic)
                if ctx is not None:
                    ctx.call_on_close(f.close_intelligently)
                return f
            f, should_close = open_stream(value, self.mode,
                                          self.encoding, self.errors,
                                          atomic=self.atomic)
            # If a context is provided, we automatically close the file
            # at the end of the context execution (or flush out).  If a
            # context does not exist, it's the caller's responsibility to
            # properly close the file.  This for instance happens when the
            # type is used with prompts.
            if ctx is not None:
                if should_close:
                    ctx.call_on_close(safecall(f.close))
                else:
                    ctx.call_on_close(safecall(f.flush))
            return f
        except (IOError, OSError) as e:
            self.fail('Could not open file: %s: %s' % (
                filename_to_ui(value),
                get_streerror(e),
            ), param, ctx)
class Path(ParamType):
    """The path type is similar to the :class:`File` type but it performs
    different checks.  First of all, instead of returning an open file
    handle it returns just the filename.  Secondly, it can perform various
    basic checks about what the file or directory should be.
    .. versionchanged:: 6.0
       `allow_dash` was added.
    :param exists: if set to true, the file or directory needs to exist for
                   this value to be valid.  If this is not required and a
                   file does indeed not exist, then all further checks are
                   silently skipped.
    :param file_okay: controls if a file is a possible value.
    :param dir_okay: controls if a directory is a possible value.
    :param writable: if true, a writable check is performed.
    :param readable: if true, a readable check is performed.
    :param resolve_path: if this is true, then the path is fully resolved
                         before the value is passed onwards.  This means
                         that it's absolute and symlinks are resolved.  It
                         will not expand a tilde-prefix, as this is
                         supposed to be done by the shell only.
    :param allow_dash: If this is set to `True`, a single dash to indicate
                       standard streams is permitted.
    :param path_type: optionally a string type that should be used to
                      represent the path.  The default is `None` which
                      means the return value will be either bytes or
                      unicode depending on what makes most sense given the
                      input data Click deals with.
    """
    envvar_list_splitter = os.path.pathsep
    def __init__(self, exists=False, file_okay=True, dir_okay=True,
                 writable=False, readable=True, resolve_path=False,
                 allow_dash=False, path_type=None):
        self.exists = exists
        self.file_okay = file_okay
        self.dir_okay = dir_okay
        self.writable = writable
        self.readable = readable
        self.resolve_path = resolve_path
        self.allow_dash = allow_dash
        self.type = path_type
        # Derive the human-readable name/label from which kinds are allowed.
        if self.file_okay and not self.dir_okay:
            self.name = 'file'
            self.path_type = 'File'
        elif self.dir_okay and not self.file_okay:
            self.name = 'directory'
            self.path_type = 'Directory'
        else:
            self.name = 'path'
            self.path_type = 'Path'
    def coerce_path_result(self, rv):
        """Coerce |rv| to the configured bytes/text path type, if any."""
        if self.type is not None and not isinstance(rv, self.type):
            if self.type is text_type:
                rv = rv.decode(get_filesystem_encoding())
            else:
                rv = rv.encode(get_filesystem_encoding())
        return rv
    def convert(self, value, param, ctx):
        """Validate |value| against the configured checks and return the path."""
        rv = value
        is_dash = self.file_okay and self.allow_dash and rv in (b'-', '-')
        if not is_dash:
            if self.resolve_path:
                rv = os.path.realpath(rv)
            try:
                st = os.stat(rv)
            except OSError:
                if not self.exists:
                    return self.coerce_path_result(rv)
                self.fail('%s "%s" does not exist.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            if not self.file_okay and stat.S_ISREG(st.st_mode):
                self.fail('%s "%s" is a file.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            if not self.dir_okay and stat.S_ISDIR(st.st_mode):
                self.fail('%s "%s" is a directory.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            # NOTE(review): stat() checks the resolved path `rv`, while the
            # access checks below use the original `value` -- presumably
            # equivalent in practice, but worth confirming for cases where
            # resolve_path changes the path (symlinks).
            if self.writable and not os.access(value, os.W_OK):
                self.fail('%s "%s" is not writable.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            if self.readable and not os.access(value, os.R_OK):
                self.fail('%s "%s" is not readable.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
        return self.coerce_path_result(rv)
class Tuple(CompositeParamType):
    """Composite type applying a distinct type to each of `nargs` values.

    Click normally applies a single type to every value; when `nargs` is
    a fixed count and each position needs its own type, use this class
    (it is also selected automatically when a Python tuple literal is
    used as a type).

    For more information see :ref:`tuple-type`.

    :param types: a list of types that should be used for the tuple items.
    """
    def __init__(self, types):
        self.types = [convert_type(item) for item in types]
    @property
    def name(self):
        inner = " ".join(ty.name for ty in self.types)
        return "<%s>" % inner
    @property
    def arity(self):
        return len(self.types)
    def convert(self, value, param, ctx):
        """Convert each item with its positional type; arity must match."""
        if len(value) != len(self.types):
            raise TypeError('It would appear that nargs is set to conflict '
                            'with the composite type arity.')
        converted = []
        for ty, item in zip(self.types, value):
            converted.append(ty(item, param, ctx))
        return tuple(converted)
def convert_type(ty, default=None):
    """Convert a callable or python type into the most appropriate
    param type instance.

    The order of the checks below is significant: a guessed ``bool``
    (inferred from a default) must fall through to STRING, not BOOL.
    """
    guessed_type = False
    # No explicit type: infer one from the default value.
    if ty is None and default is not None:
        ty = tuple(map(type, default)) if isinstance(default, tuple) \
            else type(default)
        guessed_type = True
    if isinstance(ty, tuple):
        return Tuple(ty)
    if isinstance(ty, ParamType):
        return ty
    if ty is text_type or ty is str or ty is None:
        return STRING
    if ty is int:
        return INT
    # Booleans are only okay if not guessed.  This is done because for
    # flags the default value is actually a bit of a lie in that it
    # indicates which of the flags is the one we want.  See get_default()
    # for more information.
    if ty is bool and not guessed_type:
        return BOOL
    if ty is float:
        return FLOAT
    if guessed_type:
        return STRING
    # Catch a common mistake: passing the class instead of an instance.
    if __debug__:
        try:
            if issubclass(ty, ParamType):
                raise AssertionError('Attempted to use an uninstantiated '
                                     'parameter type (%s).' % ty)
        except TypeError:
            pass
    return FuncParamType(ty)
#: A dummy parameter type that just does nothing. From a user's
#: perspective this appears to just be the same as `STRING` but internally
#: no string conversion takes place. This is necessary to achieve the
#: same bytes/unicode behavior on Python 2/3 in situations where you want
#: to not convert argument types. This is usually useful when working
#: with file paths as they can appear in bytes and unicode.
#:
#: For path related uses the :class:`Path` type is a better choice but
#: there are situations where an unprocessed type is useful which is why
#: it is provided.
#:
#: .. versionadded:: 4.0
UNPROCESSED = UnprocessedParamType()
#: A unicode string parameter type which is the implicit default. This
#: can also be selected by using ``str`` as type.
STRING = StringParamType()
#: An integer parameter. This can also be selected by using ``int`` as
#: type.
INT = IntParamType()
#: A floating point value parameter. This can also be selected by using
#: ``float`` as type.
FLOAT = FloatParamType()
#: A boolean parameter. This is the default for boolean flags. This can
#: also be selected by using ``bool`` as a type.
BOOL = BoolParamType()
#: A UUID parameter.
UUID = UUIDParameterType()
| astaninger/speakout | venv/lib/python3.6/site-packages/click/types.py | Python | mit | 23,287 |
from handlers.handler import Handler
# highway=* values that count as vehicular roads for this check.
_HIGHWAY_ROAD_TAGS = {'road', 'track', 'living_street', 'service', 'unclassified', 'residential', 'tertiary',
                      'tertiary_link', 'secondary', 'secondary_link', 'primary', 'primary_link', 'trunk', 'trunk_link',
                      'motorway', 'motorway_link'}
# Issue-type metadata; the user-facing text is intentionally in Russian.
_TRAFFIC_SIGNALS_NOT_ON_ROAD = {
    'title': 'Светофор не на дороге',
    'help_text': """Светофор расположен не на дороге.""",
}
class HighwayTrafficSignalsChecker(Handler):
    """Two-pass checker flagging traffic-signal nodes not used by any road way.

    Pass 0 collects every highway=traffic_signals node id as a candidate;
    pass 1 removes each candidate referenced by a road-class way; finish()
    reports whatever remains.
    """
    def __init__(self):
        # Ids of traffic-signal nodes not (yet) seen on any road way.
        self._not_on_road = set()
    def process_iteration(self, obj, iteration):
        """Feed one OSM object from pass |iteration| into the checker."""
        if iteration == 0:
            if obj.get('highway') == 'traffic_signals' and obj['@type'] == 'node':
                self._not_on_road.add(obj['@id'])
        elif iteration == 1:
            if self._not_on_road:
                if obj['@type'] == 'way' and obj.get('highway') in _HIGHWAY_ROAD_TAGS:
                    # Any candidate referenced by a road way is fine; drop it.
                    # Set difference replaces the previous copy-list-and-remove
                    # loop: same result, single pass over the way's nodes, no
                    # intermediate list/set copies.
                    self._not_on_road.difference_update(obj['@nodes'])
    def is_iteration_required(self, iteration):
        """Request exactly two passes over the data (iterations 0 and 1)."""
        return iteration < 2
    def finish(self, issues):
        """Register the issue type and report every remaining candidate node."""
        issues.add_issue_type('errors/traffic_signals/not_on_highway', _TRAFFIC_SIGNALS_NOT_ON_ROAD)
        for node_id in self._not_on_road:
            issues.add_issue_obj('errors/traffic_signals/not_on_highway', 'node', node_id)
| n0s0r0g/perfect_OSM | handlers/checkers/highway/traffic_signals.py | Python | mit | 1,565 |
#!/usr/bin/env python
from setuptools import setup, find_packages
# Trove classifiers describing supported interpreters and project status.
_CLASSIFIERS = [
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.3',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: Implementation :: CPython',
    'Programming Language :: Python :: Implementation :: PyPy',
    'Development Status :: 3 - Alpha',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: MIT License',
    'Topic :: Database',
]
# Package metadata for Tornado-MySQL, a pure-Python MySQL driver that
# integrates with the Tornado IOLoop.
setup(
    name="Tornado-MySQL",
    version="0.5.1",
    url='https://github.com/PyMySQL/Tornado-MySQL',
    author='INADA Naoki',
    author_email='songofacandy@gmail.com',
    description='Pure Python MySQL Driver for Tornado',
    license="MIT",
    install_requires=['tornado>=4.0'],
    packages=find_packages(),
    classifiers=_CLASSIFIERS,
)
| PyMySQL/Tornado-MySQL | setup.py | Python | mit | 1,007 |
class _TemplateMetaclass(type):
    # Minimal metaclass whose __init__ adds no behavior beyond delegating to
    # type.__init__ -- presumably a stripped-down reproduction of
    # string._TemplateMetaclass; confirm before extending.
    def __init__(cls, name, bases, dct):
        """Delegate class initialization to the `type` metaclass."""
        super(_TemplateMetaclass, cls).__init__(name, bases, dct)
# -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2014-2020)
#
# This file is part of GWpy.
#
# GWpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWpy. If not, see <http://www.gnu.org/licenses/>.
"""Utilities for data input/output in standard GW formats.
"""
__author__ = "Duncan Macleod <duncan.macleod@ligo.org>"
| areeda/gwpy | gwpy/io/__init__.py | Python | gpl-3.0 | 831 |
###############################################################################
##
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from vistrails.core.bundles import py_import
# Ensure dulwich is importable before the imports below, installing it
# through the appropriate platform package manager (or pip) if missing.
py_import('dulwich', {
    'pip': 'dulwich',
    'linux-debian': 'python-dulwich',
    'linux-ubuntu': 'python-dulwich',
    'linux-fedora': 'python-dulwich'})
from vistrails.core import debug
from dulwich.errors import NotCommitError, NotGitRepository
from dulwich.repo import Repo
from dulwich.objects import Commit, Blob, Tree, object_header
from dulwich.pack import iter_sha1
from dulwich.walk import Walker
from itertools import chain
import os
import shutil
import stat
import tempfile
class GitRepo(object):
    """Wrapper around a dulwich git repository used as a content-addressed
    store for persisted files and directories.

    Objects are addressed by path name within the tree of a commit
    (default ``HEAD``); helpers exist to extract blobs/trees to disk,
    compute git hashes for local paths, and commit new content.
    """

    def __init__(self, path):
        """Open the repository at *path*, initializing it if necessary.

        Raises IOError if *path* exists but is not a directory.
        """
        if os.path.exists(path) and not os.path.isdir(path):
            raise IOError('Git repository "%s" must be a directory.' %
                          path)
        # Previously this try/except was nested inside an
        # "if os.path.exists(path)" block, so a nonexistent path left
        # self.repo unset entirely.  Attempt to open unconditionally and
        # fall back to init; the second argument asks dulwich to create
        # the directory first when the path itself is missing.
        try:
            self.repo = Repo(path)
        except NotGitRepository:
            # repo does not exist
            self.repo = Repo.init(path, not os.path.exists(path))
        # Temporary files/dirs handed out by get_* helpers so callers can
        # clean them up later.
        self.temp_persist_files = []

    def _get_commit(self, version="HEAD"):
        """Resolve *version* to a Commit object or raise NotCommitError."""
        commit = self.repo[version]
        if not isinstance(commit, Commit):
            raise NotCommitError(commit)
        return commit

    def get_type(self, name, version="HEAD"):
        """Return "tree" or "blob" for the entry *name* at *version*.

        Raises KeyError if no such entry exists.
        """
        commit = self._get_commit(version)
        tree = self.repo.tree(commit.tree)
        if name not in tree:
            raise KeyError('Cannot find object "%s"' % name)
        # Tree entries are (mode, sha); directories carry the S_IFDIR bit.
        if tree[name][0] & stat.S_IFDIR:
            return "tree"
        else:
            return "blob"

    def get_path(self, name, version="HEAD", path_type=None, out_name=None,
                 out_suffix=''):
        """Extract *name* (blob or tree) to disk; returns the output path."""
        if path_type is None:
            path_type = self.get_type(name, version)
        if path_type == 'tree':
            return self.get_dir(name, version, out_name, out_suffix)
        elif path_type == 'blob':
            return self.get_file(name, version, out_name, out_suffix)
        raise TypeError("Unknown path type '%s'" % path_type)

    def _write_blob(self, blob_sha, out_fname=None, out_suffix=''):
        """Write the blob *blob_sha* to *out_fname* (or a new temp file).

        Returns the filename written.  Temp files are tracked in
        self.temp_persist_files for later cleanup.
        """
        if out_fname is None:
            # create a temporary file
            (fd, out_fname) = tempfile.mkstemp(suffix=out_suffix,
                                               prefix='vt_persist')
            os.close(fd)
            self.temp_persist_files.append(out_fname)
        else:
            out_dirname = os.path.dirname(out_fname)
            if out_dirname and not os.path.exists(out_dirname):
                os.makedirs(out_dirname)
        blob = self.repo.get_blob(blob_sha)
        with open(out_fname, "wb") as f:
            for b in blob.as_raw_chunks():
                f.write(b)
        return out_fname

    def get_file(self, name, version="HEAD", out_fname=None,
                 out_suffix=''):
        """Extract blob *name* at *version* to disk; returns the filename."""
        commit = self._get_commit(version)
        tree = self.repo.tree(commit.tree)
        if name not in tree:
            raise KeyError('Cannot find blob "%s"' % name)
        blob_sha = tree[name][1]
        out_fname = self._write_blob(blob_sha, out_fname, out_suffix)
        return out_fname

    def get_dir(self, name, version="HEAD", out_dirname=None,
                out_suffix=''):
        """Extract tree *name* at *version* to disk; returns the directory."""
        if out_dirname is None:
            # create a temporary directory
            out_dirname = tempfile.mkdtemp(suffix=out_suffix,
                                           prefix='vt_persist')
            self.temp_persist_files.append(out_dirname)
        elif not os.path.exists(out_dirname):
            os.makedirs(out_dirname)
        commit = self._get_commit(version)
        tree = self.repo.tree(commit.tree)
        if name not in tree:
            raise KeyError('Cannot find tree "%s"' % name)
        subtree_id = tree[name][1]
        # Walk the subtree recursively, writing every contained blob.
        for entry in self.repo.object_store.iter_tree_contents(subtree_id):
            out_fname = os.path.join(out_dirname, entry.path)
            self._write_blob(entry.sha, out_fname)
        return out_dirname

    def get_hash(self, name, version="HEAD", path_type=None):
        """Return the git sha of entry *name* at *version*.

        *path_type* is accepted for interface symmetry with get_path but
        is not needed: blobs and trees are looked up identically.
        """
        commit = self._get_commit(version)
        tree = self.repo.tree(commit.tree)
        if name not in tree:
            raise KeyError('Cannot find object "%s"' % name)
        return tree[name][1]

    @staticmethod
    def compute_blob_hash(fname, chunk_size=1<<16):
        """Compute the git blob sha of a local file without storing it."""
        obj_len = os.path.getsize(fname)
        head = object_header(Blob.type_num, obj_len)
        with open(fname, "rb") as f:
            def read_chunk():
                return f.read(chunk_size)
            # NOTE: '' sentinel relies on Python 2 str == bytes semantics,
            # matching the print-statement syntax used elsewhere in this
            # module.
            my_iter = chain([head], iter(read_chunk,''))
            return iter_sha1(my_iter)

    @staticmethod
    def compute_tree_hash(dirname):
        """Compute the git tree sha of a local directory (recursively)."""
        tree = Tree()
        # Sorted order matters: git tree hashes depend on entry order.
        for entry in sorted(os.listdir(dirname)):
            fname = os.path.join(dirname, entry)
            if os.path.isdir(fname):
                thash = GitRepo.compute_tree_hash(fname)
                mode = stat.S_IFDIR # os.stat(fname)[stat.ST_MODE]
                tree.add(entry, mode, thash)
            elif os.path.isfile(fname):
                bhash = GitRepo.compute_blob_hash(fname)
                mode = os.stat(fname)[stat.ST_MODE]
                tree.add(entry, mode, bhash)
        return tree.id

    @staticmethod
    def compute_hash(path):
        """Compute the git sha for a local file or directory."""
        if os.path.isdir(path):
            return GitRepo.compute_tree_hash(path)
        elif os.path.isfile(path):
            return GitRepo.compute_blob_hash(path)
        raise TypeError("Do not support this type of path")

    def get_latest_version(self, path):
        """Return the id of the most recent commit touching *path*."""
        head = self.repo.head()
        walker = Walker(self.repo.object_store, [head], max_entries=1,
                        paths=[path])
        return iter(walker).next().commit.id

    def _stage(self, filename):
        """Stage *filename* (recursing into directories, skipping symlinks)."""
        fullpath = os.path.join(self.repo.path, filename)
        if os.path.islink(fullpath):
            debug.warning("Warning: not staging symbolic link %s" % os.path.basename(filename))
        elif os.path.isdir(fullpath):
            for f in os.listdir(fullpath):
                self._stage(os.path.join(filename, f))
        else:
            # git index paths always use forward slashes.
            if os.path.sep != '/':
                filename = filename.replace(os.path.sep, '/')
            self.repo.stage(filename)

    def add_commit(self, filename):
        """Stage *filename* and commit it; returns the new commit id."""
        self.setup_git()
        self._stage(filename)
        commit_id = self.repo.do_commit('Updated %s' % filename)
        return commit_id

    def setup_git(self):
        """Ensure user.name/user.email are configured so commits succeed."""
        config_stack = self.repo.get_config_stack()
        try:
            config_stack.get(('user',), 'name')
            config_stack.get(('user',), 'email')
        except KeyError:
            # No identity configured anywhere: synthesize one from the
            # current OS user and persist it in the repo-local config.
            from vistrails.core.system import current_user
            from dulwich.config import ConfigFile
            user = current_user()
            repo_conf = self.repo.get_config()
            repo_conf.set(('user',), 'name', user)
            repo_conf.set(('user',), 'email', '%s@localhost' % user)
            repo_conf.write_to_path()
# Module-level singleton: the repository currently in use by the package.
current_repo = None
def get_current_repo():
    """Return the active GitRepo (or None if none has been set)."""
    return current_repo
def set_current_repo(repo):
    """Install *repo* as the module-wide active repository."""
    global current_repo
    current_repo = repo
def get_repo(path):
    """Factory: open (or create) a GitRepo at *path*."""
    return GitRepo(path)
def run_get_file_test():
    """Manual smoke test: extract README.md from a hard-coded local repo."""
    r = GitRepo("/vistrails/src/git")
    r.get_file("README.md", "HEAD", "testREADMEout.md")
def run_get_dir_test():
    """Manual smoke test: extract the scripts/ tree from a hard-coded repo."""
    r = GitRepo("/vistrails/src/git")
    r.get_dir("scripts", "HEAD", "testScriptsout")
def run_get_type_test():
    """Manual smoke test: print entry types (blob vs tree)."""
    r = GitRepo("/vistrails/src/git")
    print "README.md:", r.get_type("README.md")
    print "scripts:", r.get_type("scripts")
def run_compute_sha_test():
    """Manual check: locally computed blob sha should match the stored one."""
    r = GitRepo("/vistrails/src/git")
    print r.get_hash("README.md")
    # print r.compute_blob_hash("/Users/dakoop/Downloads/xcode_2.4.1_8m1910_6936315.dmg")
    print r.compute_blob_hash("/vistrails/src/git/README.md")
def run_compute_sha_dir_test():
    """Manual check: locally computed tree sha should match the stored one."""
    r = GitRepo("/vistrails/src/git")
    print r.get_hash("scripts")
    print r.compute_tree_hash("/vistrails/src/git/scripts")
def run_get_latest_test():
    """Manual smoke test: print the latest commit touching README.md."""
    r = GitRepo("/vistrails/src/git")
    print r.get_latest_version("README.md")
def run_init_add_test():
    """Manual smoke test: init a repo, copy a file in, stage and commit it."""
    r = GitRepo("/Users/dakoop/.vistrails/git_test")
    shutil.copy("/vistrails/src/git/README.md",
                "/Users/dakoop/.vistrails/git_test")
    print r.add_commit("README.md")
# Manual entry point: run one of the smoke tests above directly.
if __name__ == '__main__':
    run_init_add_test()
| Nikea/VisTrails | vistrails/packages/persistence/repo.py | Python | bsd-3-clause | 10,278 |
# This example is meant to be used from within the CadQuery module of FreeCAD.
import cadquery
from Helpers import show
# Create a simple block with a hole through it that we can split
c = cadquery.Workplane("XY").box(1, 1, 1).faces(">Z").workplane() \
    .circle(0.25).cutThruAll()
# Cut the block in half sideways.  keepTop=True discards the lower half
# of the split solid, keeping only the upper portion.
result = c.faces(">Y").workplane(-0.5).split(keepTop=True)
# Render the solid
show(result)
| microelly2/cadquery-freecad-module | CadQuery/Examples/Ex021_Splitting_an_Object.py | Python | lgpl-3.0 | 435 |
import time
from sleekxmpp.test import *
class TestStreamPresence(SleekTest):
    """
    Test presence handling: availability/unavailability events and
    subscription-request processing over a simulated stream.
    """
    def tearDown(self):
        # Close the simulated XMPP stream after each test.
        self.stream_close()
    def testInitialUnavailablePresences(self):
        """
        Test receiving unavailable presences from JIDs that
        are not online.
        """
        events = set()
        def got_offline(presence):
            # The got_offline event should not be triggered.
            events.add('got_offline')
        def unavailable(presence):
            # The presence_unavailable event should be triggered.
            events.add('unavailable')
        self.stream_start()
        self.xmpp.add_event_handler('got_offline', got_offline)
        self.xmpp.add_event_handler('presence_unavailable', unavailable)
        self.recv("""
          <presence type="unavailable" from="otheruser@localhost" />
        """)
        # Give event queue time to process.
        time.sleep(0.1)
        self.assertEqual(events, set(('unavailable',)),
                         "Got offline incorrectly triggered: %s." % events)
    def testGotOffline(self):
        """Test that got_offline is triggered properly."""
        events = []
        def got_offline(presence):
            events.append('got_offline')
        self.stream_start()
        self.xmpp.add_event_handler('got_offline', got_offline)
        # Setup roster. Use a 'set' instead of 'result' so we
        # don't have to handle get_roster() blocking.
        #
        # We use the stream to initialize the roster to make
        # the test independent of the roster implementation.
        self.recv("""
          <iq type="set">
            <query xmlns="jabber:iq:roster">
              <item jid="otheruser@localhost"
                    name="Other User"
                    subscription="both">
                <group>Testers</group>
              </item>
            </query>
          </iq>
        """)
        # Contact comes online.
        self.recv("""
          <presence from="otheruser@localhost/foobar" />
        """)
        # Contact goes offline, should trigger got_offline.
        self.recv("""
          <presence from="otheruser@localhost/foobar"
                    type="unavailable" />
        """)
        # Give event queue time to process.
        time.sleep(0.1)
        self.assertEqual(events, ['got_offline'],
                         "Got offline incorrectly triggered: %s" % events)
    def testGotOnline(self):
        """Test that got_online is triggered properly."""
        events = set()
        def presence_available(p):
            events.add('presence_available')
        def got_online(p):
            events.add('got_online')
        self.stream_start()
        self.xmpp.add_event_handler('presence_available', presence_available)
        self.xmpp.add_event_handler('got_online', got_online)
        self.recv("""
          <presence from="user@localhost" />
        """)
        # Give event queue time to process.
        time.sleep(0.1)
        expected = set(('presence_available', 'got_online'))
        self.assertEqual(events, expected,
                         "Incorrect events triggered: %s" % events)
    def testAutoAuthorizeAndSubscribe(self):
        """
        Test auto authorizing and auto subscribing
        to subscription requests.
        """
        events = set()
        def presence_subscribe(p):
            events.add('presence_subscribe')
        def changed_subscription(p):
            events.add('changed_subscription')
        self.stream_start(jid='tester@localhost')
        self.xmpp.add_event_handler('changed_subscription',
                                    changed_subscription)
        self.xmpp.add_event_handler('presence_subscribe',
                                    presence_subscribe)
        # With these settings we should accept a subscription
        # and request a subscription in return.
        self.xmpp.auto_authorize = True
        self.xmpp.auto_subscribe = True
        self.recv("""
          <presence from="user@localhost" type="subscribe" />
        """)
        self.send("""
          <presence to="user@localhost" type="subscribed" />
        """)
        self.send("""
          <presence to="user@localhost" type="subscribe" />
        """)
        expected = set(('presence_subscribe', 'changed_subscription'))
        self.assertEqual(events, expected,
                         "Incorrect events triggered: %s" % events)
    def testNoAutoAuthorize(self):
        """Test auto rejecting subscription requests."""
        events = set()
        def presence_subscribe(p):
            events.add('presence_subscribe')
        def changed_subscription(p):
            events.add('changed_subscription')
        self.stream_start(jid='tester@localhost')
        self.xmpp.add_event_handler('changed_subscription',
                                    changed_subscription)
        self.xmpp.add_event_handler('presence_subscribe',
                                    presence_subscribe)
        # With this setting we should reject all subscriptions.
        self.xmpp.auto_authorize = False
        self.recv("""
          <presence from="user@localhost" type="subscribe" />
        """)
        self.send("""
          <presence to="user@localhost" type="unsubscribed" />
        """)
        expected = set(('presence_subscribe', 'changed_subscription'))
        self.assertEqual(events, expected,
                         "Incorrect events triggered: %s" % events)
suite = unittest.TestLoader().loadTestsFromTestCase(TestStreamPresence)
| skinkie/SleekXMPP--XEP-0080- | tests/test_stream_presence.py | Python | mit | 5,606 |
# Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
An implementation of a key manager that reads its key from the project's
configuration options.
This key manager implementation provides limited security, assuming that the
key remains secret. Using the volume encryption feature as an example,
encryption provides protection against a lost or stolen disk, assuming that
the configuration file that contains the key is not stored on the disk.
Encryption also protects the confidentiality of data as it is transmitted via
iSCSI from the compute host to the storage host (again assuming that an
attacker who intercepts the data does not know the secret key).
Because this implementation uses a single, fixed key, it proffers no
protection once that key is compromised. In particular, different volumes
encrypted with a key provided by this key manager actually share the same
encryption key so *any* volume can be decrypted once the fixed key is known.
"""
import array
from oslo.config import cfg
from cinder import exception
from cinder.keymgr import key
from cinder.keymgr import key_mgr
from cinder.openstack.common.gettextutils import _
from cinder.openstack.common import log as logging
# Configuration for this key manager: the single shared key is supplied
# hex-encoded via the [keymgr] section's fixed_key option.
key_mgr_opts = [
    cfg.StrOpt('fixed_key',
               help='Fixed key returned by key manager, specified in hex'),
]
CONF = cfg.CONF
CONF.register_opts(key_mgr_opts, group='keymgr')
LOG = logging.getLogger(__name__)
class ConfKeyManager(key_mgr.KeyManager):
    """Key Manager that supports one key defined by the fixed_key conf option.
    This key manager implementation supports all the methods specified by the
    key manager interface. This implementation creates a single key in response
    to all invocations of create_key. Side effects (e.g., raising exceptions)
    for each method are handled as specified by the key manager interface.
    """
    def __init__(self):
        super(ConfKeyManager, self).__init__()
        # Every operation refers to this single, well-known UUID.
        self.key_id = '00000000-0000-0000-0000-000000000000'
    def _generate_key(self, **kwargs):
        """Build the SymmetricKey object from the configured hex string."""
        _hex = self._generate_hex_key(**kwargs)
        # 'hex' codec on str: Python 2 idiom for hex decoding.
        return key.SymmetricKey('AES',
                                array.array('B', _hex.decode('hex')).tolist())
    def _generate_hex_key(self, **kwargs):
        """Return the configured hex key; raise ValueError if unset."""
        if CONF.keymgr.fixed_key is None:
            LOG.warn(_('config option keymgr.fixed_key has not been defined: '
                       'some operations may fail unexpectedly'))
            raise ValueError(_('keymgr.fixed_key not defined'))
        return CONF.keymgr.fixed_key
    def create_key(self, ctxt, **kwargs):
        """Creates a key.
        This implementation returns a UUID for the created key. A
        NotAuthorized exception is raised if the specified context is None.
        """
        if ctxt is None:
            raise exception.NotAuthorized()
        return self.key_id
    def store_key(self, ctxt, key, **kwargs):
        """Stores (i.e., registers) a key with the key manager."""
        if ctxt is None:
            raise exception.NotAuthorized()
        # Only the one fixed key can be "stored"; anything else is refused.
        if key != self._generate_key():
            raise exception.KeyManagerError(
                reason="cannot store arbitrary keys")
        return self.key_id
    def copy_key(self, ctxt, key_id, **kwargs):
        """Copies a key; trivially returns the fixed key's id."""
        if ctxt is None:
            raise exception.NotAuthorized()
        return self.key_id
    def get_key(self, ctxt, key_id, **kwargs):
        """Retrieves the key identified by the specified id.
        This implementation returns the key that is associated with the
        specified UUID. A NotAuthorized exception is raised if the specified
        context is None; a KeyError is raised if the UUID is invalid.
        """
        if ctxt is None:
            raise exception.NotAuthorized()
        if key_id != self.key_id:
            raise KeyError(key_id)
        return self._generate_key()
    def delete_key(self, ctxt, key_id, **kwargs):
        """Deletes a key; effectively a no-op since the key is fixed."""
        if ctxt is None:
            raise exception.NotAuthorized()
        if key_id != self.key_id:
            raise exception.KeyManagerError(
                reason="cannot delete non-existent key")
        LOG.warn(_("Not deleting key %s"), key_id)
| Thingee/cinder | cinder/keymgr/conf_key_mgr.py | Python | apache-2.0 | 4,809 |
# --- Day 13: Knights of the Dinner Table ---
#
# In years past, the holiday feast with your family hasn't gone so well. Not everyone gets along! This year, you
# resolve, will be different. You're going to find the optimal seating arrangement and avoid all those awkward
# conversations.
#
# You start by writing up a list of everyone invited and the amount their happiness would increase or decrease if they
# were to find themselves sitting next to each other person. You have a circular table that will be just big enough to
# fit everyone comfortably, and so each person will have exactly two neighbors.
#
# For example, suppose you have only four attendees planned, and you calculate their potential happiness as follows:
#
# Alice would gain 54 happiness units by sitting next to Bob.
# Alice would lose 79 happiness units by sitting next to Carol.
# Alice would lose 2 happiness units by sitting next to David.
# Bob would gain 83 happiness units by sitting next to Alice.
# Bob would lose 7 happiness units by sitting next to Carol.
# Bob would lose 63 happiness units by sitting next to David.
# Carol would lose 62 happiness units by sitting next to Alice.
# Carol would gain 60 happiness units by sitting next to Bob.
# Carol would gain 55 happiness units by sitting next to David.
# David would gain 46 happiness units by sitting next to Alice.
# David would lose 7 happiness units by sitting next to Bob.
# David would gain 41 happiness units by sitting next to Carol.
#
# Then, if you seat Alice next to David, Alice would lose 2 happiness units (because David talks so much), but David
# would gain 46 happiness units (because Alice is such a good listener), for a total change of 44.
#
# If you continue around the table, you could then seat Bob next to Alice (Bob gains 83, Alice gains 54). Finally,
# seat Carol, who sits next to Bob (Carol gains 60, Bob loses 7) and David (Carol gains 55, David gains 41). The
# arrangement looks like this:
#
# +41 +46
# +55 David -2
# Carol Alice
# +60 Bob +54
# -7 +83
#
# After trying every other seating arrangement in this hypothetical scenario, you find that this one is the most
# optimal, with a total change in happiness of 330.
#
# What is the total change in happiness for the optimal seating arrangement of the actual guest list?
#
# --- Part Two ---
#
# In all the commotion, you realize that you forgot to seat yourself. At this point, you're pretty apathetic toward
# the whole thing, and your happiness wouldn't really go up or down regardless of who you sit next to. You assume
# everyone else would be just as ambivalent about sitting next to you, too.
#
# So, add yourself to the list, and give all happiness relationships that involve you a score of 0.
#
# What is the total change in happiness for the optimal seating arrangement that actually includes yourself?
import itertools
def get_happiest_plan(happy_values):
    """Return the maximum total happiness over all circular seatings.

    happy_values maps each guest to a dict of per-neighbour happiness
    deltas; each adjacent pair contributes both directed values (and the
    table is circular, so the last guest also sits next to the first).

    Fixes two issues with the original: permutations are iterated
    lazily instead of materialised as a list, and the best total starts
    at None instead of 0 so an all-negative guest list no longer
    incorrectly reports 0.  Returns 0 for an empty guest list.
    """
    guests = list(happy_values.keys())
    if not guests:
        return 0
    best = None
    for plan in itertools.permutations(guests):
        happy_points = 0
        # Sum both directed deltas for each adjacent pair.
        for left, right in zip(plan, plan[1:]):
            happy_points += happy_values[left][right]
            happy_points += happy_values[right][left]
        # Complete the circle
        happy_points += happy_values[plan[-1]][plan[0]]
        happy_points += happy_values[plan[0]][plan[-1]]
        if best is None or happy_points > best:
            best = happy_points
    return best
# Parse the puzzle input.  Each line has the form
# "<A> would gain/lose <N> happiness units by sitting next to <B>.";
# the chained replaces reduce it to "<A> <±N> <B>".
data = open("day13_input").read().split("\n")
happiness_values = {}
# Build dictionary of dictionaries mapping happy points for each person
for line in data:
    l = line.replace(" would gain ", " ") \
        .replace(" would lose ", " -") \
        .replace(" happiness units by sitting next to ", " ") \
        .replace(".", "") \
        .split(" ")
    if len(l) == 3:
        if l[0] not in happiness_values:
            happiness_values[l[0]] = {}
        happiness_values[l[0]][l[2]] = int(l[1])
print(get_happiest_plan(happiness_values))
# Part 2
# Add me to the dictionary and sub-dictionaries
happiness_values["Me"] = {}
for h in happiness_values:
    # Note: this also creates a harmless Me->Me entry of 0, which is
    # never looked up since a guest is never adjacent to themselves.
    happiness_values[h]["Me"] = 0
    happiness_values["Me"][h] = 0
print(get_happiest_plan(happiness_values))
| hubbardgary/AdventOfCode | day13.py | Python | mit | 4,282 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012 Dag Wieers <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: debug
short_description: Print statements during execution
description:
- This module prints statements during execution and can be useful
for debugging variables or expressions without necessarily halting
the playbook.
- Useful for debugging together with the 'when:' directive.
- This module is also supported for Windows targets.
version_added: '0.8'
options:
msg:
description:
- The customized message that is printed. If omitted, prints a generic message.
type: str
default: 'Hello world!'
var:
description:
- A variable name to debug.
- Mutually exclusive with the C(msg) option.
- Be aware that this option already runs in Jinja2 context and has an implicit C({{ }}) wrapping,
so you should not be using Jinja2 delimiters unless you are looking for double interpolation.
type: str
verbosity:
description:
- A number that controls when the debug is run, if you set to 3 it will only run debug when -vvv or above
type: int
default: 0
version_added: '2.1'
notes:
- This module is also supported for Windows targets.
seealso:
- module: ansible.builtin.assert
- module: ansible.builtin.fail
author:
- Dag Wieers (@dagwieers)
- Michael DeHaan
'''
EXAMPLES = r'''
# Example that prints the loopback address and gateway for each host
- debug:
msg: System {{ inventory_hostname }} has uuid {{ ansible_product_uuid }}
- debug:
msg: System {{ inventory_hostname }} has gateway {{ ansible_default_ipv4.gateway }}
when: ansible_default_ipv4.gateway is defined
# Example that prints return information from the previous task
- shell: /usr/bin/uptime
register: result
- debug:
var: result
verbosity: 2
- name: Display all variables/facts known for a host
debug:
var: hostvars[inventory_hostname]
verbosity: 4
# Example that prints two lines of messages, but only if there is an environment value set
- debug:
msg:
- "Provisioning based on YOUR_KEY which is: {{ lookup('env', 'YOUR_KEY') }}"
- "These servers were built using the password of '{{ password_used }}'. Please retain this for later use."
'''
| j-carl/ansible | lib/ansible/modules/debug.py | Python | gpl-3.0 | 2,427 |
import libtcodpy as libtcod
import consts
def find_closest_target(caster, entities, range):
    """Return the nearest fighter entity within *range* of *caster*.

    Entities without a fighter component and the caster itself are
    skipped; returns None when no entity qualifies.  Ties keep the
    first candidate encountered.
    """
    best = None
    best_dist = range + 1  # must be strictly closer than this to qualify
    for candidate in entities:
        if not candidate.fighter or candidate == caster:
            continue
        dist = caster.distance_to(candidate)
        if dist < best_dist:
            best, best_dist = candidate, dist
    return best
def random_choice(chances):
    """Pick an index from *chances* with probability proportional to its weight.

    Rolls a die in [1, sum(chances)] via libtcod's RNG and returns the
    index of the weight bucket the roll lands in.
    """
    dice = libtcod.random_get_int(0, 1, sum(chances))
    cumulative = 0
    for index, weight in enumerate(chances):
        cumulative += weight
        if dice <= cumulative:
            return index
def random_choice_dict(chances_dict):
    """Pick a key from *chances_dict* with probability proportional to its value."""
    names = list(chances_dict.keys())
    weights = list(chances_dict.values())
    # keys() and values() iterate in the same order for an unmodified dict.
    return names[random_choice(weights)]
def from_dungeon_level(table, dungeon_level):
    """Look up the value for *dungeon_level* in a [(value, min_level), ...] table.

    The table is scanned from deepest threshold to shallowest; the first
    threshold not exceeding dungeon_level wins.  Returns 0 when no
    threshold applies.
    """
    return next(
        (value for value, level in reversed(table) if dungeon_level >= level),
        0)
def build_leveled_item_list(level):
    """Build an item-name -> spawn-weight dict for the given dungeon level.

    Health potions always spawn; everything else unlocks by depth via
    from_dungeon_level.
    """
    item_chances = {}
    item_chances[consts.ITEM_HEALTHPOTION_NAME] = consts.ITEM_HEALTHPOTION_SPAWNRATE
    item_chances[consts.ITEM_SCROLLLIGHTNING_NAME] = from_dungeon_level(consts.ITEM_SCROLLLIGHTNING_SPAWNRATE, level)
    item_chances[consts.ITEM_SCROLLCONFUSE_NAME] = from_dungeon_level(consts.ITEM_SCROLLCONFUSE_SPAWNRATE, level)
    item_chances[consts.ITEM_SWORDCOPPER_NAME] = from_dungeon_level(consts.ITEM_SWORDCOPPER_SPAWNRATE, level)
    item_chances[consts.ITEM_BUCKLERCOPPER_NAME] = from_dungeon_level(consts.ITEM_BUCKLERCOPPER_SPAWNRATE, level)
    return item_chances
def build_leveled_mob_list(level):
    """Build a mob-name -> spawn-weight dict for the given dungeon level.

    Kobolds and skeletons always spawn; orcs unlock by depth via
    from_dungeon_level.
    """
    mob_chances = {}
    mob_chances[consts.MOB_KOBOLD_NAME] = consts.MOB_KOBOLD_SPAWNRATE
    mob_chances[consts.MOB_SKELETON_NAME] = consts.MOB_SKELETON_SPAWNRATE
    mob_chances[consts.MOB_ORC_NAME] = from_dungeon_level(consts.MOB_ORC_SPAWNRATE, level)
    return mob_chances
def get_equipped_in_slot(inventory, slot_to_check):
    """Return the equipped Equipment occupying *slot_to_check*, or None."""
    for item in inventory:
        equipment = item.equipment
        if equipment and equipment.slot == slot_to_check and equipment.is_equipped:
            return equipment
    return None
def get_all_equipped(inventory):
    """Return every Equipment component in *inventory* that is currently equipped."""
    return [item.equipment for item in inventory
            if item.equipment and item.equipment.is_equipped]
"""
Script to generate "labelled" data using random playouts
A lot of this can now be done with the RunExperiment scene
"""
from models.game import *
from models.data import DatabaseConnection as DB, GameDataModel
import timeit
def full_game_experiment(total_games, purge=10):
    """
    Simulates many random games from start to finish and records each in the database
    Board states that have fewer than "purge" records will be removed from the database
    (pass purge=0 to skip purging entirely)
    :return: None
    """
    p1_wins = 0
    p2_wins = 0
    ties = 0
    total_moves = 0
    p1 = BogoBot(Board.X)
    p2 = BogoBot(Board.O)
    print("Playing %s games... \n" % total_games)
    start_time = timeit.default_timer()
    # (removed a redundant "total_games = total_games" self-assignment and
    # the unnecessary list() around range)
    for i in range(total_games):
        print("Playing game %s..." % (i+1))
        game = Game(p1, p2)
        winner = game.finish_game()
        if winner == Board.X:
            p1_wins += 1
        elif winner == Board.O:
            p2_wins += 1
        else:
            ties += 1
        total_moves += len(game.moves)
        # Persist the finished game's board states.
        game_data = GameDataModel(game)
        game_data.save()
    if purge > 0:
        DB.purge_boards(purge)
    DB.close()
    elapsed = timeit.default_timer() - start_time
    print("Done in %s s \n" % elapsed)
    print("Player 1 Won %s %% of the games" % round(p1_wins*100 / total_games))
    print("Player 2 Won %s %% of the games" % round(p2_wins*100 / total_games))
    print("The Cat got %s %% of the games" % round(ties*100 / total_games))
    print("Average number of moves: %s" % round(total_moves / total_games))
def mid_game_experiment(starting_boards, games_per_board, purge=10):
    """Generate data for mid-game boards (25 moves in).

    Builds *starting_boards* random 25-move openings, then finishes each
    one randomly *games_per_board* times, saving every game.  Board
    states with fewer than *purge* records are removed afterwards.
    """
    STARTING_BOARDS = starting_boards
    GAMES_PER_BOARD = games_per_board
    MOVE_SEQUENCE_LENGTH = 25
    print("Generating mid-game data for %s boards..." % STARTING_BOARDS)
    p1 = BogoBot(Board.X)
    p2 = BogoBot(Board.O)
    print("Generating move sequences...")
    move_sequences = []
    for i in list(range(0, STARTING_BOARDS)):
        game = Game(p1, p2)
        # generate a move sequence that will take us to this board
        # (NOTE: relies on the private Game._take_step API)
        sequence = []
        for j in list(range(0, MOVE_SEQUENCE_LENGTH)):
            move = game._take_step()
            sequence.append(move)
        move_sequences.append(sequence)
    # we now have several move sequences that will take us to a fixed mid-game state - generate data for each of these
    for idx, sequence in enumerate(move_sequences):
        print("Generating data for move sequence %s" % (idx + 1))
        for experiment in list(range(0, GAMES_PER_BOARD)):
            game = Game(p1, p2)
            for move in sequence:  # bring the game to its mid-completed state
                game.make_move(move)
            # finish the game randomly and save
            game.finish_game()
            game_data = GameDataModel(game)
            game_data.save()
    # remove all the "junk" data that we don't need - this keeps the database from growing too large when long experiments are run
    DB.purge_boards(purge)
    print("Done!")
    DB.close()
def late_game_experiment(starting_boards, games_per_board, purge=10):
    """Generate data for late-game boards (45 moves in).

    Like mid_game_experiment but deeper: 45-move openings are generated,
    retrying whenever the random sequence already ends the game, so each
    starting board is guaranteed to still be in progress.
    """
    STARTING_BOARDS = starting_boards
    GAMES_PER_BOARD = games_per_board
    MOVE_SEQUENCE_LENGTH = 45
    print("Generating late-game data for %s boards..." % STARTING_BOARDS)
    p1 = BogoBot(Board.X)
    p2 = BogoBot(Board.O)
    print("Generating move sequences...")
    move_sequences = []
    i = 0
    while i < STARTING_BOARDS:
        game = Game(p1, p2)
        # generate a move sequence that will take us to this board
        # (NOTE: relies on the private Game._take_step API)
        sequence = []
        for j in list(range(0, MOVE_SEQUENCE_LENGTH)):
            move = game._take_step()
            sequence.append(move)
        # only keep sequences where the game is still undecided
        if not game.is_game_over():
            move_sequences.append(sequence)
            i += 1
    # we now have several move sequences that will take us to a fixed mid-game state - generate data for each of these
    for idx, sequence in enumerate(move_sequences):
        print("Generating data for move sequence %s" % (idx + 1))
        for experiment in list(range(0, GAMES_PER_BOARD)):
            game = Game(p1, p2)
            for move in sequence:  # bring the game to its mid-completed state
                game.make_move(move)
            # finish the game randomly and save
            game.finish_game()
            game_data = GameDataModel(game)
            game_data.save()
    # remove all the "junk" data that we don't need - this keeps the database from growing too large when long experiments are run
    DB.purge_boards(purge)
    print("Done!")
    DB.close()
# full_game_experiment(10)
# mid_game_experiment(1, 15)
# late_game_experiment(75, 100) | zachdj/ultimate-tic-tac-toe | generate_data.py | Python | mit | 4,760 |
from socket import inet_ntoa
from struct import pack
def calcDottedNetmask(mask):
    """Convert a CIDR prefix length to a dotted-quad netmask string.

    e.g. calcDottedNetmask(24) -> '255.255.255.0'.

    The original built the bit pattern with a Python-2-only xrange loop;
    a single shift expression is equivalent and works on both Python 2
    and Python 3.  Raises ValueError for a mask outside 0..32 (the old
    loop also raised ValueError, via a negative shift).
    """
    if not 0 <= mask <= 32:
        raise ValueError("mask must be between 0 and 32, got %r" % (mask,))
    # High `mask` bits set, truncated to 32 bits.
    bits = (0xffffffff << (32 - mask)) & 0xffffffff
    packed_value = pack('!I', bits)
    return inet_ntoa(packed_value)
| openbmc/openbmc-test-automation | lib/pythonutil.py | Python | apache-2.0 | 245 |
"""Support for WiLight lights."""
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_HS_COLOR,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
LightEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from . import WiLightDevice
from .const import (
DOMAIN,
ITEM_LIGHT,
LIGHT_COLOR,
LIGHT_DIMMER,
LIGHT_ON_OFF,
SUPPORT_NONE,
)
def entities_from_discovered_wilight(hass, api_device):
    """Parse configuration and add WiLight light entities."""
    # Dispatch table: light sub-type -> entity class implementing it.
    entity_classes = {
        LIGHT_ON_OFF: WiLightLightOnOff,
        LIGHT_DIMMER: WiLightLightDimmer,
        LIGHT_COLOR: WiLightLightColor,
    }
    entities = []
    for item in api_device.items:
        # Only light items are handled here; other item types are skipped.
        if item["type"] != ITEM_LIGHT:
            continue
        entity_class = entity_classes.get(item["sub_type"])
        if entity_class is None:
            # Unknown sub-type: nothing we can instantiate.
            continue
        entities.append(entity_class(api_device, item["index"], item["name"]))
    return entities
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities
):
    """Set up WiLight lights from a config entry."""
    # Look up the parent WiLight device registered for this entry and
    # register every light entity discovered on it.
    parent = hass.data[DOMAIN][entry.entry_id]
    async_add_entities(entities_from_discovered_wilight(hass, parent.api))
class WiLightLightOnOff(WiLightDevice, LightEntity):
    """Representation of a WiLights light on-off."""

    @property
    def supported_features(self):
        """Flag supported features."""
        # Plain relay: no brightness or color support.
        return SUPPORT_NONE

    @property
    def is_on(self):
        """Return true if device is on."""
        # _status is presumably kept current by the WiLightDevice base
        # class via device updates -- confirm against WiLightDevice.
        return self._status.get("on")

    async def async_turn_on(self, **kwargs):
        """Turn the device on."""
        await self._client.turn_on(self._index)

    async def async_turn_off(self, **kwargs):
        """Turn the device off."""
        await self._client.turn_off(self._index)
class WiLightLightDimmer(WiLightDevice, LightEntity):
    """Representation of a WiLights light dimmer."""

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_BRIGHTNESS

    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        # Default to 0 when the device has not reported a level yet.
        return int(self._status.get("brightness", 0))

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._status.get("on")

    async def async_turn_on(self, **kwargs):
        """Turn the device on, set brightness if needed."""
        # Dimmer switches use a range of [0, 255] to control
        # brightness. Level 255 might mean to set it to previous value
        if ATTR_BRIGHTNESS in kwargs:
            brightness = kwargs[ATTR_BRIGHTNESS]
            await self._client.set_brightness(self._index, brightness)
        else:
            # No explicit level requested: plain turn-on.
            await self._client.turn_on(self._index)

    async def async_turn_off(self, **kwargs):
        """Turn the device off."""
        await self._client.turn_off(self._index)
def wilight_to_hass_hue(value):
    """Convert wilight hue 1..255 to hass 0..360 scale."""
    scaled = round(value * 360 / 255, 3)
    # clamp to the hass maximum of 360 degrees
    return min(360, scaled)
def hass_to_wilight_hue(value):
    """Convert hass hue 0..360 to wilight 1..255 scale."""
    raw = round(value * 255 / 360)
    # clamp to the wilight maximum of 255
    return 255 if raw > 255 else raw
def wilight_to_hass_saturation(value):
    """Convert wilight saturation 1..255 to hass 0..100 scale."""
    scaled = round(value * 100 / 255, 3)
    # clamp to the hass maximum of 100 percent
    return min(100, scaled)
def hass_to_wilight_saturation(value):
    """Convert hass saturation 0..100 to wilight 1..255 scale."""
    raw = round(value * 255 / 100)
    # clamp to the wilight maximum of 255
    return 255 if raw > 255 else raw
class WiLightLightColor(WiLightDevice, LightEntity):
    """Representation of a WiLights light rgb."""

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_BRIGHTNESS | SUPPORT_COLOR

    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        return int(self._status.get("brightness", 0))

    @property
    def hs_color(self):
        """Return the hue and saturation color value [float, float]."""
        hue = int(self._status.get("hue", 0))
        saturation = int(self._status.get("saturation", 0))
        return [
            wilight_to_hass_hue(hue),
            wilight_to_hass_saturation(saturation),
        ]

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._status.get("on")

    async def async_turn_on(self, **kwargs):
        """Turn the device on, setting brightness/color when requested."""
        # Brightness uses a range of [0, 255]; hue [0, 360];
        # saturation [0, 100].
        has_brightness = ATTR_BRIGHTNESS in kwargs
        has_color = ATTR_HS_COLOR in kwargs
        if has_color:
            hue = hass_to_wilight_hue(kwargs[ATTR_HS_COLOR][0])
            saturation = hass_to_wilight_saturation(kwargs[ATTR_HS_COLOR][1])
        if has_brightness and has_color:
            await self._client.set_hsb_color(
                self._index, hue, saturation, kwargs[ATTR_BRIGHTNESS]
            )
        elif has_brightness:
            await self._client.set_brightness(self._index, kwargs[ATTR_BRIGHTNESS])
        elif has_color:
            await self._client.set_hs_color(self._index, hue, saturation)
        else:
            # Neither brightness nor color requested: plain turn-on.
            await self._client.turn_on(self._index)

    async def async_turn_off(self, **kwargs):
        """Turn the device off."""
        await self._client.turn_off(self._index)
| tchellomello/home-assistant | homeassistant/components/wilight/light.py | Python | apache-2.0 | 5,891 |
# -*- coding: utf-8 -*-
import re
PORT = 443
class XcodeServer(object):
    """Location of an Xcode CI server; normalizes the host to its API URL."""

    def __init__(self, host=None, port=PORT):
        """Store host/port and validate the host immediately.

        :param host: base URL of the server (may be None).
        :param port: TCP port, defaults to the module-level PORT (443).
        :raises RuntimeError: if host is set but is not a valid URL.
        """
        self.host = host
        self.port = port
        self.validate()

    def validate(self):
        """Check the host URL and append the '/xcode/api' path if missing."""
        if not self.host:
            # No host configured; nothing to validate.
            return
        if not self.url_validate():
            # fixed typo in user-facing message: "past" -> "paste"
            raise RuntimeError("Host is not a URL, please paste a valid host, examples:\n" +
                               "- http://10.55.55.50\n" +
                               "- https://10.55.55.50")
        if "/xcode/api" not in self.host:
            self.host = self.host + "/xcode/api"

    def url_validate(self):
        """Return a regex match object if the host looks like an http(s)/ftp URL."""
        regex = re.compile( r'^(?:http|ftp)s?://'
            r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'
            r'localhost|' #localhost...
            r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
            r'(?::\d+)?' # optional port
            r'(?:/?|[/?]\S+)$', re.IGNORECASE)
        return regex.match(self.host)
| oarrabi/xserverpy | xserverpy/lib/xcode_server.py | Python | bsd-3-clause | 1,086 |
# -*- coding: UTF-8 -*-
# -----------------------------------------------------------------------------
# xierpa server
# Copyright (c) 2014+ buro@petr.com, www.petr.com, www.xierpa.com
#
# X I E R P A 3
# Distribution by the MIT License.
#
# -----------------------------------------------------------------------------
#
# twistedclient.py
#
import sys, os
from xierpa3.server.base.baseclient import BaseClient
#from xierpa3.server.base.httpclient import HttpClient
from twisted.web.resource import Resource
from twisted.internet import reactor
class TwistedClient(BaseClient, Resource):
    # NOTE: this is a Python 2 module (it uses the print statement below).

    def isLeaf(self):
        u"""
        The ``isLeaf`` method is required for Twisted clients, inheriting from ``Resource``.
        """
        # NOTE(review): Twisted usually expects ``isLeaf`` as a class
        # attribute rather than a method -- confirm this works as intended.
        return True

    def reload(self):
        # Show the stop label, rebuild the command line and environment that
        # would respawn this process, then stop the Twisted reactor.
        self.showStopLabel()
        args = [sys.executable] + sys.argv
        new_environ = os.environ.copy()
        print args, new_environ
        getattr(reactor, 'stop')()
| petrvanblokland/Xierpa3 | xierpa3/server/twistedmatrix/twistedclient.py | Python | mit | 972 |
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Formats left commented out fall back to Django's built-in defaults.
DATE_FORMAT = 'd F Y'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd.m.Y'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
# Number formatting: comma as the decimal separator (e.g. "1,5").
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' ' # Non-breaking space
# NUMBER_GROUPING =
| 912/M-new | virtualenvironment/experimental/lib/python2.7/site-packages/django/conf/locale/bg/formats.py | Python | gpl-2.0 | 772 |
#!/usr/bin/env python2
#!/Applications/anaconda/envs/root3 python
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 1 13:50:04 2016
@author: mschull
"""
#python
from .search import Search
import os
import subprocess
import glob
import shutil
import pandas as pd
import datetime
import argparse
import getpass
import keyring
import json
from pyproj import Proj
from .utils import folders,search
from .Clients import Client
from .Order import Order
from .OrderTemplate import OrderTemplate
import pycurl
# Resolve the project directory layout relative to the current working
# directory; `folders` returns the standard MODIS/Landsat subdirectories.
base = os.getcwd()
Folders = folders(base)
modisBase = Folders['modisBase']
landsatSR = Folders['landsatSR']
landsatLAI = Folders['landsatLAI']
# Scratch area for symlinked/copied Landsat surface-reflectance files.
landsatTemp = os.path.join(landsatSR, 'temp')
if not os.path.exists(landsatTemp):
    os.mkdir(landsatTemp)
def getLandsatData(loc, startDate, endDate, auth):
    """Order and download Landsat 8 surface-reflectance data via ESPA.

    :param loc: (lat, lon) sequence for the point of interest.
    :param startDate: start date string, yyyy-mm-dd.
    :param endDate: end date string, yyyy-mm-dd.
    :param auth: (username, password) tuple for the USGS/ESPA service.
    """
    # Build an ESPA order request for Landsat 8 (OLI/TIRS) surface
    # reflectance, brightness temperature and cloud products (GeoTIFF).
    data = {'olitirs8': {"inputs": [], "products": ["sr", "bt", "cloud"]}, "format": "gtiff",
            "plot_statistics": False, "note": ""}
    with open('order.json', 'w') as outfile:
        json.dump(data, outfile)
    # build the various handlers to spec
    template = OrderTemplate('template')
    template.load(path='./order.json')
    order = Order(template, note="Lat%dLon%d-%s_%s" % (int(loc[0]), int(loc[1]), startDate, endDate))
    client = Client(auth)  # will prompt user for username and password if auth argument not supplied
    # downloader = EspaLandsatLocalDownloader('USGS_downloads')
    # find cloud free landsat scenes (cloud cover at most 5%)
    try:
        s = Search()
        scenes = s.search(lat=loc[0], lon=loc[1], limit=100, start_date=startDate, end_date=endDate, cloud_max=5)
        l8_tiles = []
        for i in range(len(scenes['results'])):
            path = scenes['results'][i]['path']
            row = scenes['results'][i]['row']
            sceneID = scenes['results'][i]['sceneID']
            # only Landsat 8 scenes (IDs starting with 'LC') are wanted
            if sceneID.startswith('LC'):
                dataFN = os.path.join(landsatSR, "%s%s" % (path, row), "%s.xml" % sceneID)
                if not os.path.exists(dataFN):
                    # scene not yet downloaded: add it to the order
                    l8_tiles.append(sceneID)
                else:
                    # already on disk: symlink the files into the temp area
                    files = glob.glob("%s*" % dataFN[:-4])
                    for file in files:
                        os.symlink(file, os.path.join(landsatTemp, file.split(os.sep)[-1]))
                        # shutil.copy(file,landsatTemp)
    except:
        # NOTE(review): bare except -- any failure of the Search API falls
        # back to the local `search()` helper from .utils.
        sceneIDs = search(loc[0], loc[1], startDate, endDate)
        l8_tiles = []
        for i in range(len(sceneIDs)):
            l8_tiles.append(sceneIDs[i])
    print l8_tiles
    if l8_tiles:
        # order the data
        order.add_tiles("olitirs8", l8_tiles)
        # order.add_tiles("etm7", l7_tiles)
        response = order.submit(client)
        # view the servers whole response. which might indicate an ordering error!
        print(response)
        # assuming there were no order submission errors
        orderid = response['orderid']
        # now start the downloader!
        for download in client.download_order_gen(orderid):
            print(download)
            # download is a tuple with the filepath, and True if the file
            # is a fresh download.
            # this is where data pipeline scripts go that can operate
            # on files as they are downloaded (generator),
            # See the Client class for further documentation.
def getMODISlai(tiles, product, version, startDate, endDate, auth):
    """Download MODIS LAI granules using the modis_download.py helper."""
    username, password = auth
    # Assemble the command line for the external downloader.
    cmd = [
        "modis_download.py",
        "-r",
        "-U", "%s" % username,
        "-P", "%s" % password,
        "-p", "%s.%s" % (product, version),
        "-t", "%s" % tiles,
        "-s", "MOTA",
        "-f", "%s" % startDate,
        "-e", "%s" % endDate,
        "%s" % modisBase,
    ]
    subprocess.call(cmd)
def latlon2MODtile(lat, lon):
    """Return the (V, H) MODIS sinusoidal-grid tile indices for a lat/lon."""
    # reference: https://code.env.duke.edu/projects/mget/wiki/SinusoidalMODIS
    p_modis_grid = Proj('+proj=sinu +R=6371007.181 +nadgrids=@null +wktext')
    # forward projection: lon/lat -> sinusoidal x/y (meters)
    x, y = p_modis_grid(lon, lat)
    # or the inverse, from x, y to lon, lat
    lon, lat = p_modis_grid(x, y, inverse=True)
    TILE_WIDTH = 1111950.5196666666
    GRID_MIN_X = -20015109.354
    GRID_MIN_Y = -10007554.677
    h = (x - GRID_MIN_X) / TILE_WIDTH
    # vertical tiles count downward from the top of the grid (18 rows)
    v = 18 - ((y - GRID_MIN_Y) / TILE_WIDTH)
    return int(v), int(h)
def geotiff2envi():
    """Convert downloaded Landsat surface-reflectance GeoTIFF bands to ENVI format.

    Runs the external GeoTiff2ENVI tool once per band for every Landsat
    scene XML found in the temp directory.
    """
    geotiffConvert = 'GeoTiff2ENVI'
    bands = ["blue", "green", "red", "nir", "swir1", "swir2", "cloud"]
    l8bands = ["sr_band2", "sr_band3", "sr_band4", "sr_band5", "sr_band6", "sr_band7", "cfmask"]
    landsatFiles = glob.glob(os.path.join(landsatTemp, "*.xml"))
    for landsat_file in landsatFiles:
        fstem = landsat_file[:-4]  # strip the ".xml" suffix
        # fixed: the original reused loop variable `i` for both the outer
        # and inner loops; iterate the band pairs directly instead
        for l8band, band in zip(l8bands, bands):
            tifFile = fstem + "_%s.tif" % l8band
            datFile = fstem + "_%s.%s.dat" % (l8band, band)
            subprocess.call(["%s" % geotiffConvert, "%s" % tifFile, "%s" % datFile])
def sample():
    """Create MODIS-Landsat sample files used to train the LAI model.

    For each Landsat scene, finds the matching 4-day MCD15A3 composite and
    runs the external `lndlai_sample` tool with a generated control file.
    """
    # fixed: the local variable was named `sample`, shadowing this function
    sampler = 'lndlai_sample'
    bands = ["blue", "green", "red", "nir", "swir1", "swir2", "cloud"]
    l8bands = ["sr_band2", "sr_band3", "sr_band4", "sr_band5", "sr_band6", "sr_band7", "cfmask"]
    landsatFiles = glob.glob(os.path.join(landsatTemp, "*.xml"))
    for landsat_file in landsatFiles:
        sceneID = landsat_file.split(os.sep)[-1][:-4]
        # extract the Landsat doy and year from the scene ID
        ldoy = sceneID[13:16]
        year = int(sceneID[9:13])
        # convert to date
        dd = datetime.datetime(year, 1, 1) + datetime.timedelta(int(ldoy) - 1)
        date = '%d-%02d-%02d' % (dd.year, dd.month, dd.day)
        # find the 4 day MODIS doy prior to the Landsat doy
        mdoy = int((int((float(ldoy) - 1.) / 4.) * 4.) + 1)
        modFiles = glob.glob(os.path.join(modisBase, "MCD15A3.A%s%s.*.hdf" % (year, mdoy)))
        fstem = landsat_file[:-4]
        laiPath = landsatLAI
        if not os.path.exists(laiPath):
            os.mkdir(laiPath)
        sam_file = os.path.join(laiPath, "SR_LAI.%s.%s.MCD15A3_A%s%s.txt" % (date, sceneID, year, mdoy))
        # fixed: inner loop no longer shadows the outer loop variable, and
        # the control file is written with a context manager (no builtin
        # `file` shadowing, guaranteed close before the subprocess runs)
        for mod_idx, mod_file in enumerate(modFiles):
            fn = os.path.join(laiPath, "slai%s.inp" % mod_idx)
            with open(fn, "w") as inp:
                inp.write("LANDSAT_BASE_BLUE = %s_%s.%s.dat\n" % (fstem, l8bands[0], bands[0]))
                inp.write("LANDSAT_BASE_GREEN = %s_%s.%s.dat\n" % (fstem, l8bands[1], bands[1]))
                inp.write("LANDSAT_BASE_RED = %s_%s.%s.dat\n" % (fstem, l8bands[2], bands[2]))
                inp.write("LANDSAT_BASE_NIR = %s_%s.%s.dat\n" % (fstem, l8bands[3], bands[3]))
                inp.write("LANDSAT_BASE_SWIR1 = %s_%s.%s.dat\n" % (fstem, l8bands[4], bands[4]))
                inp.write("LANDSAT_BASE_SWIR2 = %s_%s.%s.dat\n" % (fstem, l8bands[5], bands[5]))
                inp.write("LANDSAT_BASE_CLOUD = %s_%s.%s.dat\n" % (fstem, l8bands[6], bands[6]))
                inp.write("MODIS_BASE_FILE = %s\n" % mod_file)
                inp.write("SAMPLE_FILE_OUT = %s\n" % sam_file)
                inp.write("PURE_SAMPLE_TH = 0.2\n")
            subprocess.call(["%s" % sampler, "%s" % fn])
            os.remove(fn)
os.remove(os.path.join(laiPath,"slai%s.inp" % i))
def train():
    """Train a Cubist regression model from the MODIS-Landsat sample files.

    Combines all sample text files, filters saturated samples, writes the
    Cubist .data/.names pair and invokes the external `cubist` binary.
    """
    cubist = 'cubist'
    landsatFiles = glob.glob(os.path.join(landsatLAI, "*.txt"))
    # ======combine input data======================================
    columns = ['ulx', 'uly', 'blue', 'green', 'red', 'nir', 'swir1', 'swir2',
               'ndvi', 'ndwi', 'lai', 'weight', 'satFlag']
    # fixed: DataFrame.append was deprecated and removed in pandas 2.0;
    # collect the per-file frames and concatenate once
    frames = [pd.read_csv(sam_file, delim_whitespace=True, names=columns)
              for sam_file in landsatFiles]
    df = pd.concat([pd.DataFrame(columns=columns)] + frames, ignore_index=True)
    # =====create filestem.data====================================
    # keep only non-saturated samples, ordered by weight
    df = df[(df.satFlag == 'N')]
    df = df.sort_values(by='weight')
    startDate = '200'
    endDate = '300'
    filestem = os.path.join(landsatLAI, "lndsr_modlai_samples.combined_%s-%s" % (startDate, endDate))
    # (filestem is already absolute, so the extra join is a no-op)
    df.to_csv(os.path.join(landsatLAI, filestem + ".data"),
              columns=['blue', 'green', 'red', 'nir', 'swir1', 'swir2',
                       'ndvi', 'ndwi', 'lai', 'weight'],
              header=None, index=None, mode='w', sep="\t", encoding='utf-8')
    # ====create filestem.names====================================
    # fixed: use a context manager; do not shadow the builtin `file`
    with open(os.path.join(landsatLAI, "%s.names" % filestem), "w") as names_file:
        names_file.write("lai.\n")
        names_file.write("B1: continuous\n")
        names_file.write("B2: continuous\n")
        names_file.write("B3: continuous\n")
        names_file.write("B4: continuous\n")
        names_file.write("B5: continuous\n")
        names_file.write("B7: continuous\n")
        names_file.write("ndvi: continuous\n")
        names_file.write("ndwi: continuous\n")
        names_file.write("lai: continuous\n")
        names_file.write("case weight: continuous\n")
        names_file.write("attributes excluded: B1, B2, B7, ndvi, ndwi\n")
    nrules = 5
    subprocess.call(["%s" % cubist, "-f", "%s" % filestem, "-r", "%d" % nrules, "-u"])
def compute():
    """Run `lndlai_compute` for every Landsat scene using the trained model.

    Writes a control file per scene, invokes the external tool and moves the
    resulting LAI HDF into a per-scene output folder, then removes the
    intermediate Cubist sample files.
    """
    lndbio = 'lndlai_compute'
    bands = ["blue", "green", "red", "nir", "swir1", "swir2", "cloud"]
    l8bands = ["sr_band2", "sr_band3", "sr_band4", "sr_band5", "sr_band6", "sr_band7", "cfmask"]
    landsatFiles = glob.glob(os.path.join(landsatTemp, "*.xml"))
    for landsat_file in landsatFiles:
        sceneID = landsat_file.split(os.sep)[-1][:-4]
        fstem = landsat_file[:-4]
        # create a folder for lai if it does not exist (keyed on path/row)
        laiPath = os.path.join(landsatLAI, '%s' % sceneID[3:9])
        if not os.path.exists(laiPath):
            os.mkdir(laiPath)
        startDate = '200'
        endDate = '300'
        filestem = os.path.join(landsatLAI, "lndsr_modlai_samples.combined_%s-%s" % (startDate, endDate))
        laiFN = os.path.join(landsatLAI, "lndlai.%s.hdf" % sceneID)
        # fixed: the original never filled the "%s" placeholder, so every
        # scene wrote the literal file "compute_lai%s.inp"
        fn = os.path.join(landsatLAI, "compute_lai%s.inp" % sceneID)
        with open(fn, "w") as inp:
            inp.write("LANDSAT_BASE_BLUE = %s_%s.%s.dat\n" % (fstem, l8bands[0], bands[0]))
            inp.write("LANDSAT_BASE_GREEN = %s_%s.%s.dat\n" % (fstem, l8bands[1], bands[1]))
            inp.write("LANDSAT_BASE_RED = %s_%s.%s.dat\n" % (fstem, l8bands[2], bands[2]))
            inp.write("LANDSAT_BASE_NIR = %s_%s.%s.dat\n" % (fstem, l8bands[3], bands[3]))
            inp.write("LANDSAT_BASE_SWIR1 = %s_%s.%s.dat\n" % (fstem, l8bands[4], bands[4]))
            inp.write("LANDSAT_BASE_SWIR2 = %s_%s.%s.dat\n" % (fstem, l8bands[5], bands[5]))
            inp.write("LANDSAT_BASE_CLOUD = %s_%s.%s.dat\n" % (fstem, l8bands[6], bands[6]))
            inp.write("LANDSAT_ANC_FILE = %s\n" % filestem)
            inp.write("BIOPHYSICS_PARA_FILE_OUT = %s\n" % laiFN)
        subprocess.call(["%s" % lndbio, "%s" % fn])
        shutil.move(laiFN, os.path.join(laiPath, "lndlai.%s.hdf" % sceneID))
        os.remove(fn)
    # =====CLEANING UP========
    for f in os.listdir(landsatLAI):
        if f.startswith("lndsr_modlai_samples"):
            os.remove(os.path.join(landsatLAI, f))
def getLAI():
    """Run the full Landsat LAI chain: convert, sample, train, compute."""
    # Convert Landsat SR downloads to ENVI format
    # Note: May be some warnings about unknown field - ignore.
    print("Converting Landsat SR to ENVI format...")
    geotiff2envi()
    # Generate MODIS-Landsat samples for LAI computation
    print("Generating MODIS-Landsat samples...")
    sample()
    # Compute Landsat LAI
    print("Computing Landsat LAI...")
    train()
    compute()
def main():
    """CLI entry point: order Landsat SR and MODIS LAI for a point and date
    range, derive Landsat-resolution LAI, then kick off LST processing."""
    # Get time and location from user
    parser = argparse.ArgumentParser()
    parser.add_argument("lat", type=float, help="latitude")
    parser.add_argument("lon", type=float, help="longitude")
    parser.add_argument("startDate", type=str, help="Start date yyyy-mm-dd")
    parser.add_argument("endDate", type=str, help="Start date yyyy-mm-dd")
    args = parser.parse_args()
    loc = [args.lat, args.lon]
    startDate = args.startDate
    endDate = args.endDate
    # set project base directory structure
    # 41.18,-96.43
    # =====USGS credentials===============
    # need to get this from pop up
    # NOTE(review): getpass is used for the username too, so it will not
    # echo while typing.
    usgsUser = str(getpass.getpass(prompt="usgs username:"))
    # cache the password in the system keyring on first use
    if keyring.get_password("usgs", usgsUser) == None:
        usgsPass = str(getpass.getpass(prompt="usgs password:"))
        keyring.set_password("usgs", usgsUser, usgsPass)
    else:
        usgsPass = str(keyring.get_password("usgs", usgsUser))
    # =====earthData credentials===============
    earthLoginUser = str(getpass.getpass(prompt="earth login username:"))
    if keyring.get_password("nasa", earthLoginUser) == None:
        earthLoginPass = str(getpass.getpass(prompt="earth login password:"))
        keyring.set_password("nasa", earthLoginUser, earthLoginPass)
    else:
        earthLoginPass = str(keyring.get_password("nasa", earthLoginUser))
    # start Landsat order process
    getLandsatData(loc, startDate, endDate, ("%s" % usgsUser, "%s" % usgsPass))
    # find MODIS tiles that cover landsat scene
    # MODIS products
    product = 'MCD15A3'
    version = '005'
    [v, h] = latlon2MODtile(args.lat, args.lon)
    tiles = "h%02dv%02d" % (h, v)
    # tiles = 'h10v04,h10v05'
    # download MODIS LAI over the same area and time
    print("Downloading MODIS data...")
    getMODISlai(tiles, product, version, startDate, endDate, ("%s" % earthLoginUser, "%s" % earthLoginPass))
    # move surface relectance files and estimate get LAI
    downloadFolder = os.path.join(base, 'espa_downloads')
    folders2move = glob.glob(os.path.join(downloadFolder, '*'))
    for i in range(len(folders2move)):
        inputFN = folders2move[i]
        # scene folder names look like "<sceneID>-<order>"
        sceneID = (inputFN).split(os.sep)[-1].split('-')[0]
        scene = sceneID[3:9]
        folder = os.path.join(landsatSR, scene)
        if not os.path.exists(folder):
            os.mkdir(folder)
        for filename in glob.glob(os.path.join(inputFN, '*.*')):
            shutil.copy(filename, folder)
    if len(folders2move) > 0:
        # ======Clean up folder===============================
        shutil.rmtree(downloadFolder)
        getLAI()
    print("All done with LAI")
    print("========================================")
    print("==============process LST===============")
    subprocess.call(["processlst", "%s" % earthLoginUser, "%s" % earthLoginPass])
    # shutil.rmtree(landsatTemp)
if __name__ == "__main__":
    try:
        main()
    except (KeyboardInterrupt, pycurl.error):
        # fixed: the builtin exit() accepts a single status argument; the
        # original passed two, raising TypeError while handling Ctrl+C.
        # A string status prints the message and exits with code 1.
        exit('Received Ctrl + C... Exiting! Bye.')
| bucricket/projectMASpreprocess | preparepydisalexi/processLandsatLAI.py | Python | bsd-3-clause | 14,392 |
import numpy as np
import fdasrsf as fs
from scipy.integrate import cumtrapz
from scipy.linalg import norm, expm
import h5py
# Debug driver: gradient ascent over rotation and warping for the
# open-curve multinomial-logistic alignment problem in fdasrsf.
# Load the saved debug inputs from a previous run.
fun = h5py.File('/home/dtucker/fdasrsf/debug_data_oc_mlogit.h5')
q = fun['q'][:]
y = fun['y'][:]
alpha = fun['alpha'][:]
nu = fun['nu'][:]
# Optimization settings.
max_itr = 8000  # 4000
tol = 1e-4
deltag = .05   # warping step size
deltaO = .08   # rotation step size
display = 1
# Normalize the inputs to unit norm.
alpha = alpha/norm(alpha)
q, scale = fs.scale_curve(q)  # q/norm(q)
for ii in range(0, nu.shape[2]):
    nu[:, :, ii], scale = fs.scale_curve(nu[:, :, ii])  # nu/norm(nu)
# python code
n = q.shape[0]
TT = q.shape[1]
m = nu.shape[2]
time = np.linspace(0, 1, TT)
binsize = 1. / (TT - 1)
gam = np.linspace(0, 1, TT)
O = np.eye(n)
O_old = O.copy()
gam_old = gam.copy()
qtilde = q.copy()
# rotation basis (Skew Symmetric)
# E = np.array([[0, -1.], [1., 0]])
# warping basis (Fourier)
p = 20
f_basis = np.zeros((TT, p))
for i in range(0, int(p/2)):
    f_basis[:, 2*i] = 1/np.sqrt(np.pi) * np.sin(2*np.pi*(i+1)*time)
    f_basis[:, 2*i + 1] = 1/np.sqrt(np.pi) * np.cos(2*np.pi*(i+1)*time)
itr = 0
max_val = np.zeros(max_itr+1)
while itr <= max_itr:
    # inner product value
    A = np.zeros(m)
    for i in range(0, m):
        A[i] = fs.innerprod_q2(qtilde, nu[:, :, i])
    # form gradient for rotation
    # B = np.zeros((n, n, m))
    # for i in range(0, m):
    #     B[:, :, i] = cf.innerprod_q2(E.dot(qtilde), nu[:, :, i]) * E
    # tmp1 = np.sum(np.exp(alpha + A))
    # tmp2 = np.sum(np.exp(alpha + A) * B, axis=2)
    # hO = np.sum(y * B, axis=2) - (tmp2 / tmp1)
    # O_new = O_old.dot(expm(deltaO * hO))
    # Parameterize the 2x2 rotation by its angle and take a gradient step.
    theta = np.arccos(O_old[0, 0])
    Ograd = np.array([(-1*np.sin(theta), -1*np.cos(theta)),
                      (np.cos(theta), -1*np.sin(theta))])
    B = np.zeros(m)
    for i in range(0, m):
        B[i] = fs.innerprod_q2(Ograd.dot(qtilde), nu[:, :, i])
    tmp1 = np.sum(np.exp(alpha + A))
    tmp2 = np.sum(np.exp(alpha + A) * B)
    hO = np.sum(y * B) - (tmp2 / tmp1)
    O_new = fs.rot_mat(theta+deltaO*hO)
    # form gradient for warping
    qtilde_diff = np.gradient(qtilde, binsize)
    qtilde_diff = qtilde_diff[1]
    c = np.zeros((TT, m))
    for i in range(0, m):
        tmp3 = np.zeros((TT, p))
        for j in range(0, p):
            cbar = cumtrapz(f_basis[:, j], time, initial=0)
            ctmp = 2*qtilde_diff*cbar + qtilde*f_basis[:, j]
            tmp3[:, j] = fs.innerprod_q2(ctmp, nu[:, :, i]) * f_basis[:, j]
        c[:, i] = np.sum(tmp3, axis=1)
    tmp2 = np.sum(np.exp(alpha + A) * c, axis=1)
    hpsi = np.sum(y * c, axis=1) - (tmp2 / tmp1)
    # Take an exponential-map step along hpsi and rebuild the warping.
    vecnorm = norm(hpsi)
    costmp = np.cos(deltag * vecnorm) * np.ones(TT)
    sintmp = np.sin(deltag * vecnorm) * (hpsi / vecnorm)
    psi_new = costmp + sintmp
    gam_tmp = cumtrapz(psi_new * psi_new, time, initial=0)
    gam_tmp = (gam_tmp - gam_tmp[0]) / (gam_tmp[-1] - gam_tmp[0])
    gam_new = np.interp(gam_tmp, time, gam_old)
    # Log-likelihood value at the current iterate.
    max_val[itr] = np.sum(y * (alpha + A)) - np.log(tmp1)
    if display == 1:
        print("Iteration %d : Cost %f" % (itr+1, max_val[itr]))
    # Accept the step and update the aligned curve.
    gam_old = gam_new.copy()
    O_old = O_new.copy()
    qtilde = fs.group_action_by_gamma(O_old.dot(q), gam_old)
    # Stop when both gradients are below tolerance.
    if vecnorm < tol and hO < tol:
        break
    itr += 1
| glemaitre/fdasrsf | debug/debug_warp_ocmlogistic.py | Python | gpl-3.0 | 3,202 |
#!/usr/bin/env python
import fnmatch
import os
from pathlib import Path
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import time
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
from typing import Optional
import click
import git
import typer
import yaml
from packaging.version import Version
from typer import colors as c
# Editable configuration
DEFAULT_HOST_OS = "cc7"
DEFAULT_MYSQL_VER = "mysql:8.0"
DEFAULT_ES_VER = "elasticsearch:7.9.1"
# Feature flags overridable on the command line; None means "unset unless
# the user supplies a value".
FEATURE_VARIABLES = {
    "DIRACOSVER": "master",
    "DIRACOS_TARBALL_PATH": None,
    "TEST_HTTPS": "No",
    "DIRAC_FEWER_CFG_LOCKS": None,
    "DIRAC_USE_JSON_ENCODE": None,
    "DIRAC_USE_JSON_DECODE": None,
}
DEFAULT_MODULES = {
    "DIRAC": Path(__file__).parent.absolute(),
}
# Static configuration
DB_USER = "Dirac"
DB_PASSWORD = "Dirac"
DB_ROOTUSER = "root"
DB_ROOTPWD = "password"
DB_HOST = "mysql"
DB_PORT = "3306"
# Implementation details
# NOTE(review): tuples look like (background, foreground) typer colors --
# confirm where LOG_LEVEL_MAP is consumed.
LOG_LEVEL_MAP = {
    "ALWAYS": (c.BLACK, c.WHITE),
    "NOTICE": (None, c.MAGENTA),
    "INFO": (None, c.GREEN),
    "VERBOSE": (None, c.CYAN),
    "DEBUG": (None, c.BLUE),
    "WARN": (None, c.YELLOW),
    "ERROR": (None, c.RED),
    "FATAL": (c.RED, c.BLACK),
}
# Matches the level token of a DIRAC log line,
# e.g. "2021-01-01 12:00:00 UTC Framework INFO:".
LOG_PATTERN = re.compile(r"^[\d\-]{10} [\d:]{8} UTC [^\s]+ ([A-Z]+):")
class NaturalOrderGroup(click.Group):
    """Group for showing subcommands in the correct order"""

    def list_commands(self, ctx):
        # Return commands in insertion order (dicts preserve it) instead of
        # click's default alphabetical sorting.
        return self.commands.keys()
app = typer.Typer(
cls=NaturalOrderGroup,
help=f"""Run the DIRAC integration tests.
A local DIRAC setup can be created and tested by running:
\b
./integration_tests.py create
This is equivalent to running:
\b
./integration_tests.py prepare-environment
./integration_tests.py install-server
./integration_tests.py install-client
./integration_tests.py test-server
./integration_tests.py test-client
The test setup can be shutdown using:
\b
./integration_tests.py destroy
See below for additional subcommands which are useful during local development.
## Features
The currently known features and their default values are:
\b
HOST_OS: {DEFAULT_HOST_OS!r}
MYSQL_VER: {DEFAULT_MYSQL_VER!r}
ES_VER: {DEFAULT_ES_VER!r}
{(os.linesep + ' ').join(['%s: %r' % x for x in FEATURE_VARIABLES.items()])}
All features can be prefixed with "SERVER_" or "CLIENT_" to limit their scope.
## Extensions
Integration tests can be ran for extensions to DIRAC by specifying the module
name and path such as:
\b
./integration_tests.py create --extra-module MyDIRAC=/path/to/MyDIRAC
This will modify the setup process based on the contents of
`MyDIRAC/tests/.dirac-ci-config.yaml`. See the Vanilla DIRAC file for the
available options.
## Command completion
Command completion of typer based scripts can be enabled by running:
typer --install-completion
After restarting your terminal you command completion is available using:
typer ./integration_tests.py run ...
""",
)
@app.command()
def create(
    flags: Optional[list[str]] = typer.Argument(None),
    editable: Optional[bool] = None,
    extra_module: Optional[list[str]] = None,
    release_var: Optional[str] = None,
    run_server_tests: bool = True,
    run_client_tests: bool = True,
):
    """Start a local instance of the integration tests"""
    # Full pipeline: containers + server install + client install + tests.
    prepare_environment(flags, editable, extra_module, release_var)
    install_server()
    install_client()
    exit_code = 0
    if run_server_tests:
        try:
            test_server()
        except TestExit as e:
            # TestExit carries the test run's return code; accumulate it so
            # a client failure is still reported after a server failure.
            exit_code += e.exit_code
    else:
        # Skipping the server tests is not currently implemented.
        raise NotImplementedError()
    if run_client_tests:
        try:
            test_client()
        except TestExit as e:
            exit_code += e.exit_code
    else:
        # Skipping the client tests is not currently implemented.
        raise NotImplementedError()
    if exit_code != 0:
        typer.secho("One or more tests failed", err=True, fg=c.RED)
        raise typer.Exit(exit_code)
@app.command()
def destroy():
    """Destroy a local instance of the integration tests"""
    typer.secho("Shutting down and removing containers", err=True, fg=c.GREEN)
    with _gen_docker_compose(DEFAULT_MODULES) as docker_compose_fn:
        cmd = [
            "docker-compose",
            "-f",
            docker_compose_fn,
            "down",
            "--remove-orphans",
            "-t",
            "0",
        ]
        # Replace the current process with docker-compose; never returns.
        os.execvpe(cmd[0], cmd, _make_env({}))
@app.command()
def prepare_environment(
    flags: Optional[list[str]] = typer.Argument(None),
    editable: Optional[bool] = None,
    extra_module: Optional[list[str]] = None,
    release_var: Optional[str] = None,
):
    """Prepare the local environment for installing DIRAC."""
    _check_containers_running(is_up=False)
    if editable is None:
        # Default to editable installs when running interactively.
        editable = sys.stdout.isatty()
        typer.secho(
            f"No value passed for --[no-]editable, automatically detected: {editable}",
            fg=c.YELLOW,
        )
    typer.echo(f"Preparing environment")

    # Merge user-supplied modules over the defaults (3.9+ dict union).
    modules = DEFAULT_MODULES | dict(f.split("=", 1) for f in extra_module)
    modules = {k: Path(v).absolute() for k, v in modules.items()}
    flags = dict(f.split("=", 1) for f in flags)
    docker_compose_env = _make_env(flags)
    # Split flags into server-only, client-only, and shared sets based on
    # their SERVER_/CLIENT_ prefix.
    server_flags = {}
    client_flags = {}
    for key, value in flags.items():
        if key.startswith("SERVER_"):
            server_flags[key[len("SERVER_") :]] = value
        elif key.startswith("CLIENT_"):
            client_flags[key[len("CLIENT_") :]] = value
        else:
            server_flags[key] = value
            client_flags[key] = value
    server_config = _make_config(modules, server_flags, release_var, editable)
    client_config = _make_config(modules, client_flags, release_var, editable)

    typer.secho("Running docker-compose to create containers", fg=c.GREEN)
    with _gen_docker_compose(modules) as docker_compose_fn:
        subprocess.run(
            ["docker-compose", "-f", docker_compose_fn, "up", "-d"],
            check=True,
            env=docker_compose_env,
        )

    typer.secho("Creating users in server and client containers", fg=c.GREEN)
    for container_name in ["server", "client"]:
        if os.getuid() == 0:
            # Host user is root; no in-container user mapping needed.
            continue
        cmd = _build_docker_cmd(container_name, use_root=True, cwd="/")
        # Mirror the host UID/GID inside the container so bind-mounted files
        # keep sane ownership.
        gid = str(os.getgid())
        uid = str(os.getuid())
        ret = subprocess.run(cmd + ["groupadd", "--gid", gid, "dirac"], check=False)
        if ret.returncode != 0:
            typer.secho(f"Failed to add add group dirac with id={gid}", fg=c.YELLOW)
        subprocess.run(
            cmd
            + [
                "useradd",
                "--uid",
                uid,
                "--gid",
                gid,
                "-s",
                "/bin/bash",
                "-d",
                "/home/dirac",
                "dirac",
            ],
            check=True,
        )
        subprocess.run(cmd + ["chown", "dirac", "/home/dirac"], check=True)

    typer.secho("Creating MySQL user", fg=c.GREEN)
    cmd = ["docker", "exec", "mysql", "mysql", f"--password={DB_ROOTPWD}", "-e"]
    # It sometimes takes a while for MySQL to be ready so wait for a while if needed
    for _ in range(10):
        ret = subprocess.run(
            cmd + [f"CREATE USER '{DB_USER}'@'%' IDENTIFIED BY '{DB_PASSWORD}';"],
            check=False,
        )
        if ret.returncode == 0:
            break
        typer.secho("Failed to connect to MySQL, will retry in 10 seconds", fg=c.YELLOW)
        time.sleep(10)
    else:
        # All retries exhausted: surface the last failed result.
        raise Exception(ret)
    subprocess.run(
        cmd + [f"CREATE USER '{DB_USER}'@'localhost' IDENTIFIED BY '{DB_PASSWORD}';"],
        check=True,
    )
    subprocess.run(
        cmd + [f"CREATE USER '{DB_USER}'@'mysql' IDENTIFIED BY '{DB_PASSWORD}';"],
        check=True,
    )

    typer.secho("Copying files to containers", fg=c.GREEN)
    for name, config in [("server", server_config), ("client", client_config)]:
        if path := config.get("DIRACOS_TARBALL_PATH"):
            # Copy the tarball into the container root and point the config
            # at its in-container location.
            path = Path(path)
            config["DIRACOS_TARBALL_PATH"] = f"/{path.name}"
            subprocess.run(
                ["docker", "cp", str(path), f"{name}:/{config['DIRACOS_TARBALL_PATH']}"],
                check=True,
            )
        config_as_shell = _dict_to_shell(config)
        typer.secho(f"## {name.title()} config is:", fg=c.BRIGHT_WHITE, bg=c.BLACK)
        typer.secho(config_as_shell)
        with tempfile.TemporaryDirectory() as tmpdir:
            path = Path(tmpdir) / "CONFIG"
            path.write_text(config_as_shell)
            subprocess.run(
                ["docker", "cp", str(path), f"{name}:/home/dirac"],
                check=True,
            )

    # Run any module-defined post-prepare hooks.
    for module_name, module_configs in _load_module_configs(modules).items():
        for command in module_configs.get("commands", {}).get("post-prepare", []):
            typer.secho(
                f"Running post-prepare command for {module_name}: {command}",
                err=True,
                fg=c.GREEN,
            )
            subprocess.run(command, check=True, shell=True)
@app.command()
def install_server():
    """Install DIRAC in the server container."""
    _check_containers_running()
    typer.secho("Running server installation", fg=c.GREEN)
    base_cmd = _build_docker_cmd("server", tty=False)
    subprocess.run(
        base_cmd + ["bash", "/home/dirac/LocalRepo/TestCode/DIRAC/tests/CI/install_server.sh"],
        check=True,
    )

    typer.secho("Copying credentials and certificates", fg=c.GREEN)
    base_cmd = _build_docker_cmd("client", tty=False)
    subprocess.run(
        base_cmd
        + [
            "mkdir",
            "-p",
            "/home/dirac/ServerInstallDIR/user",
            "/home/dirac/ClientInstallDIR/etc",
            "/home/dirac/.globus",
        ],
        check=True,
    )
    for path in [
        "etc/grid-security",
        "user/client.pem",
        "user/client.key",
        f"/tmp/x509up_u{os.getuid()}",
    ]:
        # os.path.join returns the second argument unchanged when it is
        # absolute, so the /tmp proxy path passes through as-is.
        source = os.path.join("/home/dirac/ServerInstallDIR", path)
        # Stream the file out of the server container as a tar archive...
        ret = subprocess.run(
            ["docker", "cp", f"server:{source}", "-"],
            check=True,
            text=False,
            stdout=subprocess.PIPE,
        )
        if path.startswith("user/"):
            dest = f"client:/home/dirac/ServerInstallDIR/{os.path.dirname(path)}"
        elif path.startswith("/"):
            dest = f"client:{os.path.dirname(path)}"
        else:
            dest = f"client:/home/dirac/ClientInstallDIR/{os.path.dirname(path)}"
        # ...and stream it back into the client container.
        subprocess.run(["docker", "cp", "-", dest], check=True, text=False, input=ret.stdout)
    subprocess.run(
        base_cmd
        + [
            "bash",
            "-c",
            "cp /home/dirac/ServerInstallDIR/user/client.* /home/dirac/.globus/",
        ],
        check=True,
    )
@app.command()
def install_client():
    """Install DIRAC in the client container."""
    _check_containers_running()
    typer.secho("Running client installation", fg=c.GREEN)
    install_script = "/home/dirac/LocalRepo/TestCode/DIRAC/tests/CI/install_client.sh"
    cmd = _build_docker_cmd("client") + ["bash", install_script]
    subprocess.run(cmd, check=True)
@app.command()
def test_server():
    """Run the server integration tests and exit with their return code."""
    _check_containers_running()
    typer.secho("Running server tests", err=True, fg=c.GREEN)
    cmd = _build_docker_cmd("server") + ["bash", "TestCode/DIRAC/tests/CI/run_tests.sh"]
    result = subprocess.run(cmd, check=False)
    if result.returncode == 0:
        outcome_color = c.GREEN
    else:
        outcome_color = c.RED
    typer.secho(f"Server tests finished with {result.returncode}", err=True, fg=outcome_color)
    raise TestExit(result.returncode)
@app.command()
def test_client():
    """Run the client integration tests and exit with their return code."""
    _check_containers_running()
    typer.secho("Running client tests", err=True, fg=c.GREEN)
    cmd = _build_docker_cmd("client") + ["bash", "TestCode/DIRAC/tests/CI/run_tests.sh"]
    result = subprocess.run(cmd, check=False)
    if result.returncode == 0:
        outcome_color = c.GREEN
    else:
        outcome_color = c.RED
    typer.secho(f"Client tests finished with {result.returncode}", err=True, fg=outcome_color)
    raise TestExit(result.returncode)
@app.command()
def exec_server():
    """Start an interactive session in the server container."""
    _check_containers_running()
    # Source the generated CONFIG and the install's bashrc, then hand over
    # to an interactive shell; execvp replaces the current process.
    shell_bootstrap = ". $HOME/CONFIG && . $HOME/ServerInstallDIR/bashrc && exec bash"
    full_cmd = _build_docker_cmd("server") + ["bash", "-c", shell_bootstrap]
    typer.secho("Opening prompt inside server container", err=True, fg=c.GREEN)
    os.execvp(full_cmd[0], full_cmd)
@app.command()
def exec_client():
    """Start an interactive session in the client container."""
    _check_containers_running()
    # Source the generated CONFIG and the install's bashrc, then hand over
    # to an interactive shell; execvp replaces the current process.
    shell_bootstrap = ". $HOME/CONFIG && . $HOME/ClientInstallDIR/bashrc && exec bash"
    full_cmd = _build_docker_cmd("client") + ["bash", "-c", shell_bootstrap]
    typer.secho("Opening prompt inside client container", err=True, fg=c.GREEN)
    os.execvp(full_cmd[0], full_cmd)
@app.command()
def exec_mysql():
    """Start an interactive MySQL shell in the mysql container."""
    _check_containers_running()
    cmd = _build_docker_cmd("mysql", use_root=True, cwd="/")
    cmd += [
        "bash",
        "-c",
        f"exec mysql --user={DB_USER} --password={DB_PASSWORD}",
    ]
    # BUGFIX: the message previously said "server container" (copy/paste from
    # exec_server); this command opens a prompt in the mysql container.
    typer.secho("Opening prompt inside mysql container", err=True, fg=c.GREEN)
    os.execvp(cmd[0], cmd)
@app.command()
def list_services():
    """List the services which have been running.

    Only the services for which /log/current exists are shown.
    """
    _check_containers_running()
    typer.secho("Known services:", err=True)
    _, services = _list_services()
    for service_name in services:
        typer.secho(f"* {service_name}", err=True)
@app.command()
def runsvctrl(command: str, pattern: str):
    """Execute runsvctrl inside the server container."""
    _check_containers_running()
    runit_dir, known_services = _list_services()
    docker_cmd = _build_docker_cmd("server", cwd=runit_dir)
    matched = fnmatch.filter(known_services, pattern)
    if not matched:
        typer.secho(f"No services match {pattern!r}", fg=c.RED)
        raise typer.Exit(code=1)
    docker_cmd.extend(["runsvctrl", command, *matched])
    os.execvp(docker_cmd[0], docker_cmd)
@app.command()
def logs(pattern: str = "*", lines: int = 10, follow: bool = True):
    """Show DIRAC's logs from the service container.

    For services matching [--pattern] show the most recent [--lines] from the
    logs. If [--follow] is True, continuously stream the logs.
    """
    _check_containers_running()
    runit_dir, services = _list_services()
    # "tail" runs inside the server container against each service's runit log.
    base_cmd = _build_docker_cmd("server", tty=False) + ["tail"]
    base_cmd += [f"--lines={lines}"]
    if follow:
        base_cmd += ["-f"]
    # One tail process per *matching* service, drained concurrently so the
    # interleaved output appears as it arrives.
    # NOTE(review): the pool is sized from all known services, not just the
    # matches -- harmless, but ThreadPoolExecutor(0) would raise if
    # _list_services() ever returned an empty list; confirm that cannot happen.
    with ThreadPoolExecutor(len(services)) as pool:
        for service in fnmatch.filter(services, pattern):
            cmd = base_cmd + [f"{runit_dir}/{service}/log/current"]
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=None, text=True)
            pool.submit(_log_popen_stdout, p)
class TestExit(typer.Exit):
    """typer.Exit subclass used to propagate an integration-test exit code."""
    pass
@contextmanager
def _gen_docker_compose(modules):
    """Yield a path to a generated docker-compose.yml for the given modules.

    The configuration is based on tests/CI/docker-compose.yml with one volume
    mount per module in both the server and client containers, plus any
    "extra-services" declared in the modules' CI configs.  The generated file
    lives in a temporary directory that is removed when the context exits.
    """
    # Load the docker-compose configuration and mount the necessary volumes
    input_fn = Path(__file__).parent / "tests/CI/docker-compose.yml"
    docker_compose = yaml.safe_load(input_fn.read_text())
    volumes = [f"{path}:/home/dirac/LocalRepo/ALTERNATIVE_MODULES/{name}" for name, path in modules.items()]
    volumes += [f"{path}:/home/dirac/LocalRepo/TestCode/{name}" for name, path in modules.items()]
    docker_compose["services"]["dirac-server"]["volumes"] = volumes[:]
    docker_compose["services"]["dirac-client"]["volumes"] = volumes[:]
    # Add any extension services
    # NOTE(review): this indexes module_configs["extra-services"] directly;
    # a CI config without that key would raise KeyError -- confirm every
    # .dirac-ci-config.yaml declares it.
    for module_name, module_configs in _load_module_configs(modules).items():
        for service_name, service_config in module_configs["extra-services"].items():
            typer.secho(f"Adding service {service_name} for {module_name}", err=True, fg=c.GREEN)
            docker_compose["services"][service_name] = service_config.copy()
            docker_compose["services"][service_name]["volumes"] = volumes[:]
    # Write to a temporary file with the appropriate profile name
    prefix = "ci"
    with tempfile.TemporaryDirectory() as tmpdir:
        input_docker_compose_dir = Path(__file__).parent / "tests/CI/"
        output_fn = Path(tmpdir) / prefix / "docker-compose.yml"
        output_fn.parent.mkdir()
        output_fn.write_text(yaml.safe_dump(docker_compose, sort_keys=False))
        # The env files shipped alongside the reference compose file must sit
        # next to the generated one (dirs_exist_ok requires Python 3.8+).
        shutil.copytree(input_docker_compose_dir / "envs", str(Path(tmpdir) / prefix), dirs_exist_ok=True)
        yield output_fn
def _check_containers_running(*, is_up=True):
    """Assert the state of the docker-compose environment.

    Args:
        is_up: when True, require at least one container to exist;
            when False, require that none do.

    Raises:
        typer.Exit: when the actual state does not match ``is_up``.
    """
    with _gen_docker_compose(DEFAULT_MODULES) as docker_compose_fn:
        # "ps -q -a" prints one container ID per line; "-a" includes stopped
        # containers, so "running" here really means "existing".  Empty
        # output splits to [""], which any() correctly treats as no IDs.
        running_containers = subprocess.run(
            ["docker-compose", "-f", docker_compose_fn, "ps", "-q", "-a"],
            stdout=subprocess.PIPE,
            env=_make_env({}),
            check=True,
            text=True,
        ).stdout.split("\n")
        if is_up:
            if not any(running_containers):
                # FIX: dropped the pointless f-prefix (no placeholders, F541).
                typer.secho(
                    "No running containers found, environment must be prepared first!",
                    err=True,
                    fg=c.RED,
                )
                raise typer.Exit(code=1)
        else:
            if any(running_containers):
                typer.secho(
                    "Running instance already found, it must be destroyed first!",
                    err=True,
                    fg=c.RED,
                )
                raise typer.Exit(code=1)
def _find_dirac_release_and_branch():
    """Guess the (DIRAC_RELEASE, DIRACBRANCH) pair to test against.

    Returns ("integration", "") when CI points at the integration branch,
    otherwise derives "rel-vXrY" from the most recent version tag; falls
    back to integration when no matching release branch exists upstream.
    """
    # Start by looking for the GitHub/GitLab environment variables
    ref = os.environ.get("CI_COMMIT_REF_NAME", os.environ.get("GITHUB_REF"))
    if ref == "refs/heads/integration":
        return "integration", ""
    ref = os.environ.get("CI_MERGE_REQUEST_TARGET_BRANCH_NAME", os.environ.get("GITHUB_BASE_REF"))
    if ref == "integration":
        return "integration", ""
    repo = git.Repo(os.getcwd())
    # Try to make sure the upstream remote is up to date
    try:
        upstream = repo.remote("upstream")
    except ValueError:
        typer.secho("No upstream remote found, adding", err=True, fg=c.YELLOW)
        upstream = repo.create_remote("upstream", "https://github.com/DIRACGrid/DIRAC.git")
    try:
        upstream.fetch()
    except Exception:
        # Best effort: offline operation should still work with stale refs.
        typer.secho("Failed to fetch from remote 'upstream'", err=True, fg=c.YELLOW)
    # Find the most recent tag on the current branch; "describe --long"
    # yields "<tag>-<n>-g<sha>", keep only the tag part.
    version = Version(
        repo.git.describe(
            dirty=True,
            tags=True,
            long=True,
            match="*[0-9]*",
            exclude=["v[0-9]r*", "v[0-9][0-9]r*"],
        ).split("-")[0]
    )
    # See if there is a remote branch named "rel-vXrY"
    version_branch = f"rel-v{version.major}r{version.minor}"
    try:
        upstream.refs[version_branch]
    except IndexError:
        typer.secho(
            f"Failed to find branch for {version_branch}, defaulting to integration",
            err=True,
            fg=c.YELLOW,
        )
        return "integration", ""
    else:
        return "", f"v{version.major}r{version.minor}"
def _make_env(flags):
    """Build the docker-compose environment, consuming the known flags."""
    env = dict(os.environ)
    env.update({
        "DIRAC_UID": str(os.getuid()),
        "DIRAC_GID": str(os.getgid()),
        "HOST_OS": flags.pop("HOST_OS", DEFAULT_HOST_OS),
        "CI_REGISTRY_IMAGE": flags.pop("CI_REGISTRY_IMAGE", "diracgrid"),
        "MYSQL_VER": flags.pop("MYSQL_VER", DEFAULT_MYSQL_VER),
        "ES_VER": flags.pop("ES_VER", DEFAULT_ES_VER),
    })
    return env
def _dict_to_shell(variables):
lines = []
for name, value in variables.items():
if value is None:
continue
elif isinstance(value, list):
lines += [f"declare -a {name}"]
lines += [f"{name}+=({shlex.quote(v)})" for v in value]
elif isinstance(value, bool):
lines += [f"export {name}={'Yes' if value else 'No'}"]
elif isinstance(value, str):
lines += [f"export {name}={shlex.quote(value)}"]
else:
raise NotImplementedError(name, value, type(value))
return "\n".join(lines)
def _make_config(modules, flags, release_var, editable):
    """Build the CONFIG dictionary written into the containers.

    Starts from fixed defaults, layers in each module's CI config, resolves
    the DIRAC release/branch, applies feature flags (consuming ``flags``),
    and records the test/module paths.  Exits with an error on missing
    required flags or unrecognised leftover flags.
    """
    config = {
        "DEBUG": "True",
        # MYSQL Settings
        "DB_USER": DB_USER,
        "DB_PASSWORD": DB_PASSWORD,
        "DB_ROOTUSER": DB_ROOTUSER,
        "DB_ROOTPWD": DB_ROOTPWD,
        "DB_HOST": DB_HOST,
        "DB_PORT": DB_PORT,
        # ElasticSearch settings
        "NoSQLDB_HOST": "elasticsearch",
        "NoSQLDB_PORT": "9200",
        # Hostnames
        "SERVER_HOST": "server",
        "CLIENT_HOST": "client",
        # Test specific variables
        "WORKSPACE": "/home/dirac",
    }
    if editable:
        config["PIP_INSTALL_EXTRA_ARGS"] = "-e"
    required_feature_flags = []
    # Merge each module's CI config ("|=" requires Python 3.9+).
    for module_name, module_ci_config in _load_module_configs(modules).items():
        config |= module_ci_config["config"]
        required_feature_flags += module_ci_config.get("required-feature-flags", [])
    config["DIRAC_CI_SETUP_SCRIPT"] = "/home/dirac/LocalRepo/TestCode/" + config["DIRAC_CI_SETUP_SCRIPT"]
    # This can likely be removed after the Python 3 migration
    if release_var:
        # release_var is a "NAME=value" string supplied by the caller.
        config |= dict([release_var.split("=", 1)])
    else:
        config["DIRAC_RELEASE"], config["DIRACBRANCH"] = _find_dirac_release_and_branch()
    # Optional feature flags fall back to their defaults...
    for key, default_value in FEATURE_VARIABLES.items():
        config[key] = flags.pop(key, default_value)
    # ...while module-required flags must be supplied explicitly.
    for key in required_feature_flags:
        try:
            config[key] = flags.pop(key)
        except KeyError:
            typer.secho(f"Required feature variable {key!r} is missing", err=True, fg=c.RED)
            raise typer.Exit(code=1)
    config["TESTREPO"] = [f"/home/dirac/LocalRepo/TestCode/{name}" for name in modules]
    config["ALTERNATIVE_MODULES"] = [f"/home/dirac/LocalRepo/ALTERNATIVE_MODULES/{name}" for name in modules]
    # Exit with an error if there are unused feature flags remaining
    if flags:
        typer.secho(f"Unrecognised feature flags {flags!r}", err=True, fg=c.RED)
        raise typer.Exit(code=1)
    return config
def _load_module_configs(modules):
module_ci_configs = {}
for module_name, module_path in modules.items():
module_ci_config_path = module_path / "tests/.dirac-ci-config.yaml"
if not module_ci_config_path.exists():
continue
module_ci_configs[module_name] = yaml.safe_load(module_ci_config_path.read_text())
return module_ci_configs
def _build_docker_cmd(container_name, *, use_root=False, cwd="/home/dirac", tty=True):
if use_root or os.getuid() == 0:
user = "root"
else:
user = "dirac"
cmd = ["docker", "exec"]
if tty:
if sys.stdout.isatty():
cmd += ["-it"]
else:
typer.secho(
'Not passing "-it" to docker as stdout is not a tty',
err=True,
fg=c.YELLOW,
)
cmd += [
"-e=TERM=xterm-color",
"-e=INSTALLROOT=/home/dirac",
f"-e=INSTALLTYPE={container_name}",
f"-u={user}",
f"-w={cwd}",
container_name,
]
return cmd
def _list_services():
    """Return (runit_dir, service_names) for the server container.

    Tries both known runit locations and returns the first that works.

    Raises:
        typer.Exit: when neither location yields a service list.
    """
    # The Python 3 runit dir ends up in /diracos
    for runit_dir in ["ServerInstallDIR/runit", "ServerInstallDIR/diracos/runit"]:
        cmd = _build_docker_cmd("server")
        cmd += [
            "bash",
            "-c",
            f'cd {runit_dir}/ && for fn in */*/log/current; do echo "$(dirname "$(dirname "$fn")")"; done',
        ]
        # BUGFIX: capture stderr as well -- previously only stdout was piped,
        # so the failure report below always printed "stderr was: None".
        ret = subprocess.run(cmd, check=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        if not ret.returncode:
            return runit_dir, ret.stdout.split()
    else:
        # for-else: reached only when no candidate directory succeeded.
        typer.secho("Failed to find list of available services", err=True, fg=c.RED)
        typer.secho(f"stdout was: {ret.stdout!r}", err=True)
        typer.secho(f"stderr was: {ret.stderr!r}", err=True)
        raise typer.Exit(1)
def _log_popen_stdout(p):
    """Echo a Popen's stdout line by line, colourised by log level.

    Runs until the child process exits; lines matching LOG_PATTERN are
    coloured according to LOG_LEVEL_MAP.
    """
    while p.poll() is None:
        line = p.stdout.readline().rstrip()
        if not line:
            continue
        bg, fg = None, None
        # Colour the line if it starts with a recognised log level.
        if match := LOG_PATTERN.match(line):
            bg, fg = LOG_LEVEL_MAP.get(match.groups()[0], (bg, fg))
        typer.secho(line, err=True, bg=bg, fg=fg)
if __name__ == "__main__":
    # Run the typer application when this file is executed as a script.
    app()
| DIRACGrid/DIRAC | integration_tests.py | Python | gpl-3.0 | 24,355 |
# Copyright 2013 Viewfinder Inc. All Rights Reserved.
"""Handlers for database administration.
MetricsHandler: main handler for detailed metrics. We don't use ajax-y tables, so there is no data handler.
"""
from tornado.escape import url_escape
__author__ = 'marc@emailscrubbed.com (Marc Berhault)'
import base64
import json
import logging
import re
import time
from collections import Counter, defaultdict
from tornado import auth, gen, template
from viewfinder.backend.base import constants, handler, util
from viewfinder.backend.base.dotdict import DotDict
from viewfinder.backend.db import db_client, metric, schema, vf_schema
from viewfinder.backend.www.admin import admin, formatters, data_table
kDefaultMetricName = 'itunes.downloads'
class MetricsHandler(admin.AdminHandler):
    """Provides a list of all datastore tables and allows each to be
    drilled down.

    GET parameters: metric_name (dotted metric key), start-secs/end-secs
    (epoch-second time range, defaulting to the last week).
    """

    @handler.authenticated()
    @handler.asynchronous(datastore=True)
    @admin.require_permission(level='support')
    @gen.engine
    def get(self):
        # Metric to display and the time range; defaults to the last week.
        metric_name = self.get_argument('metric_name', kDefaultMetricName)
        end_time = int(self.get_argument('end-secs', time.time()))
        start_time = int(self.get_argument('start-secs', end_time - constants.SECONDS_PER_WEEK))
        # Select an appropriate interval resolution based on the requested time span.
        selected_interval = metric.LOGS_INTERVALS[-1]
        group_key = metric.Metric.EncodeGroupKey(metric.LOGS_STATS_NAME, selected_interval)
        logging.info('Query performance counters %s, range: %s - %s, resolution: %s'
                     % (group_key, time.ctime(start_time), time.ctime(end_time), selected_interval.name))
        # Page through the metrics table until the query is exhausted,
        # using the last key seen as the exclusive start of the next page.
        metrics = list()
        start_key = None
        while True:
            new_metrics = yield gen.Task(metric.Metric.QueryTimespan, self._client, group_key,
                                         start_time, end_time, excl_start_key=start_key)
            if len(new_metrics) > 0:
                metrics.extend(new_metrics)
                start_key = metrics[-1].GetKey()
            else:
                break
        columns, data = _SerializeMetrics(metrics, metric_name)
        # Build the template context, inheriting admin permission info.
        t_dict = {}
        t_dict.update(self.PermissionsTemplateDict())
        t_dict['col_names'] = columns
        t_dict['col_data'] = data
        t_dict['metric_name'] = metric_name
        t_dict['start_secs'] = start_time
        t_dict['end_secs'] = end_time
        self.render('metrics_table.html', **t_dict)
# This is very hacky: basically, we only care about some part of the metric name.
# eg: in itunes.downloads.1_2.US, we just want the US part. itunes.downloads is already removed, so the index
# we care about is the 1st (zero indexed) in the remainder.
kMetricSignificantLevel = { 'itunes.downloads': 1, 'itunes.inapp_subscriptions_auto_renew': 1, 'itunes.updates': 1 }
# Display and sort properties. Array of (regexp, sort_by_count, show_total_in_column_name).
# If the base metric name matches the regexp, we apply sort_by_count and show_total_in_column_name.
# Defaults are: sort_by_count = False, show_total_in_column_name = False.
kSortByCount = [ ('itunes.*', True, True) ]
def _SerializeMetrics(metrics, metric_name):
    """Turn raw metric rows into (column_names, row_data) for the template.

    Each metric payload is flattened, reduced to its "significant" name
    component (see kMetricSignificantLevel), then summed per day and per
    column.  Column ordering and the "Total" suffix in headers depend on
    kSortByCount.  Rows are returned most-recent-first.
    """
    def _DisplayParams():
        # Resolve (sort_by_count, show_total) for this metric name.
        for regexp, sort, show in kSortByCount:
            if re.match(regexp, metric_name):
                return (sort, show)
        return (False, False)
    columns = Counter()
    data = []
    for m in metrics:
        timestamp = m.timestamp
        d = defaultdict(int)
        d['day'] = util.TimestampUTCToISO8601(timestamp).replace('-', '/')
        dd = DotDict(json.loads(m.payload))
        if metric_name not in dd:
            continue
        payload = dd[metric_name].flatten()
        for k, v in payload.iteritems():
            if metric_name in kMetricSignificantLevel:
                # Keep only the significant component, e.g. "US" in "1_2.US".
                k = k.split('.')[kMetricSignificantLevel[metric_name]]
            columns[k] += v
            d[k] += v
            d['Total'] += v
            columns['Total'] += v
    # We now have "columns" with totals for each column. We need to sort everything.
    sort_by_count, show_total = _DisplayParams()
    if sort_by_count:
        sorted_cols = columns.most_common()
    else:
        sorted_cols = sorted([(k, v) for k, v in columns.iteritems()])
    cols = ['Day']
    cols.append('Total %d' % columns['Total'] if show_total else 'Total')
    for k, v in sorted_cols:
        if k == 'Total':
            continue
        cols.append('%s %d' % (k, v) if show_total else k)
    # Emit rows newest-first; zero counts render as empty cells.
    sorted_data = []
    for d in reversed(data):
        s = [d['day'], d['Total']]
        for k, _ in sorted_cols:
            if k == 'Total':
                continue
            s.append(d[k] if d[k] > 0 else '')
        sorted_data.append(s)
    return (cols, sorted_data)
| 0359xiaodong/viewfinder | backend/www/admin/metrics.py | Python | apache-2.0 | 4,540 |
import traceback
import os
import wx
from wx.lib.embeddedimage import PyEmbeddedImage
try:
import wx.lib.agw.customtreectrl as customtreectrl
except ImportError:
import wx.lib.customtreectrl as customtreectrl
#####################################################################################################
# Fixing bug with styles conflict
#
# wx.gizmos.TR_VIRTUAL shares the hex value 0x4000 with customtreectrl's
# TR_AUTO_CHECK_CHILD; when that clash is detected the TR_AUTO_* and
# TR_ALIGN_WINDOWS flags are shifted to unused bits.  The table below lists
# the documented style values for reference.
"""
Window Styles
=============
This class supports the following window styles:
============================== =========== ==================================================
Window Styles Hex Value Description
============================== =========== ==================================================
``TR_NO_BUTTONS`` 0x0 For convenience to document that no buttons are to be drawn.
``TR_SINGLE`` 0x0 For convenience to document that only one item may be selected at a time. Selecting another item causes the current selection, if any, to be deselected. This is the default.
``TR_HAS_BUTTONS`` 0x1 Use this style to show + and - buttons to the left of parent items.
``TR_NO_LINES`` 0x4 Use this style to hide vertical level connectors.
``TR_LINES_AT_ROOT`` 0x8 Use this style to show lines between root nodes. Only applicable if ``TR_HIDE_ROOT`` is set and ``TR_NO_LINES`` is not set.
``TR_DEFAULT_STYLE`` 0x9 The set of flags that are closest to the defaults for the native control for a particular toolkit.
``TR_TWIST_BUTTONS`` 0x10 Use old Mac-twist style buttons.
``TR_MULTIPLE`` 0x20 Use this style to allow a range of items to be selected. If a second range is selected, the current range, if any, is deselected.
``TR_EXTENDED`` 0x40 Use this style to allow disjoint items to be selected. (Only partially implemented; may not work in all cases).
``TR_HAS_VARIABLE_ROW_HEIGHT`` 0x80 Use this style to cause row heights to be just big enough to fit the content. If not set, all rows use the largest row height. The default is that this flag is unset.
``TR_EDIT_LABELS`` 0x200 Use this style if you wish the user to be able to edit labels in the tree control.
``TR_ROW_LINES`` 0x400 Use this style to draw a contrasting border between displayed rows.
``TR_HIDE_ROOT`` 0x800 Use this style to suppress the display of the root node, effectively causing the first-level nodes to appear as a series of root nodes.
``wx.gizmos.TR_COLUMN_LINES`` 0x1000
``wx.gizmos.TR_VIRTUAL`` 0x4000 conflict !!!
``TR_FULL_ROW_HIGHLIGHT`` 0x2000 Use this style to have the background colour and the selection highlight extend over the entire horizontal row of the tree control window.
``TR_AUTO_CHECK_CHILD`` 0x4000 Only meaningful foe checkbox-type items: when a parent item is checked/unchecked its children are checked/unchecked as well.
``TR_AUTO_TOGGLE_CHILD`` 0x8000 Only meaningful foe checkbox-type items: when a parent item is checked/unchecked its children are toggled accordingly.
``TR_AUTO_CHECK_PARENT`` 0x10000 Only meaningful foe checkbox-type items: when a child item is checked/unchecked its parent item is checked/unchecked as well.
``TR_ALIGN_WINDOWS`` 0x20000 Flag used to align windows (in items with windows) at the same horizontal position.
============================== =========== ==================================================
"""
import wx.gizmos
if wx.gizmos.TR_VIRTUAL == customtreectrl.TR_AUTO_CHECK_CHILD:
    # print "* fixing TR_AUTO_CHECK_CHILD vs TR_VIRTUAL conflict"
    # Move the conflicting flags onto bits not used by wx.gizmos.
    customtreectrl.TR_AUTO_CHECK_CHILD = 0x10000
    customtreectrl.TR_AUTO_TOGGLE_CHILD = 0x20000
    customtreectrl.TR_AUTO_CHECK_PARENT = 0x40000
    customtreectrl.TR_ALIGN_WINDOWS = 0x80000
#####################################################################################################
#####################################################################################################
try:
from wx.lib.agw.customtreectrl import *
except ImportError:
from wx.lib.customtreectrl import *
if hasattr(CustomTreeCtrl, 'GetControlBmp'):
# 16 x 16 images
Checked = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABHNCSVQICAgIfAhkiAAAAMZJ"
"REFUOI2lkz0KAjEQhd97s7fwBHYiCGLhFbSwsNGbWHkA8SZa2HgASxER9AKeQ4t1soFNdpEd"
"CBkm+SbzkyFl6CLqRAMoXOnNNp9/4fdxyyI2XParoJOCVC4zg2Rhl4T+egcgk0IbLFVYzUET"
"PDgMf2eWdtAGAwhRJB04PDlPk7DfIZl3YFaGNzqNa/Br+QTJfAT+oksOJrMplGHf5tfYXIOz"
"KcSteizuWbihBlWfJWuFk0V02KvdBAPRLAAI3/MfYddx/gKqcR/ADJsz+gAAAABJRU5ErkJg"
"gg==")
NotChecked = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABHNCSVQICAgIfAhkiAAAAI1J"
"REFUOI2lk8ENgzAMRf//9hYdgWF65NpFOHHsqaOwAcswRzlQIFIJOMRSFCvKe7Ism5ShJlRF"
"A/A1eTy7byk8DT09fRjf7ZZL+h2DmcPM4L7ckqF5fZZ/R+YrWNqxP0EEVtJ43YGzFUThE0EM"
"zgqi8IkgBpMZQRQmma+gBD5sYgkMJLsAYBvPkmDtOs9/uQs8PMn4RwAAAABJRU5ErkJggg==")
Flagged = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABHNCSVQICAgIfAhkiAAAAflJ"
"REFUOI2lk99Lk1EYx7/nObvahUK7cyNkrA1piOg0BokFvSAiDJftIrpof4E5uoqBCIlFUuyy"
"27Ag3I1TunmjhGplThYZL7UwL14EV2yxzb1zzvF0oy+81fqBXzhX5/l+zvc55zlCkMRxRMdy"
"A7C12oglkqxmciiUDDja7VACXtydGBc/14nfteAMxXlkwAOlz42ArwPZzTzU9S0spT9ie/Gm"
"+CMgcuM+hwd9OOs/CSKClBJEElJKvPyg4/GzDTycjpoQyx04Q/GWZiKJcz1uXL7QA2cozr8A"
"Yokkjwx4THNDNPC8vILoRhSLegq1Zg1EhPO9HoSHuhFLJNkCUNdyUPrc5smvd98gWV+AYavi"
"tnYLqS8pEEkQEUaDXVDXctYEhbKBgK/DjDuvz6NQLKJqGNirNzD39g6ICEIIBP2dKJQNK8DR"
"Zkd2M2/2fNExjp3tHXzNf0O59B2x3usQQoCIsKrpcLTZrQCl3wt1fctMMOwaxjXPJGjXhpkz"
"s4icjhwmICynNSj93sN3JGku19gUL73IcqVS4mq1wrWawfX6Hu/v1/ngoMHNZpOfZj6xa2yK"
"jzwWgCCJK9MP+Mmr9y3NV2cescXTahLDQ90YDXYh6O/EqqZjOa0hufLu75N4pMnEAquZzyiW"
"DJxot0MJnMK9iUv/9hf+Rz8AshHWKLtawgYAAAAASUVORK5CYII=")
NotFlagged = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABHNCSVQICAgIfAhkiAAAAX5J"
"REFUOI2lk7FLQlEUh3/v3Dc5OLiFDiK9QUpK5A0WVCCC4CBGLbVE/0C4NQgS1BxOzS1B2FLY"
"ZIJBSGhRQvEIQQRrE0MtrUxOS0qvekl14HKHe7+Pc+/9XUkigf8U/YsGIBstROIJTuWLqDVa"
"sJhN8KsKNlfmpc/7pO+OYA1FOaA64HPb4VGGUChVkb4oI3l6g7v9delHwdzqFocnFUyM2EAk"
"IIQMIQRkWUb2+ha7mSvsrC33Jbo7sIaihrAQAtPjDiz4xmANRfmLIBJPcEB1GMJEAkSEGfcw"
"ZqdciMQTrBOk8kX43PYf4d4c9DqRyhf1HdQaLXiUoYEwEcE7aket0dILLGYTCqXqQJiIkNMq"
"sJhNeoFfVZC+KA+EiQjJrAa/qry/I4n+sIVjfHB8zvX6PTebdX58fOB2u8XPz0/c6bxwt/vK"
"R2c3bAvHuMfoBBIJLK5t8+HJpSG8tLHDOsYoibNTLgS9TnhH7chpFSSzGvYyl4OT+DEXf/4L"
"v6k3SsulH1+ZQhYAAAAASUVORK5CYII=")
HalfChecked = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABHNCSVQICAgIfAhkiAAAAP1J"
"REFUOI1jZGRiZqAEMFGkm4GBgQXGkPav/U+q5qcbmxlZkAUOdIfD2UxMTFDMzMDMzMLAzMzM"
"wMICoZmYmBm0EyZC1GEzmZBmJiaENgwDiNHMhBTwTPg0H/l+hCH1eirDlmdbUTTjdAG65jU/"
"VzN8Y/nK0Hmtg2Hl7ZVIhuA0AOHsJY+XMLx9947h67dvDD9+/mboOdWN5EIcBiD7OVg4hOHF"
"0xcMr16+Zvj08T1DkVEJMQYgAixQPpChQKWQgekLC0OreTtDomEiXDMjI0IbSjpAD+1ozRiG"
"WO04FJsZGZkYGBkZcbsAPbTxacYaiKRoxvACLHmSAhgpzc4AZkA5MrlvgsQAAAAASUVORK5C"
"YII=")
else:
# 13 x 13
HalfChecked = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAAA0AAAANCAYAAABy6+R8AAAABHNCSVQICAgIfAhkiAAAAOZJ"
"REFUKJFjlAls+M9AImBhYGBgONAdDhdgYmKCYmYGZmYWBmZmZgYWFgjNxMTMoJ0wkYEJ2QRC"
"GpiYIMqZSNHAxMSM0ISu4cj3Iwyp11MZtjzbiqIBxSZ0DWt+rmb4xvKVofNaB8PK2yuRNKJo"
"QjhpyeMlDG/fvWP4+u0bw4+fvxl6TnUjuQRJE7IfgoVDGF48fcHw6uVrhk8f3zMUGZXg0oTw"
"dKB8IEOBSiED0xcWhlbzdoZEw0S4BkZGJkQ8oYdStGYMQ6x2HIoNjIxMDIyMjKg2oYcSLg0o"
"AUGsBgYGBgZGctIeAFbpN9iWnQaVAAAAAElFTkSuQmCC")
TreeItemIcon_HalfChecked = 4
class AlmostFalse(object):
    """Falsy sentinel marking the tri-state ("partially checked") value.

    It is falsy in boolean context and compares equal to False and 0, but is
    a distinct object, so ``x is AlmostFalse`` can detect the third state.
    """
    def __nonzero__(self):
        # Python 2 truthiness hook: the sentinel is always falsy.
        return False
    def __str__(self):
        return self.__class__.__name__
    __repr__ = __str__
    def __eq__(self, v):
        # NOTE(review): "v is 0" relies on CPython small-int interning;
        # "v == 0" would be the safer comparison.
        return v is False or v is 0
# Replace the class with its single instance; used as a singleton sentinel.
AlmostFalse = AlmostFalse()
# Keep a handle on the original control before subclassing/patching it below.
BaseCustomTreeCtrl = CustomTreeCtrl
class CustomTreeCtrl(BaseCustomTreeCtrl):
    """CustomTreeCtrl variant with themed checkbox bitmaps, a half-checked
    (tri-state) checkbox image, parent auto-checking, and a SetFocus guard.
    """
    def __init__(self, *args, **kwargs):
        BaseCustomTreeCtrl.__init__(self, *args, **kwargs)

    def GetControlBmp(self, checkbox=True, checked=False, enabled=True,
                      halfChecked=False,  # extra state added by this subclass
                      **argw  # ignore any other keyword arguments
                      ):
        """
        bring back my nice checkboxes to me

        Returns the embedded-image bitmap for the requested checkbox/flag
        state, greyed out when the item is disabled.
        """
        if checkbox:
            if checked:
                ei = Checked
            elif halfChecked:
                ei = HalfChecked
            else:
                ei = NotChecked
        else:
            # Radio/flag style items use the flag artwork.
            if checked:
                ei = Flagged
            else:
                ei = NotFlagged
        bmp = ei.getBitmap()
        if not enabled:
            # Disabled items get a greyed-out version of the bitmap.
            image = wx.ImageFromBitmap(bmp)
            image = GrayOut(image)
            bmp = wx.BitmapFromImage(image)
        return bmp

    def SetImageListCheck(self, *args, **kwargs):
        """
        add half checked image to image lists

        Appends the half-checked bitmap (index TreeItemIcon_HalfChecked)
        to both the normal and the greyed check-image lists.
        """
        BaseCustomTreeCtrl.SetImageListCheck(self, *args, **kwargs)
        self._imageListCheck .Add(self.GetControlBmp(checkbox=True, checked=False, halfChecked=True, enabled=True ))
        self._grayedCheckList.Add(self.GetControlBmp(checkbox=True, checked=False, halfChecked=True, enabled=False))

    def AutoCheckParent(self, item, checked):
        """Traverses up the tree and checks/unchecks parent items.
        Meaningful only for check items.

        A parent whose enabled checkbox children disagree with ``checked``
        is set to the AlmostFalse (half-checked) state instead.
        """
        if not item:
            raise Exception("\nERROR: Invalid Tree Item. ")
        parent = item.GetParent()
        if not parent or parent.GetType() != 1:
            # No parent, or the parent is not a checkbox item (type 1).
            return
        almost = False
        (child, cookie) = self.GetFirstChild(parent)
        while child:
            if child.GetType() == 1 and child.IsEnabled():
                if checked != child.IsChecked():
                    # At least one sibling disagrees -> tri-state parent.
                    almost = True
                    break
            (child, cookie) = self.GetNextChild(parent, cookie)
        self.CheckItem2(parent, AlmostFalse if almost else checked, torefresh=True)
        # Continue propagating towards the root.
        self.AutoCheckParent(parent, checked)

    def SetFocus(self):
        # BUGFIX: eliminate SetFocus called from CustomTreeCtrl __init__
        # (inspect the caller's frame and swallow the call when it comes
        # from the wx tree-control constructors).
        file, line, fn, code = traceback.extract_stack()[-2]
        if (
            not (os.path.sep.join(('wx','lib','customtreectrl')) in file and fn == '__init__')
            and not (os.path.sep.join(('wx','lib','agw','customtreectrl')) in file and fn == '__init__')
            and not (os.path.sep.join(('wx','lib','agw','hypertreelist')) in file and fn == '__init__')
        ):
            super(CustomTreeCtrl, self).SetFocus()
# Keep a handle on the original item class before subclassing it.
BaseGenericTreeItem = GenericTreeItem
class GenericTreeItem(BaseGenericTreeItem):
    """GenericTreeItem that reports the half-checked image for the
    AlmostFalse tri-state value."""
    def __init__(self, *args, **kwargs):
        BaseGenericTreeItem.__init__(self, *args, **kwargs)
    def GetCurrentCheckedImage(self):
        # Identity check: only the AlmostFalse sentinel selects the
        # half-checked artwork; plain False/True use the stock images.
        if self.IsChecked() is AlmostFalse:
            return TreeItemIcon_HalfChecked
        else:
            return BaseGenericTreeItem.GetCurrentCheckedImage(self)
# monkey patching, no other way to set GenericTreeItem
# (CustomTreeCtrl instantiates GenericTreeItem from its own module namespace,
# so the patched classes must be installed there).
customtreectrl.CustomTreeCtrl = CustomTreeCtrl
customtreectrl.GenericTreeItem = GenericTreeItem
# Drop the module alias; the patched names remain reachable via wx.lib.
del customtreectrl
| onoga/toolib | toolib/wx/controls/CustomTreeCtrl.py | Python | gpl-2.0 | 10,497 |
"""
Demo light platform that implements lights.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/demo/
"""
import asyncio
import random
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT,
ATTR_RGB_COLOR, ATTR_WHITE_VALUE, ATTR_XY_COLOR, SUPPORT_BRIGHTNESS,
SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_RGB_COLOR, SUPPORT_WHITE_VALUE,
Light)
# Preset RGB triples cycled by the demo lights (yellow-ish and pink-ish).
LIGHT_COLORS = [
    [237, 224, 33],
    [255, 63, 111],
]
# Effects the demo light advertises.
LIGHT_EFFECT_LIST = ['rainbow', 'none']
# Default colour temperatures -- presumably mireds; confirm against the
# light component's colour-temperature units.
LIGHT_TEMPS = [240, 380]
# Feature bitmask: everything this demo platform can emulate.
SUPPORT_DEMO = (SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT |
                SUPPORT_RGB_COLOR | SUPPORT_WHITE_VALUE)
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Setup the demo light platform."""
    demo_lights = [
        DemoLight("Bed Light", False, True,
                  effect_list=LIGHT_EFFECT_LIST, effect=LIGHT_EFFECT_LIST[0]),
        DemoLight("Ceiling Lights", True, True, LIGHT_COLORS[0], LIGHT_TEMPS[1]),
        DemoLight("Kitchen Lights", True, True, LIGHT_COLORS[1], LIGHT_TEMPS[0]),
    ]
    add_devices_callback(demo_lights)
class DemoLight(Light):
    """A fake light used by the demo platform."""

    def __init__(self, name, state, available=False, rgb=None, ct=None,
                 brightness=180, xy_color=(.5, .5), white=200,
                 effect_list=None, effect=None):
        """Initialize the light."""
        self._name = name
        self._state = state
        self._rgb = rgb
        # Pick a random preset temperature when none was supplied.
        self._ct = ct or random.choice(LIGHT_TEMPS)
        self._brightness = brightness
        self._xy_color = xy_color
        self._white = white
        self._effect_list = effect_list
        self._effect = effect

    @property
    def should_poll(self) -> bool:
        """Demo lights push their own updates, so polling is unnecessary."""
        return False

    @property
    def name(self) -> str:
        """Return the name of the light if any."""
        return self._name

    @property
    def available(self) -> bool:
        """Return availability."""
        # This demo light is always available, but well-behaving components
        # should implement this to inform Home Assistant accordingly.
        return True

    @property
    def brightness(self) -> int:
        """Return the brightness of this light between 0..255."""
        return self._brightness

    @property
    def xy_color(self) -> tuple:
        """Return the XY color value [float, float]."""
        return self._xy_color

    @property
    def rgb_color(self) -> tuple:
        """Return the RBG color value."""
        return self._rgb

    @property
    def color_temp(self) -> int:
        """Return the CT color temperature."""
        return self._ct

    @property
    def white_value(self) -> int:
        """Return the white value of this light between 0..255."""
        return self._white

    @property
    def effect_list(self) -> list:
        """Return the list of supported effects."""
        return self._effect_list

    @property
    def effect(self) -> str:
        """Return the current effect."""
        return self._effect

    @property
    def is_on(self) -> bool:
        """Return true if light is on."""
        return self._state

    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        return SUPPORT_DEMO

    def turn_on(self, **kwargs) -> None:
        """Turn the light on, applying any supported attributes."""
        self._state = True
        # Map each supported service attribute to its backing field.
        attr_for_kwarg = {
            ATTR_RGB_COLOR: '_rgb',
            ATTR_COLOR_TEMP: '_ct',
            ATTR_BRIGHTNESS: '_brightness',
            ATTR_XY_COLOR: '_xy_color',
            ATTR_WHITE_VALUE: '_white',
            ATTR_EFFECT: '_effect',
        }
        for kwarg, attr in attr_for_kwarg.items():
            if kwarg in kwargs:
                setattr(self, attr, kwargs[kwarg])
        # As we have disabled polling, we need to inform
        # Home Assistant about updates in our state ourselves.
        self.schedule_update_ha_state()

    def turn_off(self, **kwargs) -> None:
        """Turn the light off."""
        self._state = False
        # As we have disabled polling, we need to inform
        # Home Assistant about updates in our state ourselves.
        self.schedule_update_ha_state()

    @asyncio.coroutine
    def async_restore_state(self, is_on, **kwargs):
        """Restore the demo state."""
        self._state = is_on
        # Map each restorable key to its backing field.
        attr_for_key = {
            'brightness': '_brightness',
            'color_temp': '_ct',
            'rgb_color': '_rgb',
            'xy_color': '_xy_color',
            'white_value': '_white',
            'effect': '_effect',
        }
        for key, attr in attr_for_key.items():
            if key in kwargs:
                setattr(self, attr, kwargs[key])
| Duoxilian/home-assistant | homeassistant/components/light/demo.py | Python | mit | 5,101 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ["preview"],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_forwarding_rule
description:
- A ForwardingRule resource. A ForwardingRule resource specifies which pool of target
virtual machines to forward a packet to if it matches the given [IPAddress, IPProtocol,
portRange] tuple.
short_description: Creates a GCP ForwardingRule
version_added: 2.6
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices:
- present
- absent
default: present
description:
description:
- An optional description of this resource. Provide this property when you create
the resource.
required: false
ip_address:
description:
- The IP address that this forwarding rule is serving on behalf of.
- Addresses are restricted based on the forwarding rule's load balancing scheme
(EXTERNAL or INTERNAL) and scope (global or regional).
- When the load balancing scheme is EXTERNAL, for global forwarding rules, the
address must be a global IP, and for regional forwarding rules, the address
must live in the same region as the forwarding rule. If this field is empty,
an ephemeral IPv4 address from the same scope (global or regional) will be assigned.
A regional forwarding rule supports IPv4 only. A global forwarding rule supports
either IPv4 or IPv6.
- When the load balancing scheme is INTERNAL, this can only be an RFC 1918 IP
address belonging to the network/subnet configured for the forwarding rule.
By default, if this field is empty, an ephemeral internal IP address will be
automatically allocated from the IP range of the subnet or network configured
for this forwarding rule.
- 'An address can be specified either by a literal IP address or a URL reference
to an existing Address resource. The following examples are all valid: * 100.1.2.3
* U(https://www.googleapis.com/compute/v1/projects/project/regions/region/addresses/address)
* projects/project/regions/region/addresses/address * regions/region/addresses/address
* global/addresses/address * address .'
required: false
ip_protocol:
description:
- The IP protocol to which this rule applies. Valid options are TCP, UDP, ESP,
AH, SCTP or ICMP.
- When the load balancing scheme is INTERNAL, only TCP and UDP are valid.
required: false
choices:
- TCP
- UDP
- ESP
- AH
- SCTP
- ICMP
backend_service:
description:
- A reference to a BackendService to receive the matched traffic.
- This is used for internal load balancing.
- "(not used for external load balancing) ."
- 'This field represents a link to a BackendService resource in GCP. It can be
specified in two ways. First, you can place in the selfLink of the resource
here as a string Alternatively, you can add `register: name-of-resource` to
a gcp_compute_backend_service task and then set this backend_service field to
"{{ name-of-resource }}"'
required: false
ip_version:
description:
- The IP Version that will be used by this forwarding rule. Valid options are
IPV4 or IPV6. This can only be specified for a global forwarding rule.
required: false
choices:
- IPV4
- IPV6
load_balancing_scheme:
description:
- 'This signifies what the ForwardingRule will be used for and can only take the
following values: INTERNAL, EXTERNAL The value of INTERNAL means that this will
be used for Internal Network Load Balancing (TCP, UDP). The value of EXTERNAL
means that this will be used for External Load Balancing (HTTP(S) LB, External
TCP/UDP LB, SSL Proxy) .'
required: false
choices:
- INTERNAL
- EXTERNAL
name:
description:
- Name of the resource; provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
required: true
network:
description:
- For internal load balancing, this field identifies the network that the load
balanced IP should belong to for this Forwarding Rule. If this field is not
specified, the default network will be used.
- This field is not used for external load balancing.
- 'This field represents a link to a Network resource in GCP. It can be specified
in two ways. First, you can place in the selfLink of the resource here as a
string Alternatively, you can add `register: name-of-resource` to a gcp_compute_network
task and then set this network field to "{{ name-of-resource }}"'
required: false
port_range:
description:
- This field is used along with the target field for TargetHttpProxy, TargetHttpsProxy,
TargetSslProxy, TargetTcpProxy, TargetVpnGateway, TargetPool, TargetInstance.
- Applicable only when IPProtocol is TCP, UDP, or SCTP, only packets addressed
to ports in the specified range will be forwarded to target.
- Forwarding rules with the same [IPAddress, IPProtocol] pair must have disjoint
port ranges.
- 'Some types of forwarding target have constraints on the acceptable ports: *
TargetHttpProxy: 80, 8080 * TargetHttpsProxy: 443 * TargetTcpProxy: 25, 43,
110, 143, 195, 443, 465, 587, 700, 993, 995, 1883, 5222 * TargetSslProxy: 25,
43, 110, 143, 195, 443, 465, 587, 700, 993, 995, 1883, 5222 * TargetVpnGateway:
500, 4500 .'
required: false
ports:
description:
- This field is used along with the backend_service field for internal load balancing.
- When the load balancing scheme is INTERNAL, a single port or a comma separated
list of ports can be configured. Only packets addressed to these ports will
be forwarded to the backends configured with this forwarding rule.
- You may specify a maximum of up to 5 ports.
required: false
subnetwork:
description:
- A reference to a subnetwork.
- For internal load balancing, this field identifies the subnetwork that the load
balanced IP should belong to for this Forwarding Rule.
- If the network specified is in auto subnet mode, this field is optional. However,
if the network is in custom subnet mode, a subnetwork must be specified.
- This field is not used for external load balancing.
- 'This field represents a link to a Subnetwork resource in GCP. It can be specified
in two ways. First, you can place in the selfLink of the resource here as a
string Alternatively, you can add `register: name-of-resource` to a gcp_compute_subnetwork
task and then set this subnetwork field to "{{ name-of-resource }}"'
required: false
target:
description:
- A reference to a TargetPool resource to receive the matched traffic.
- For regional forwarding rules, this target must live in the same region as the
forwarding rule. For global forwarding rules, this target must be a global load
balancing resource. The forwarded traffic must be of a type appropriate to the
target object.
- This field is not used for internal load balancing.
- 'This field represents a link to a TargetPool resource in GCP. It can be specified
in two ways. First, you can place in the selfLink of the resource here as a
string Alternatively, you can add `register: name-of-resource` to a gcp_compute_target_pool
task and then set this target field to "{{ name-of-resource }}"'
required: false
version_added: 2.7
network_tier:
description:
- 'The networking tier used for configuring this address. This field can take
the following values: PREMIUM or STANDARD. If this field is not specified, it
is assumed to be PREMIUM.'
required: false
version_added: 2.8
choices:
- PREMIUM
- STANDARD
region:
description:
- A reference to the region where the regional forwarding rule resides.
- This field is not applicable to global forwarding rules.
required: true
extends_documentation_fragment: gcp
notes:
- 'API Reference: U(https://cloud.google.com/compute/docs/reference/latest/forwardingRule)'
- 'Official Documentation: U(https://cloud.google.com/compute/docs/load-balancing/network/forwarding-rules)'
'''
EXAMPLES = '''
- name: create an address
gcp_compute_address:
name: "address-forwardingrule"
region: us-west1
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: address
- name: create a target pool
gcp_compute_target_pool:
name: "targetpool-forwardingrule"
region: us-west1
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: targetpool
- name: create a forwarding rule
gcp_compute_forwarding_rule:
name: "test_object"
region: us-west1
target: "{{ targetpool }}"
ip_protocol: TCP
port_range: 80-80
ip_address: "{{ address.address }}"
project: "test_project"
auth_kind: "serviceaccount"
service_account_file: "/tmp/auth.pem"
state: present
'''
RETURN = '''
creationTimestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
description:
description:
- An optional description of this resource. Provide this property when you create
the resource.
returned: success
type: str
id:
description:
- The unique identifier for the resource.
returned: success
type: int
IPAddress:
description:
- The IP address that this forwarding rule is serving on behalf of.
- Addresses are restricted based on the forwarding rule's load balancing scheme
(EXTERNAL or INTERNAL) and scope (global or regional).
- When the load balancing scheme is EXTERNAL, for global forwarding rules, the address
must be a global IP, and for regional forwarding rules, the address must live
in the same region as the forwarding rule. If this field is empty, an ephemeral
IPv4 address from the same scope (global or regional) will be assigned. A regional
forwarding rule supports IPv4 only. A global forwarding rule supports either IPv4
or IPv6.
- When the load balancing scheme is INTERNAL, this can only be an RFC 1918 IP address
belonging to the network/subnet configured for the forwarding rule. By default,
if this field is empty, an ephemeral internal IP address will be automatically
allocated from the IP range of the subnet or network configured for this forwarding
rule.
- 'An address can be specified either by a literal IP address or a URL reference
to an existing Address resource. The following examples are all valid: * 100.1.2.3
* U(https://www.googleapis.com/compute/v1/projects/project/regions/region/addresses/address)
* projects/project/regions/region/addresses/address * regions/region/addresses/address
* global/addresses/address * address .'
returned: success
type: str
IPProtocol:
description:
- The IP protocol to which this rule applies. Valid options are TCP, UDP, ESP, AH,
SCTP or ICMP.
- When the load balancing scheme is INTERNAL, only TCP and UDP are valid.
returned: success
type: str
backendService:
description:
- A reference to a BackendService to receive the matched traffic.
- This is used for internal load balancing.
- "(not used for external load balancing) ."
returned: success
type: str
ipVersion:
description:
- The IP Version that will be used by this forwarding rule. Valid options are IPV4
or IPV6. This can only be specified for a global forwarding rule.
returned: success
type: str
loadBalancingScheme:
description:
- 'This signifies what the ForwardingRule will be used for and can only take the
following values: INTERNAL, EXTERNAL The value of INTERNAL means that this will
be used for Internal Network Load Balancing (TCP, UDP). The value of EXTERNAL
means that this will be used for External Load Balancing (HTTP(S) LB, External
TCP/UDP LB, SSL Proxy) .'
returned: success
type: str
name:
description:
- Name of the resource; provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
returned: success
type: str
network:
description:
- For internal load balancing, this field identifies the network that the load balanced
IP should belong to for this Forwarding Rule. If this field is not specified,
the default network will be used.
- This field is not used for external load balancing.
returned: success
type: str
portRange:
description:
- This field is used along with the target field for TargetHttpProxy, TargetHttpsProxy,
TargetSslProxy, TargetTcpProxy, TargetVpnGateway, TargetPool, TargetInstance.
- Applicable only when IPProtocol is TCP, UDP, or SCTP, only packets addressed to
ports in the specified range will be forwarded to target.
- Forwarding rules with the same [IPAddress, IPProtocol] pair must have disjoint
port ranges.
- 'Some types of forwarding target have constraints on the acceptable ports: * TargetHttpProxy:
80, 8080 * TargetHttpsProxy: 443 * TargetTcpProxy: 25, 43, 110, 143, 195, 443,
465, 587, 700, 993, 995, 1883, 5222 * TargetSslProxy: 25, 43, 110, 143, 195, 443,
465, 587, 700, 993, 995, 1883, 5222 * TargetVpnGateway: 500, 4500 .'
returned: success
type: str
ports:
description:
- This field is used along with the backend_service field for internal load balancing.
- When the load balancing scheme is INTERNAL, a single port or a comma separated
list of ports can be configured. Only packets addressed to these ports will be
forwarded to the backends configured with this forwarding rule.
- You may specify a maximum of up to 5 ports.
returned: success
type: list
subnetwork:
description:
- A reference to a subnetwork.
- For internal load balancing, this field identifies the subnetwork that the load
balanced IP should belong to for this Forwarding Rule.
- If the network specified is in auto subnet mode, this field is optional. However,
if the network is in custom subnet mode, a subnetwork must be specified.
- This field is not used for external load balancing.
returned: success
type: str
target:
description:
- A reference to a TargetPool resource to receive the matched traffic.
- For regional forwarding rules, this target must live in the same region as the
forwarding rule. For global forwarding rules, this target must be a global load
balancing resource. The forwarded traffic must be of a type appropriate to the
target object.
- This field is not used for internal load balancing.
returned: success
type: str
networkTier:
description:
- 'The networking tier used for configuring this address. This field can take the
following values: PREMIUM or STANDARD. If this field is not specified, it is assumed
to be PREMIUM.'
returned: success
type: str
region:
description:
- A reference to the region where the regional forwarding rule resides.
- This field is not applicable to global forwarding rules.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, replace_resource_dict
import json
import time
################################################################################
# Main
################################################################################
def main():
    """Entry point: reconcile the GCP forwarding rule with the declared state."""
    module = GcpModule(
        argument_spec=dict(
            state=dict(default='present', choices=['present', 'absent'], type='str'),
            description=dict(type='str'),
            ip_address=dict(type='str'),
            ip_protocol=dict(type='str', choices=['TCP', 'UDP', 'ESP', 'AH', 'SCTP', 'ICMP']),
            backend_service=dict(),
            ip_version=dict(type='str', choices=['IPV4', 'IPV6']),
            load_balancing_scheme=dict(type='str', choices=['INTERNAL', 'EXTERNAL']),
            name=dict(required=True, type='str'),
            network=dict(),
            port_range=dict(type='str'),
            ports=dict(type='list', elements='str'),
            subnetwork=dict(),
            target=dict(),
            network_tier=dict(type='str', choices=['PREMIUM', 'STANDARD']),
            region=dict(required=True, type='str')
        )
    )

    # Default to the compute scope when the user supplied none.
    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/compute']

    state = module.params['state']
    kind = 'compute#forwardingRule'

    fetch = fetch_resource(module, self_link(module), kind)
    changed = False
    if fetch and state == 'present':
        # Resource exists and should exist: update only when it drifted.
        if is_different(module, fetch):
            update(module, self_link(module), kind, fetch)
            fetch = fetch_resource(module, self_link(module), kind)
            changed = True
    elif fetch:
        # Resource exists but state is 'absent': remove it.
        delete(module, self_link(module), kind)
        fetch = {}
        changed = True
    elif state == 'present':
        # Resource missing and should exist: create it.
        fetch = create(module, collection(module), kind)
        changed = True
    else:
        fetch = {}

    fetch.update({'changed': changed})
    module.exit_json(**fetch)
def create(module, link, kind):
    """POST the new forwarding rule and block until the operation finishes."""
    session = GcpSession(module, 'compute')
    response = session.post(link, resource_to_request(module))
    return wait_for_operation(module, response)
def update(module, link, kind, fetch):
    """Push field updates derived from the params, then re-read the resource."""
    desired = resource_to_request(module)
    current = response_to_hash(module, fetch)
    update_fields(module, desired, current)
    return fetch_resource(module, self_link(module), kind)
def update_fields(module, request, response):
    """Dispatch per-field update calls for fields that can change in place.

    Only 'target' can be updated without recreating the rule; it has a
    dedicated setTarget API call.
    """
    if request.get('target') != response.get('target'):
        target_update(module, request, response)
def target_update(module, request, response):
    """Call the regional setTarget API to repoint this forwarding rule."""
    url = ("https://www.googleapis.com/compute/v1/"
           "projects/{project}/regions/{region}/forwardingRules/{name}/setTarget"
           ).format(**module.params)
    body = {
        u'target': replace_resource_dict(module.params.get(u'target', {}), 'selfLink')
    }
    GcpSession(module, 'compute').post(url, body)
def delete(module, link, kind):
    """DELETE the resource and block until the operation finishes."""
    session = GcpSession(module, 'compute')
    return wait_for_operation(module, session.delete(link))
def resource_to_request(module):
    """Build the API request payload from the module parameters.

    Falsy values (other than a literal False) are stripped so unset
    parameters are simply omitted from the request body.
    """
    params = module.params
    request = {
        u'kind': 'compute#forwardingRule',
        u'description': params.get('description'),
        u'IPAddress': params.get('ip_address'),
        u'IPProtocol': params.get('ip_protocol'),
        u'backendService': replace_resource_dict(params.get(u'backend_service', {}), 'selfLink'),
        u'ipVersion': params.get('ip_version'),
        u'loadBalancingScheme': params.get('load_balancing_scheme'),
        u'name': params.get('name'),
        u'network': replace_resource_dict(params.get(u'network', {}), 'selfLink'),
        u'portRange': params.get('port_range'),
        u'ports': params.get('ports'),
        u'subnetwork': replace_resource_dict(params.get(u'subnetwork', {}), 'selfLink'),
        u'target': replace_resource_dict(params.get(u'target', {}), 'selfLink'),
        u'networkTier': params.get('network_tier')
    }
    return {k: v for k, v in request.items() if v or v is False}
def fetch_resource(module, link, kind, allow_not_found=True):
    """GET the resource at *link*, returning None when it is absent."""
    session = GcpSession(module, 'compute')
    return return_if_object(module, session.get(link), kind, allow_not_found)
def self_link(module):
    """Return the canonical URL of this regional forwarding rule."""
    return ("https://www.googleapis.com/compute/v1/projects/%(project)s"
            "/regions/%(region)s/forwardingRules/%(name)s" % module.params)
def collection(module):
    """Return the URL of the regional forwardingRules collection."""
    return ("https://www.googleapis.com/compute/v1/projects/%(project)s"
            "/regions/%(region)s/forwardingRules" % module.params)
def return_if_object(module, response, kind, allow_not_found=False):
    """Decode *response* into a dict, or fail the module on API errors.

    Returns None for a 404 (when allow_not_found) and for 204 No Content.
    """
    # If not found, return nothing.
    if allow_not_found and response.status_code == 404:
        return None
    # If no content, return nothing.
    if response.status_code == 204:
        return None
    try:
        module.raise_for_status(response)
        result = response.json()
    # json.decoder.JSONDecodeError does not exist on Python 2, where the
    # json module raises ValueError instead; getattr keeps both working.
    except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
        module.fail_json(msg="Invalid JSON response with error: %s" % inst)
    # A 2xx reply can still carry an error payload; surface it as a failure.
    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
    return result
def is_different(module, response):
    """Return True when the declared config differs from the live resource."""
    request = resource_to_request(module)
    response = response_to_hash(module, response)
    # Compare only the keys present on both sides: the response carries
    # output-only fields and the request omits unset parameters.
    response_vals = {k: v for k, v in response.items() if k in request}
    request_vals = {k: v for k, v in request.items() if k in response}
    return GcpRequest(request_vals) != GcpRequest(response_vals)
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
    """Normalize an API response to the subset of fields we diff against."""
    return {
        u'creationTimestamp': response.get(u'creationTimestamp'),
        u'description': response.get(u'description'),
        u'id': response.get(u'id'),
        u'IPAddress': response.get(u'IPAddress'),
        u'IPProtocol': response.get(u'IPProtocol'),
        u'backendService': response.get(u'backendService'),
        u'ipVersion': response.get(u'ipVersion'),
        u'loadBalancingScheme': response.get(u'loadBalancingScheme'),
        u'name': response.get(u'name'),
        u'network': response.get(u'network'),
        u'portRange': response.get(u'portRange'),
        u'ports': response.get(u'ports'),
        u'subnetwork': response.get(u'subnetwork'),
        u'target': response.get(u'target'),
        # NOTE(review): unlike every other field this echoes the module
        # parameter rather than the API response, so drift in networkTier
        # can never be detected by is_different -- confirm this is the
        # intent of the generator.
        u'networkTier': module.params.get('network_tier')
    }
def async_op_url(module, extra_data=None):
    """Build the polling URL for a regional Operations resource."""
    template = ("https://www.googleapis.com/compute/v1/"
                "projects/{project}/regions/{region}/operations/{op_id}")
    # Module params take precedence over any caller-supplied extras.
    values = dict(extra_data or {})
    values.update(module.params)
    return template.format(**values)
def wait_for_operation(module, response):
    """Block until the compute operation behind *response* completes."""
    op_result = return_if_object(module, response, 'compute#operation')
    if op_result is None:
        return {}
    finished = wait_for_completion(
        navigate_hash(op_result, ['status']), op_result, module)
    # Once the operation is DONE, fetch the resource it acted upon.
    return fetch_resource(
        module, navigate_hash(finished, ['targetLink']), 'compute#forwardingRule')
def wait_for_completion(status, op_result, module):
    """Poll the Operations endpoint until *op_result* reaches DONE.

    Fails the module if the operation ever reports errors; otherwise
    returns the final operation resource.
    """
    op_id = navigate_hash(op_result, ['name'])
    op_uri = async_op_url(module, {'op_id': op_id})
    while status != 'DONE':
        # BUG FIX: raise_if_errors takes (response, err_path, module); the
        # literal string 'message' was previously passed as the module, so
        # any operation error crashed with AttributeError ('str' has no
        # fail_json) instead of failing the module cleanly.
        raise_if_errors(op_result, ['error', 'errors'], module)
        time.sleep(1.0)
        op_result = fetch_resource(module, op_uri, 'compute#operation')
        status = navigate_hash(op_result, ['status'])
    return op_result
def raise_if_errors(response, err_path, module):
    """Fail the module when the payload at *err_path* carries errors."""
    errors = navigate_hash(response, err_path)
    if errors is None:
        return
    module.fail_json(msg=errors)
if __name__ == '__main__':
main()
| gregdek/ansible | lib/ansible/modules/cloud/google/gcp_compute_forwarding_rule.py | Python | gpl-3.0 | 25,478 |
# coding=utf-8
from __future__ import absolute_import
import gevent
from gevent.pywsgi import WSGIHandler
import sys
from webob import Request
from .response import Response
from .socket import Socket
from ..event_emitter import EventEmitter
from .transports import WebsocketTransport
import logging
logger = logging.getLogger(__name__)
__all__ = ['EngineHandler']
class EngineHandler(WSGIHandler, EventEmitter):
    """
    The WSGIHandler for EngineServer.

    It filters out the requests addressed to the engine's resource path and
    processes them itself, leaving every other request to the stock
    gevent WSGIHandler.
    """
    # Transports enabled by default; a server-level whitelist (set in
    # __init__) overrides this class attribute.
    transports = ('polling', 'websocket')

    def __init__(self, server_context, *args, **kwargs):
        super(EngineHandler, self).__init__(*args, **kwargs)
        EventEmitter.__init__(self)
        self.server_context = server_context
        if self.server_context.transports:
            self.transports = self.server_context.transports

    def bind_framework_info(self, socket):
        """Run the framework's WSGI app so it can attach its own info.

        The engine socket is exposed through the WSGI environ; this is why
        web frameworks define a /socket.io url pointing at a view.
        """
        # Run framework's wsgi application to hook up framework specific info eg. request
        # This is why we define /socket.io url in web frameworks and points them to a view
        logger.debug("[EngineHandler] Bind the framework specific info to engine socket")
        self.environ['engine_socket'] = socket
        try:
            def start_response(status, headers):
                # The framework's response is discarded; only its side
                # effects on the engine socket matter here.
                logger.debug("[EngineHandler] [%s] [%s]" % (status, headers))
            res = self.application(self.environ, start_response)
            logger.debug("[EngineHandler] %s" % res)
        except Exception, e:
            logger.debug("[EngineHandler] bind framework info met exception %s" % e)
            self.handle_error(*sys.exc_info())

    def handle_one_response(self):
        """
        There are 3 situations we get a new request:
        1. Handshake.
        2. Upgrade.
        3. Polling Request.
        After the transport been upgraded, all data transferring handled by the WebSocketTransport
        """
        path = self.environ.get('PATH_INFO')
        # Requests outside the engine's resource prefix are ordinary WSGI
        # traffic; let the base WSGIHandler serve them.
        if not path.lstrip('/').startswith(self.server_context.resource + '/'):
            return super(EngineHandler, self).handle_one_response()
        # Create a request and a response
        request = Request(self.get_environ())
        setattr(request, 'handler', self)
        setattr(request, 'response', Response())
        logger.debug("[EngineHandler] Incoming request with %s" % request.GET)
        # Upgrade the websocket if needed
        is_websocket = False
        if request.GET.get("transport", None) == "websocket":
            if 'Upgrade' in request.headers:
                logger.debug("[EngineHandler] It is a websocket upgrade request")
                # This is the ws upgrade request, here we handles the upgrade
                ws_handler = self.server_context.ws_handler_class(self.socket, self.client_address, self.server)
                ws_handler.__dict__.update(self.__dict__)
                ws_handler.prevent_wsgi_call = True
                ws_handler.handle_one_response()
                # Suppose here we have an websocket connection
                setattr(request, 'websocket', ws_handler.websocket)
                is_websocket = True
            else:
                # A websocket transport without an Upgrade header usually
                # means an intermediary stripped it.
                logger.warning("[EngineHandler] Client fired a websocket but the 'Upgrade' Header loose somewhere, maybe your proxy")
                return
        sid = request.GET.get("sid", None)
        b64 = request.GET.get("b64", False)
        socket = self.server_context.engine_sockets.get(sid, None)
        # FIXME CHECK WHETHER WE NEED THIS?
        if socket and not is_websocket:
            # We spawn a new gevent here, let socket do its own business.
            # In current event loop, we will wait on request.response, which is set in socket.set_request
            logger.debug("[EngineHandler] Found existing socket")
            self.bind_framework_info(socket)
            gevent.spawn(socket.process_request, request)
        else:
            if socket is None:
                logger.debug("[EngineHandler] No existing socket, handshake")
                socket = self._do_handshake(b64=b64, request=request)
            if not is_websocket:
                logger.debug("[EngineHandler] The incoming request not websocket, bind framework info")
                self.bind_framework_info(socket)
            if is_websocket and socket.transport.name != 'websocket':
                logger.debug("[EngineHandler] websocket, proceed as upgrade")
                # Here we have a upgrade
                ws_transport = WebsocketTransport(self, {})
                ws_transport.process_request(request)
                socket.maybe_upgrade(ws_transport)
        # wait till the response ends
        logger.debug("[EngineHandler] Waiting for the response signal")
        request.response.join()
        # The response object can be used as a wsgi application which will send out the buffer
        self.application = request.response
        # Call super handle_one_repsponse() to do timing, logging etc
        super(EngineHandler, self).handle_one_response()
        self.emit('cleanup')

    def _do_handshake(self, b64, request):
        """
        Handshake with the client to build an engine socket.

        :param b64: truthy when the client asked for base64-encoded payloads
                    (disables binary support on the new socket)
        :param request: the webob Request carrying the handshake query
        :return: the newly created, registered and opened Socket
        """
        transport_name = request.GET.get('transport', None)
        if transport_name not in self.transports:
            raise ValueError("transport name [%s] not supported" % transport_name)
        socket = Socket(request, supports_binary=not bool(b64))
        self.server_context.engine_sockets[socket.id] = socket
        def remove_socket(*args, **kwargs):
            # Drop the bookkeeping entry as soon as the socket closes.
            self.server_context.engine_sockets.pop(socket.id)
        socket.on('close', remove_socket)
        request.response.headers['Set-Cookie'] = 'io=%s' % socket.id
        socket.open()
        self.emit('connection', socket)
        return socket
| shuoli84/gevent_socketio2 | socketio/engine/handler.py | Python | mit | 5,977 |
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy import Column, Integer, String, ForeignKey, Text, Float
from sqlalchemy.orm import relationship, backref
from partsdb.system.Tables import Base, BaseMixIn, PartMixIn, ExonMixIn, AnnotationMixIn
from partsdb.tools.Annotators import BlastAnnotator, TableAnnotator
class Locus(Base, BaseMixIn):
    # Free-form text serialization of this locus's genomic coordinates.
    coordinates = Column( Text )
class Promoter(Base,BaseMixIn,PartMixIn):
    # Promoter part; all columns are inherited from the mix-ins.
    pass
class UTR5(Base,BaseMixIn,PartMixIn, ExonMixIn):
    # 5' untranslated region; part and exon columns come from the mix-ins.
    pass
class CDS(Base,BaseMixIn,PartMixIn, ExonMixIn):
    # Coding sequence; part and exon columns come from the mix-ins.
    # Annotation classes below (BlastpHit, InterProHit, DbxRef) target CDS.
    pass
class UTR3(Base,BaseMixIn,PartMixIn, ExonMixIn):
    # 3' untranslated region; part and exon columns come from the mix-ins.
    pass
class Terminator(Base,BaseMixIn,PartMixIn):
    # Terminator part; all columns are inherited from the mix-ins.
    pass
class Gene(Base,BaseMixIn):
    """Ties together the part records (promoter..terminator) of one gene."""
    name = Column( String(100) )
    alias = Column( String(100) )
    transcriptName = Column( String(200) )
    # Foreign keys to the individual part tables.
    promoterID = Column( Integer, ForeignKey('promoter.id') )
    utr5ID = Column( Integer, ForeignKey('utr5.id') )
    cdsID = Column( Integer, ForeignKey('cds.id') )
    utr3ID = Column( Integer, ForeignKey('utr3.id') )
    terminatorID = Column( Integer, ForeignKey('terminator.id') )
    locusID = Column( Integer, ForeignKey('locus.id') )
    # Strand of the gene at its locus (stored as an integer).
    locusStrand = Column( Integer )
    # enable_typechecks=False permits subclasses of the part classes to be
    # assigned to these relationships.
    promoter = relationship(Promoter, backref=backref("gene"), enable_typechecks=False)
    utr5 = relationship(UTR5, backref=backref("gene"), enable_typechecks=False)
    cds = relationship(CDS, backref=backref("gene"), enable_typechecks=False)
    utr3 = relationship(UTR3, backref=backref("gene"), enable_typechecks=False)
    terminator = relationship(Terminator, backref=backref("gene"), enable_typechecks=False)
    locus = relationship(Locus, backref=backref("gene"), enable_typechecks=False)
class BlastpHit( Base, BaseMixIn, AnnotationMixIn):
    """BLASTP hit annotation attached to a CDS, produced by BlastAnnotator."""
    __targetclass__ = CDS
    __annotatorclass__ = BlastAnnotator
    # Alignment statistics for the hit.
    coverage = Column( Float )
    qLen = Column( Integer )
    tLen = Column( Integer )
    tStart = Column( Integer )
    tEnd = Column( Integer )
    identity = Column( Float )
class InterProHit( Base, BaseMixIn, AnnotationMixIn):
    # InterPro domain annotation for a CDS, loaded via TableAnnotator.
    __targetclass__ = CDS
    __annotatorclass__ = TableAnnotator
class DbxRef( Base, BaseMixIn, AnnotationMixIn ):
    # Cross-database reference annotation for a CDS, loaded via TableAnnotator.
    __targetclass__ = CDS
    __annotatorclass__ = TableAnnotator
| HaseloffLab/MarpoDB | server/tables.py | Python | mit | 2,195 |
"""Tests for letsencrypt.renewer."""
import datetime
import os
import tempfile
import shutil
import unittest
import configobj
import mock
import pytz
from letsencrypt import configuration
from letsencrypt import errors
from letsencrypt.storage import ALL_FOUR
from letsencrypt.tests import test_util
CERT = test_util.load_cert('cert.pem')
def unlink_all(rc_object):
    """Unlink all four items associated with this RenewableCert."""
    for path in (getattr(rc_object, kind) for kind in ALL_FOUR):
        os.unlink(path)
def fill_with_sample_data(rc_object):
    """Put dummy data into all four files of this RenewableCert."""
    for kind in ALL_FOUR:
        target = getattr(rc_object, kind)
        # The item's own name doubles as its dummy content.
        with open(target, "w") as handle:
            handle.write(kind)
class RenewableCertTests(unittest.TestCase):
# pylint: disable=too-many-public-methods
"""Tests for letsencrypt.renewer.*."""
    def setUp(self):
        """Create a temp config-dir layout and an example.org fixture cert."""
        from letsencrypt import storage
        self.tempdir = tempfile.mkdtemp()
        self.cli_config = configuration.RenewerConfiguration(
            namespace=mock.MagicMock(config_dir=self.tempdir))
        # TODO: maybe provide RenewerConfiguration.make_dirs?
        os.makedirs(os.path.join(self.tempdir, "live", "example.org"))
        os.makedirs(os.path.join(self.tempdir, "archive", "example.org"))
        os.makedirs(os.path.join(self.tempdir, "configs"))
        config = configobj.ConfigObj()
        # Point all four lineage items (cert/privkey/chain/fullchain) at the
        # live/ directory, as the renewer itself would.
        for kind in ALL_FOUR:
            config[kind] = os.path.join(self.tempdir, "live", "example.org",
                                        kind + ".pem")
        config.filename = os.path.join(self.tempdir, "configs",
                                       "example.org.conf")
        self.defaults = configobj.ConfigObj()
        self.test_rc = storage.RenewableCert(
            config, self.defaults, self.cli_config)
    def tearDown(self):
        # Remove the whole temporary config tree created in setUp.
        shutil.rmtree(self.tempdir)
def test_initialization(self):
self.assertEqual(self.test_rc.lineagename, "example.org")
for kind in ALL_FOUR:
self.assertEqual(
getattr(self.test_rc, kind), os.path.join(
self.tempdir, "live", "example.org", kind + ".pem"))
    def test_renewal_bad_config(self):
        """Test that the RenewableCert constructor will complain if
        the renewal configuration file doesn't end in ".conf" or if it
        isn't a ConfigObj."""
        from letsencrypt import storage
        defaults = configobj.ConfigObj()
        config = configobj.ConfigObj()
        # These files don't exist and aren't created here; the point of the test
        # is to confirm that the constructor rejects them outright because of
        # the configfile's name.
        # NOTE(review): this loop overwrites config["cert"] on every pass;
        # config[kind] looks like what was intended -- confirm upstream.
        for kind in ALL_FOUR:
            config["cert"] = "nonexistent_" + kind + ".pem"
        config.filename = "nonexistent_sillyfile"
        self.assertRaises(
            errors.CertStorageError, storage.RenewableCert, config, defaults)
        # A non-ConfigObj first argument must be rejected with TypeError.
        self.assertRaises(TypeError, storage.RenewableCert, "fun", defaults)
    def test_renewal_incomplete_config(self):
        """Test that the RenewableCert constructor will complain if
        the renewal configuration file is missing a required file element."""
        from letsencrypt import storage
        defaults = configobj.ConfigObj()
        config = configobj.ConfigObj()
        config["cert"] = "imaginary_cert.pem"
        # Here the required privkey is missing.
        config["chain"] = "imaginary_chain.pem"
        config["fullchain"] = "imaginary_fullchain.pem"
        # Filename is valid (.conf), so the failure is about the missing key.
        config.filename = "imaginary_config.conf"
        self.assertRaises(
            errors.CertStorageError, storage.RenewableCert, config, defaults)
    def test_consistent(self): # pylint: disable=too-many-statements
        """Exercise each rule of consistent() in turn, from failing to passing."""
        oldcert = self.test_rc.cert
        self.test_rc.cert = "relative/path"
        # Absolute path for item requirement
        self.assertFalse(self.test_rc.consistent())
        self.test_rc.cert = oldcert
        # Items must exist requirement
        self.assertFalse(self.test_rc.consistent())
        # Items must be symlinks requirements
        fill_with_sample_data(self.test_rc)
        self.assertFalse(self.test_rc.consistent())
        unlink_all(self.test_rc)
        # Items must point to desired place if they are relative
        for kind in ALL_FOUR:
            os.symlink(os.path.join("..", kind + "17.pem"),
                       getattr(self.test_rc, kind))
        self.assertFalse(self.test_rc.consistent())
        unlink_all(self.test_rc)
        # Items must point to desired place if they are absolute
        for kind in ALL_FOUR:
            os.symlink(os.path.join(self.tempdir, kind + "17.pem"),
                       getattr(self.test_rc, kind))
        self.assertFalse(self.test_rc.consistent())
        unlink_all(self.test_rc)
        # Items must point to things that exist
        for kind in ALL_FOUR:
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    kind + "17.pem"),
                       getattr(self.test_rc, kind))
        self.assertFalse(self.test_rc.consistent())
        # This version should work (writes through the links create the targets)
        fill_with_sample_data(self.test_rc)
        self.assertTrue(self.test_rc.consistent())
        # Items must point to things that follow the naming convention
        os.unlink(self.test_rc.fullchain)
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "fullchain_17.pem"), self.test_rc.fullchain)
        with open(self.test_rc.fullchain, "w") as f:
            f.write("wrongly-named fullchain")
        self.assertFalse(self.test_rc.consistent())
    def test_current_target(self):
        """current_target() resolves both relative and absolute symlinks."""
        # Relative path logic
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "cert17.pem"), self.test_rc.cert)
        # Writing through the link creates the archive target file.
        with open(self.test_rc.cert, "w") as f:
            f.write("cert")
        self.assertTrue(os.path.samefile(self.test_rc.current_target("cert"),
                                         os.path.join(self.tempdir, "archive",
                                                      "example.org",
                                                      "cert17.pem")))
        # Absolute path logic
        os.unlink(self.test_rc.cert)
        os.symlink(os.path.join(self.tempdir, "archive", "example.org",
                                "cert17.pem"), self.test_rc.cert)
        with open(self.test_rc.cert, "w") as f:
            f.write("cert")
        self.assertTrue(os.path.samefile(self.test_rc.current_target("cert"),
                                         os.path.join(self.tempdir, "archive",
                                                      "example.org",
                                                      "cert17.pem")))
    def test_current_version(self):
        """current_version() reads the version number out of the link target."""
        # Create archive files for several versions (link is removed each pass).
        for ver in (1, 5, 10, 20):
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    "cert{0}.pem".format(ver)),
                       self.test_rc.cert)
            with open(self.test_rc.cert, "w") as f:
                f.write("cert")
            os.unlink(self.test_rc.cert)
        # Point the live link at version 10 and confirm it is reported.
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "cert10.pem"), self.test_rc.cert)
        self.assertEqual(self.test_rc.current_version("cert"), 10)
def test_no_current_version(self):
self.assertEqual(self.test_rc.current_version("cert"), None)
    def test_latest_and_next_versions(self):
        """latest_common_version() needs all four kinds; next_free_version() doesn't."""
        # Create versions 1..5 of all four files, leaving links at version 5.
        for ver in xrange(1, 6):
            for kind in ALL_FOUR:
                where = getattr(self.test_rc, kind)
                if os.path.islink(where):
                    os.unlink(where)
                os.symlink(os.path.join("..", "..", "archive", "example.org",
                                        "{0}{1}.pem".format(kind, ver)), where)
                with open(where, "w") as f:
                    f.write(kind)
        self.assertEqual(self.test_rc.latest_common_version(), 5)
        self.assertEqual(self.test_rc.next_free_version(), 6)
        # Having one kind of file of a later version doesn't change the
        # result
        os.unlink(self.test_rc.privkey)
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "privkey7.pem"), self.test_rc.privkey)
        with open(self.test_rc.privkey, "w") as f:
            f.write("privkey")
        self.assertEqual(self.test_rc.latest_common_version(), 5)
        # ... although it does change the next free version
        self.assertEqual(self.test_rc.next_free_version(), 8)
        # Nor does having three out of four change the result
        os.unlink(self.test_rc.cert)
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "cert7.pem"), self.test_rc.cert)
        with open(self.test_rc.cert, "w") as f:
            f.write("cert")
        os.unlink(self.test_rc.fullchain)
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "fullchain7.pem"), self.test_rc.fullchain)
        with open(self.test_rc.fullchain, "w") as f:
            f.write("fullchain")
        self.assertEqual(self.test_rc.latest_common_version(), 5)
        # If we have everything from a much later version, it does change
        # the result
        ver = 17
        for kind in ALL_FOUR:
            where = getattr(self.test_rc, kind)
            if os.path.islink(where):
                os.unlink(where)
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    "{0}{1}.pem".format(kind, ver)), where)
            with open(where, "w") as f:
                f.write(kind)
        self.assertEqual(self.test_rc.latest_common_version(), 17)
        self.assertEqual(self.test_rc.next_free_version(), 18)
    def test_update_link_to(self):
        """update_link_to() retargets a single kind's symlink to a version."""
        # Create versions 1..5 of all four files, leaving links at version 5.
        for ver in xrange(1, 6):
            for kind in ALL_FOUR:
                where = getattr(self.test_rc, kind)
                if os.path.islink(where):
                    os.unlink(where)
                os.symlink(os.path.join("..", "..", "archive", "example.org",
                                        "{0}{1}.pem".format(kind, ver)), where)
                with open(where, "w") as f:
                    f.write(kind)
                self.assertEqual(ver, self.test_rc.current_version(kind))
        self.test_rc.update_link_to("cert", 3)
        self.test_rc.update_link_to("privkey", 2)
        self.assertEqual(3, self.test_rc.current_version("cert"))
        self.assertEqual(2, self.test_rc.current_version("privkey"))
        self.assertEqual(5, self.test_rc.current_version("chain"))
        self.assertEqual(5, self.test_rc.current_version("fullchain"))
        # Currently we are allowed to update to a version that doesn't exist
        self.test_rc.update_link_to("chain", 3000)
        # However, current_version doesn't allow querying the resulting
        # version (because it's a broken link).
        self.assertEqual(os.path.basename(os.readlink(self.test_rc.chain)),
                         "chain3000.pem")
    def test_version(self):
        """version() builds the archive path for an arbitrary version number."""
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "cert12.pem"), self.test_rc.cert)
        with open(self.test_rc.cert, "w") as f:
            f.write("cert")
        # TODO: We should probably test that the directory is still the
        #       same, but it's tricky because we can get an absolute
        #       path out when we put a relative path in.
        self.assertEqual("cert8.pem",
                         os.path.basename(self.test_rc.version("cert", 8)))
    def test_update_all_links_to(self):
        """update_all_links_to() moves all four kinds to the chosen version."""
        # Create versions 1..5 of all four files, leaving links at version 5.
        for ver in xrange(1, 6):
            for kind in ALL_FOUR:
                where = getattr(self.test_rc, kind)
                if os.path.islink(where):
                    os.unlink(where)
                os.symlink(os.path.join("..", "..", "archive", "example.org",
                                        "{0}{1}.pem".format(kind, ver)), where)
                with open(where, "w") as f:
                    f.write(kind)
                self.assertEqual(ver, self.test_rc.current_version(kind))
        self.assertEqual(self.test_rc.latest_common_version(), 5)
        for ver in xrange(1, 6):
            self.test_rc.update_all_links_to(ver)
            for kind in ALL_FOUR:
                self.assertEqual(ver, self.test_rc.current_version(kind))
            # Retargeting links doesn't change the latest available version.
            self.assertEqual(self.test_rc.latest_common_version(), 5)
    def test_has_pending_deployment(self):
        """has_pending_deployment() is True iff links lag the latest version."""
        # Create versions 1..5 of all four files.
        for ver in xrange(1, 6):
            for kind in ALL_FOUR:
                where = getattr(self.test_rc, kind)
                if os.path.islink(where):
                    os.unlink(where)
                os.symlink(os.path.join("..", "..", "archive", "example.org",
                                        "{0}{1}.pem".format(kind, ver)), where)
                with open(where, "w") as f:
                    f.write(kind)
                self.assertEqual(ver, self.test_rc.current_version(kind))
        for ver in xrange(1, 6):
            self.test_rc.update_all_links_to(ver)
            for kind in ALL_FOUR:
                self.assertEqual(ver, self.test_rc.current_version(kind))
            if ver < 5:
                self.assertTrue(self.test_rc.has_pending_deployment())
            else:
                self.assertFalse(self.test_rc.has_pending_deployment())
    def _test_notafterbefore(self, function, timestamp):
        """Shared driver: check that `function` returns the aware UTC datetime
        corresponding to `timestamp`, both with and without a version arg."""
        test_cert = test_util.load_vector("cert.pem")
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "cert12.pem"), self.test_rc.cert)
        with open(self.test_rc.cert, "w") as f:
            f.write(test_cert)
        desired_time = datetime.datetime.utcfromtimestamp(timestamp)
        desired_time = desired_time.replace(tzinfo=pytz.UTC)
        for result in (function(), function(12)):
            self.assertEqual(result, desired_time)
            # Result must be timezone-aware UTC, not naive.
            self.assertEqual(result.utcoffset(), datetime.timedelta(0))
    def test_notbefore(self):
        """notbefore() matches the test certificate's notBefore field."""
        self._test_notafterbefore(self.test_rc.notbefore, 1418337285)
        # 2014-12-11 22:34:45+00:00 = Unix time 1418337285
    def test_notafter(self):
        """notafter() matches the test certificate's notAfter field."""
        self._test_notafterbefore(self.test_rc.notafter, 1418942085)
        # 2014-12-18 22:34:45+00:00 = Unix time 1418942085
    @mock.patch("letsencrypt.storage.datetime")
    def test_time_interval_judgments(self, mock_datetime):
        """Test should_autodeploy() and should_autorenew() on the basis
        of expiry time windows."""
        test_cert = test_util.load_vector("cert.pem")
        # Create versions 11 and 12 of all four files.
        for kind in ALL_FOUR:
            where = getattr(self.test_rc, kind)
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    "{0}12.pem".format(kind)), where)
            with open(where, "w") as f:
                f.write(kind)
            os.unlink(where)
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    "{0}11.pem".format(kind)), where)
            with open(where, "w") as f:
                f.write(kind)
        # Install a real certificate as both versions of "cert".
        self.test_rc.update_all_links_to(12)
        with open(self.test_rc.cert, "w") as f:
            f.write(test_cert)
        self.test_rc.update_all_links_to(11)
        with open(self.test_rc.cert, "w") as f:
            f.write(test_cert)
        # Keep a working timedelta on the mocked datetime module.
        mock_datetime.timedelta = datetime.timedelta
        for (current_time, interval, result) in [
                # 2014-12-13 12:00:00+00:00 (about 5 days prior to expiry)
                # Times that should result in autorenewal/autodeployment
                (1418472000, "2 months", True), (1418472000, "1 week", True),
                # Times that should not
                (1418472000, "4 days", False), (1418472000, "2 days", False),
                # 2009-05-01 12:00:00+00:00 (about 5 years prior to expiry)
                # Times that should result in autorenewal/autodeployment
                (1241179200, "7 years", True),
                (1241179200, "11 years 2 months", True),
                # Times that should not
                (1241179200, "8 hours", False), (1241179200, "2 days", False),
                (1241179200, "40 days", False), (1241179200, "9 months", False),
                # 2015-01-01 (after expiry has already happened, so all
                #            intervals should cause autorenewal/autodeployment)
                (1420070400, "0 seconds", True),
                (1420070400, "10 seconds", True),
                (1420070400, "10 minutes", True),
                (1420070400, "10 weeks", True), (1420070400, "10 months", True),
                (1420070400, "10 years", True), (1420070400, "99 months", True),
        ]:
            sometime = datetime.datetime.utcfromtimestamp(current_time)
            mock_datetime.datetime.utcnow.return_value = sometime
            self.test_rc.configuration["deploy_before_expiry"] = interval
            self.test_rc.configuration["renew_before_expiry"] = interval
            self.assertEqual(self.test_rc.should_autodeploy(), result)
            self.assertEqual(self.test_rc.should_autorenew(), result)
    def test_should_autodeploy(self):
        """Test should_autodeploy() on the basis of reasons other than
        expiry time window."""
        # pylint: disable=too-many-statements
        # Autodeployment turned off
        self.test_rc.configuration["autodeploy"] = "0"
        self.assertFalse(self.test_rc.should_autodeploy())
        self.test_rc.configuration["autodeploy"] = "1"
        # No pending deployment (all links already point at the latest version)
        for ver in xrange(1, 6):
            for kind in ALL_FOUR:
                where = getattr(self.test_rc, kind)
                if os.path.islink(where):
                    os.unlink(where)
                os.symlink(os.path.join("..", "..", "archive", "example.org",
                                        "{0}{1}.pem".format(kind, ver)), where)
                with open(where, "w") as f:
                    f.write(kind)
        self.assertFalse(self.test_rc.should_autodeploy())
    @mock.patch("letsencrypt.storage.RenewableCert.ocsp_revoked")
    def test_should_autorenew(self, mock_ocsp):
        """Test should_autorenew on the basis of reasons other than
        expiry time window."""
        # pylint: disable=too-many-statements
        # Autorenewal turned off
        self.test_rc.configuration["autorenew"] = "0"
        self.assertFalse(self.test_rc.should_autorenew())
        self.test_rc.configuration["autorenew"] = "1"
        for kind in ALL_FOUR:
            where = getattr(self.test_rc, kind)
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    "{0}12.pem".format(kind)), where)
            with open(where, "w") as f:
                f.write(kind)
        # Mandatory renewal on the basis of OCSP revocation
        mock_ocsp.return_value = True
        self.assertTrue(self.test_rc.should_autorenew())
        mock_ocsp.return_value = False
    def test_save_successor(self):
        """save_successor() writes a new version; privkey is linked unless given."""
        # Create versions 1..5 of all four files.
        for ver in xrange(1, 6):
            for kind in ALL_FOUR:
                where = getattr(self.test_rc, kind)
                if os.path.islink(where):
                    os.unlink(where)
                os.symlink(os.path.join("..", "..", "archive", "example.org",
                                        "{0}{1}.pem".format(kind, ver)), where)
                with open(where, "w") as f:
                    f.write(kind)
        self.test_rc.update_all_links_to(3)
        self.assertEqual(6, self.test_rc.save_successor(3, "new cert", None,
                                                        "new chain"))
        with open(self.test_rc.version("cert", 6)) as f:
            self.assertEqual(f.read(), "new cert")
        with open(self.test_rc.version("chain", 6)) as f:
            self.assertEqual(f.read(), "new chain")
        # fullchain is synthesized as cert + chain.
        with open(self.test_rc.version("fullchain", 6)) as f:
            self.assertEqual(f.read(), "new cert" + "new chain")
        # version 6 of the key should be a link back to version 3
        self.assertFalse(os.path.islink(self.test_rc.version("privkey", 3)))
        self.assertTrue(os.path.islink(self.test_rc.version("privkey", 6)))
        # Let's try two more updates
        self.assertEqual(7, self.test_rc.save_successor(6, "again", None,
                                                        "newer chain"))
        self.assertEqual(8, self.test_rc.save_successor(7, "hello", None,
                                                        "other chain"))
        # All of the subsequent versions should link directly to the original
        # privkey.
        for i in (6, 7, 8):
            self.assertTrue(os.path.islink(self.test_rc.version("privkey", i)))
            self.assertEqual("privkey3.pem", os.path.basename(os.readlink(
                self.test_rc.version("privkey", i))))
        for kind in ALL_FOUR:
            self.assertEqual(self.test_rc.available_versions(kind), range(1, 9))
            self.assertEqual(self.test_rc.current_version(kind), 3)
        # Test updating from latest version rather than old version
        self.test_rc.update_all_links_to(8)
        self.assertEqual(9, self.test_rc.save_successor(8, "last", None,
                                                        "attempt"))
        for kind in ALL_FOUR:
            self.assertEqual(self.test_rc.available_versions(kind),
                             range(1, 10))
            self.assertEqual(self.test_rc.current_version(kind), 8)
        with open(self.test_rc.version("fullchain", 9)) as f:
            self.assertEqual(f.read(), "last" + "attempt")
        # Test updating when providing a new privkey. The key should
        # be saved in a new file rather than creating a new symlink.
        self.assertEqual(10, self.test_rc.save_successor(9, "with", "a",
                                                         "key"))
        self.assertTrue(os.path.exists(self.test_rc.version("privkey", 10)))
        self.assertFalse(os.path.islink(self.test_rc.version("privkey", 10)))
    def test_new_lineage(self):
        """Test for new_lineage() class method."""
        from letsencrypt import storage
        result = storage.RenewableCert.new_lineage(
            "the-lineage.com", "cert", "privkey", "chain", None,
            self.defaults, self.cli_config)
        # This consistency check tests most relevant properties about the
        # newly created cert lineage.
        self.assertTrue(result.consistent())
        self.assertTrue(os.path.exists(os.path.join(
            self.cli_config.renewal_configs_dir, "the-lineage.com.conf")))
        with open(result.fullchain) as f:
            self.assertEqual(f.read(), "cert" + "chain")
        # Let's do it again and make sure it makes a different lineage
        # (return value intentionally unused; only the on-disk effect matters).
        result = storage.RenewableCert.new_lineage(
            "the-lineage.com", "cert2", "privkey2", "chain2", None,
            self.defaults, self.cli_config)
        self.assertTrue(os.path.exists(os.path.join(
            self.cli_config.renewal_configs_dir, "the-lineage.com-0001.conf")))
        # Now trigger the detection of already existing files
        os.mkdir(os.path.join(
            self.cli_config.live_dir, "the-lineage.com-0002"))
        self.assertRaises(errors.CertStorageError,
                          storage.RenewableCert.new_lineage,
                          "the-lineage.com", "cert3", "privkey3", "chain3",
                          None, self.defaults, self.cli_config)
        os.mkdir(os.path.join(self.cli_config.archive_dir, "other-example.com"))
        self.assertRaises(errors.CertStorageError,
                          storage.RenewableCert.new_lineage,
                          "other-example.com", "cert4", "privkey4", "chain4",
                          None, self.defaults, self.cli_config)
        # Make sure it can accept renewal parameters
        params = {"stuff": "properties of stuff", "great": "awesome"}
        result = storage.RenewableCert.new_lineage(
            "the-lineage.com", "cert2", "privkey2", "chain2",
            params, self.defaults, self.cli_config)
        # TODO: Conceivably we could test that the renewal parameters actually
        #       got saved
    def test_new_lineage_nonexistent_dirs(self):
        """Test that directories can be created if they don't exist."""
        from letsencrypt import storage
        # Remove the directories made in setUp so new_lineage must recreate them.
        shutil.rmtree(self.cli_config.renewal_configs_dir)
        shutil.rmtree(self.cli_config.archive_dir)
        shutil.rmtree(self.cli_config.live_dir)
        storage.RenewableCert.new_lineage(
            "the-lineage.com", "cert2", "privkey2", "chain2",
            None, self.defaults, self.cli_config)
        self.assertTrue(os.path.exists(
            os.path.join(
                self.cli_config.renewal_configs_dir, "the-lineage.com.conf")))
        self.assertTrue(os.path.exists(os.path.join(
            self.cli_config.live_dir, "the-lineage.com", "privkey.pem")))
        self.assertTrue(os.path.exists(os.path.join(
            self.cli_config.archive_dir, "the-lineage.com", "privkey1.pem")))
    @mock.patch("letsencrypt.storage.le_util.unique_lineage_name")
    def test_invalid_config_filename(self, mock_uln):
        """A generated config filename not ending in .conf is rejected."""
        from letsencrypt import storage
        mock_uln.return_value = "this_does_not_end_with_dot_conf", "yikes"
        self.assertRaises(errors.CertStorageError,
                          storage.RenewableCert.new_lineage,
                          "example.com", "cert", "privkey", "chain",
                          None, self.defaults, self.cli_config)
def test_bad_kind(self):
self.assertRaises(
errors.CertStorageError, self.test_rc.current_target, "elephant")
self.assertRaises(
errors.CertStorageError, self.test_rc.current_version, "elephant")
self.assertRaises(
errors.CertStorageError, self.test_rc.version, "elephant", 17)
self.assertRaises(
errors.CertStorageError,
self.test_rc.available_versions, "elephant")
self.assertRaises(
errors.CertStorageError,
self.test_rc.newest_available_version, "elephant")
self.assertRaises(
errors.CertStorageError,
self.test_rc.update_link_to, "elephant", 17)
    def test_ocsp_revoked(self):
        """ocsp_revoked() is a stub that currently always reports False."""
        # XXX: This is currently hardcoded to False due to a lack of an
        #      OCSP server to test against.
        self.assertFalse(self.test_rc.ocsp_revoked())
def test_parse_time_interval(self):
from letsencrypt import storage
# XXX: I'm not sure if intervals related to years and months
# take account of the current date (if so, some of these
# may fail in the future, like in leap years or even in
# months of different lengths!)
intended = {"": 0, "17 days": 17, "23": 23, "1 month": 31,
"7 weeks": 49, "1 year 1 day": 366, "1 year-1 day": 364,
"4 years": 1461}
for time in intended:
self.assertEqual(storage.parse_time_interval(time),
datetime.timedelta(intended[time]))
    @mock.patch("letsencrypt.renewer.plugins_disco")
    @mock.patch("letsencrypt.account.AccountFileStorage")
    @mock.patch("letsencrypt.client.Client")
    def test_renew(self, mock_c, mock_acc_storage, mock_pd):
        """renew() fails gracefully on bad config and succeeds with valid mocks.

        Note: decorators apply bottom-up, so mock_c is Client,
        mock_acc_storage is AccountFileStorage, mock_pd is plugins_disco.
        """
        from letsencrypt import renewer
        test_cert = test_util.load_vector("cert-san.pem")
        for kind in ALL_FOUR:
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    kind + "1.pem"),
                       getattr(self.test_rc, kind))
        fill_with_sample_data(self.test_rc)
        with open(self.test_rc.cert, "w") as f:
            f.write(test_cert)
        # Fails because renewalparams are missing
        self.assertFalse(renewer.renew(self.test_rc, 1))
        self.test_rc.configfile["renewalparams"] = {"some": "stuff"}
        # Fails because there's no authenticator specified
        self.assertFalse(renewer.renew(self.test_rc, 1))
        self.test_rc.configfile["renewalparams"]["rsa_key_size"] = "2048"
        self.test_rc.configfile["renewalparams"]["server"] = "acme.example.com"
        self.test_rc.configfile["renewalparams"]["authenticator"] = "fake"
        self.test_rc.configfile["renewalparams"]["dvsni_port"] = "4430"
        self.test_rc.configfile["renewalparams"]["account"] = "abcde"
        mock_auth = mock.MagicMock()
        mock_pd.PluginsRegistry.find_all.return_value = {"apache": mock_auth}
        # Fails because "fake" != "apache"
        self.assertFalse(renewer.renew(self.test_rc, 1))
        self.test_rc.configfile["renewalparams"]["authenticator"] = "apache"
        mock_client = mock.MagicMock()
        # pylint: disable=star-args
        mock_client.obtain_certificate.return_value = (
            mock.MagicMock(body=CERT), CERT, mock.Mock(pem="key"),
            mock.sentinel.csr)
        mock_c.return_value = mock_client
        # A successful renewal returns the new version number.
        self.assertEqual(2, renewer.renew(self.test_rc, 1))
        # TODO: We could also make several assertions about calls that should
        #       have been made to the mock functions here.
        mock_acc_storage().load.assert_called_once_with(account_id="abcde")
        mock_client.obtain_certificate.return_value = (
            mock.sentinel.certr, None, mock.sentinel.key, mock.sentinel.csr)
        # This should fail because the renewal itself appears to fail
        self.assertFalse(renewer.renew(self.test_rc, 1))
    @mock.patch("letsencrypt.renewer.notify")
    @mock.patch("letsencrypt.storage.RenewableCert")
    @mock.patch("letsencrypt.renewer.renew")
    def test_main(self, mock_renew, mock_rc, mock_notify):
        """Test for main() function."""
        from letsencrypt import renewer
        # A lineage that wants both deployment and renewal.
        mock_rc_instance = mock.MagicMock()
        mock_rc_instance.should_autodeploy.return_value = True
        mock_rc_instance.should_autorenew.return_value = True
        mock_rc_instance.latest_common_version.return_value = 10
        mock_rc.return_value = mock_rc_instance
        with open(os.path.join(self.cli_config.renewal_configs_dir,
                               "README"), "w") as f:
            f.write("This is a README file to make sure that the renewer is")
            f.write("able to correctly ignore files that don't end in .conf.")
        with open(os.path.join(self.cli_config.renewal_configs_dir,
                               "example.org.conf"), "w") as f:
            # This isn't actually parsed in this test; we have a separate
            # test_initialization that tests the initialization, assuming
            # that configobj can correctly parse the config file.
            f.write("cert = cert.pem\nprivkey = privkey.pem\n")
            f.write("chain = chain.pem\nfullchain = fullchain.pem\n")
        with open(os.path.join(self.cli_config.renewal_configs_dir,
                               "example.com.conf"), "w") as f:
            f.write("cert = cert.pem\nprivkey = privkey.pem\n")
            f.write("chain = chain.pem\nfullchain = fullchain.pem\n")
        renewer.main(self.defaults, args=[
            '--config-dir', self.cli_config.config_dir])
        # Two .conf files -> two lineages processed (README ignored).
        self.assertEqual(mock_rc.call_count, 2)
        self.assertEqual(mock_rc_instance.update_all_links_to.call_count, 2)
        self.assertEqual(mock_notify.notify.call_count, 4)
        self.assertEqual(mock_renew.call_count, 2)
        # If we have instances that don't need any work done, no work should
        # be done (call counts associated with processing deployments or
        # renewals should not increase).
        mock_happy_instance = mock.MagicMock()
        mock_happy_instance.should_autodeploy.return_value = False
        mock_happy_instance.should_autorenew.return_value = False
        mock_happy_instance.latest_common_version.return_value = 10
        mock_rc.return_value = mock_happy_instance
        renewer.main(self.defaults, args=[
            '--config-dir', self.cli_config.config_dir])
        self.assertEqual(mock_rc.call_count, 4)
        self.assertEqual(mock_happy_instance.update_all_links_to.call_count, 0)
        self.assertEqual(mock_notify.notify.call_count, 4)
        self.assertEqual(mock_renew.call_count, 2)
    def test_bad_config_file(self):
        """main() tolerates a malformed renewal config without crashing."""
        from letsencrypt import renewer
        with open(os.path.join(self.cli_config.renewal_configs_dir,
                               "bad.conf"), "w") as f:
            f.write("incomplete = configfile\n")
        renewer.main(self.defaults, args=[
            '--config-dir', self.cli_config.config_dir])
        # The errors.CertStorageError is caught inside and nothing happens.
if __name__ == "__main__":
unittest.main() # pragma: no cover
| hlieberman/letsencrypt | letsencrypt/tests/renewer_test.py | Python | apache-2.0 | 33,095 |
#!/usr/bin/python
# coding: UTF-8
# Driver for HD44780 LCD display on the RPi
# Written by: Ron Ritchey
# Derived from Lardconcepts
# https://gist.github.com/lardconcepts/4947360
# Which was also drived from Adafruit
# http://forums.adafruit.com/viewtopic.php?f=8&t=29207&start=15#p163445
#
# Useful references
# General overview of HD44780 style displays
# https://en.wikipedia.org/wiki/Hitachi_HD44780_LCD_controller
#
# More detail on initialization and timing
# http://web.alfredstate.edu/weimandn/lcd/lcd_initialization/lcd_initialization_index.html
#
import time, math,logging
import lcd_display_driver
import fonts
from PIL import Image
import graphics
try:
import RPi.GPIO as GPIO
except:
logging.debug("RPi.GPIO not installed")
class hd44780(lcd_display_driver.lcd_display_driver):
# commands
LCD_CLEARDISPLAY = 0x01
LCD_RETURNHOME = 0x02
LCD_ENTRYMODESET = 0x04
LCD_DISPLAYCONTROL = 0x08
LCD_CURSORSHIFT = 0x10
LCD_FUNCTIONSET = 0x20
LCD_SETCGRAMADDR = 0x40
LCD_SETDDRAMADDR = 0x80
# flags for display entry mode
LCD_ENTRYRIGHT = 0x00
LCD_ENTRYLEFT = 0x02
LCD_ENTRYSHIFTINCREMENT = 0x01
LCD_ENTRYSHIFTDECREMENT = 0x00
# flags for display on/off control
LCD_DISPLAYON = 0x04
LCD_DISPLAYOFF = 0x00
LCD_CURSORON = 0x02
LCD_CURSOROFF = 0x00
LCD_BLINKON = 0x01
LCD_BLINKOFF = 0x00
# flags for display/cursor shift
LCD_DISPLAYMOVE = 0x08
LCD_CURSORMOVE = 0x00
# flags for display/cursor shift
LCD_DISPLAYMOVE = 0x08
LCD_CURSORMOVE = 0x00
LCD_MOVERIGHT = 0x04
LCD_MOVELEFT = 0x00
# flags for function set
LCD_8BITMODE = 0x10
LCD_4BITMODE = 0x00
LCD_2LINE = 0x08
LCD_1LINE = 0x00
LCD_5x10s = 0x04
LCD_5x8DOTS = 0x00
character_translation = [
0, 1, 2, 3, 4, 5, 6, 7,255, -1, -1, -1, -1, -1, -1, -1, #0
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, #16
32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, #32
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, #48
64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, #64
80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 97, 93, 94, 95, #80
96, 97, 98, 99,100,101,102,103,104,105,106,107,108,109,110,111, #96
112,113,114,115,116,117,118,119,120,121,122, -1,124,125,126,127, #112
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, #128
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, #144
32,234,236,237, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, #160
223, -1, -1, -1, -1,228, -1,176, -1, -1, -1, -1, -1, -1, -1, -1, #176
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, #192
-1, 78, -1, -1, -1, -1, -1,235, -1, -1, -1, -1, -1, -1, -1,226, #208
-1, -1, -1, -1,225, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, #224
-1,238, -1, -1, -1, -1,239,253, -1, -1, -1, -1,245, -1, -1, -1 ] #240
    def __init__(self, rows=16, cols=80, rs=7, e=8, datalines=[25, 24, 23, 27]):
        """Initialize the GPIO pins and put the display into 4-bit mode.

        rows/cols are in PIXELS (a 5x8 font cell per character); rs, e, and
        datalines are BCM GPIO numbers.
        NOTE(review): the mutable default for datalines is shared across
        calls; harmless here since it is only read, never mutated.
        """
        # Default arguments are appropriate for Raspdac V3 only!!!
        self.pins_db = datalines
        self.pin_rs = rs
        self.pin_e = e
        self.rows = rows
        self.cols = cols
        # Character-cell dimensions (Python 2 integer division).
        self.rows_char = rows/8
        self.cols_char = cols/5
        self.curposition = (0,0)
        # image buffer to hold current display contents. Used to prevent unnecessary refreshes
        self.curimage = Image.new("1", (self.cols, self.rows))
        self.FONTS_SUPPORTED = True
        # Initialize the default font
        self.font = fonts.bmfont.bmfont('latin1_5x8_fixed.fnt')
        self.fp = self.font.fontpkg
        # Sets the values to offset into DDRAM for different display lines
        self.row_offsets = [ 0x00, 0x40, 0x14, 0x54 ]
        # Set GPIO pins to handle communications to display
        GPIO.setmode(GPIO.BCM)
        GPIO.setwarnings(False)
        for pin in self.pins_db:
            GPIO.setup(pin, GPIO.OUT, initial=GPIO.LOW)
        GPIO.setup(self.pin_e, GPIO.OUT, initial=GPIO.LOW)
        GPIO.setup(self.pin_rs, GPIO.OUT, initial=GPIO.LOW)
        # there is a good writeup on the HD44780 at Wikipedia
        # https://en.wikipedia.org/wiki/Hitachi_HD44780_LCD_controller
        # Magic init sequence to force the controller into 4-bit mode.
        self.write4bits(0x33,False)
        self.write4bits(0x32,False)
        # Initialize display control, function, and mode registers.
        displaycontrol = self.LCD_DISPLAYON | self.LCD_CURSOROFF | self.LCD_BLINKOFF
        displayfunction = self.LCD_4BITMODE | self.LCD_1LINE | self.LCD_2LINE | self.LCD_5x8DOTS
        displaymode = self.LCD_ENTRYLEFT | self.LCD_ENTRYSHIFTDECREMENT
        # Write registers.
        self.delayMicroseconds(1000)
        self.write4bits(self.LCD_DISPLAYCONTROL | displaycontrol, False)
        self.delayMicroseconds(2000)
        self.write4bits(self.LCD_FUNCTIONSET | displayfunction, False)
        self.write4bits(self.LCD_ENTRYMODESET | displaymode, False) # set the entry mode
        self.clear()
        # Set up parent class.
        super(hd44780, self).__init__(rows,cols)
    def createcustom(self, image):
        """Allocate a CGRAM custom character for a 5x8 image and return its code.

        Returns ord('?') when all 8 custom slots are in use; previously
        created characters are reused via a lookup table.
        """
        if self.currentcustom == 0:
            # initialize custom font memory
            self.customfontlookup = {}
        # The image should only be 5x8 but if larger, crop it
        img = image.crop( (0,0,5,8) )
        imgdata = list(img.convert("1").getdata())
        # Check to see if a custom character has already been created for this image
        if tuple(imgdata) in self.customfontlookup:
            return self.customfontlookup[tuple(imgdata)]
        # If there is space, create a custom character using the image provided
        if self.currentcustom > 7:
            return ord('?')
        # Set pointer to position char in CGRAM
        self.write4bits(self.LCD_SETCGRAMADDR+(self.currentcustom*8))
        # Increment currentcustom to point to the next custom char position
        self.currentcustom += 1
        # For each line of data from the image
        for j in range(8):
            line = 0
            # Computer a five bit value
            for i in range(5):
                if imgdata[j*5+i]:
                    line |= 1<<4-i
            # And then send it to the custom character memory region for the current customer character
            self.write4bits(line, True)
        # Save custom character in lookup table
        self.customfontlookup[tuple(imgdata)] = self.currentcustom - 1
        # Return the custom character position. We have to subtract one as we incremented it earlier in the function
        return self.currentcustom - 1
def compare(self, image, position):
imgdata = tuple(list(image.getdata()))
disdata = tuple(list(self.curimage.crop((position[0], position[1], position[0]+5, position[1]+8)).getdata()))
if imgdata == disdata:
return True
return False
    def update(self, image):
        """Render a PIL image to the display, one 5x8 character cell at a time."""
        # Make image the same size as the display
        img = image.crop( (0,0,self.cols, self.rows))
        # Make image black and white
        img = img.convert("1")
        # For each character sized cell from image, try to determine what character it is
        # by comparing it against the font reverse lookup dictionary
        # If you find a matching entry, output the cooresponding unicode value
        # else output a '?' symbol
        self.currentcustom = 0
        for j in range(self.rows_char):
            for i in range(self.cols_char):
                imgtest = img.crop( (i*5, j*8, (i+1)*5, (j+1)*8) )
                # Check to see if the img is the same as was previously updated
                # If it is, skip to the next character
                # NOTE(review): the skip optimization below is disabled (and
                # its column offset j*5 looks wrong; cells are 8 pixels tall).
                # if self.compare(imgtest, (i*5, j*5)):
                #     continue
                imgdata = tuple(list(imgtest.getdata()))
                char = self.font.imglookup[imgdata] if imgdata in self.font.imglookup else self.createcustom(imgtest)
                #print "Using char {0}".format(char)
                #frame = graphics.getframe(imgtest,0,0,5,8)
                #graphics.show(frame,5,1)
                # Check to see if there is a character in the font table that matches. If not, try to create a custom character for it.
                char = self.character_translation[char] if self.character_translation[char] >= 0 else self.createcustom(imgtest)
                # Write the resulting character value to the display
                self.setCursor(i,j)
                self.write4bits(char, True)
        # Save the current image to curimage
        self.curimage.paste(image.crop((0,0,self.cols,self.rows)),(0,0))
        self.setCursor(0,0)
        # Re-assert display-on in case a previous operation disturbed it.
        displaycontrol = self.LCD_DISPLAYON | self.LCD_CURSOROFF | self.LCD_BLINKOFF
        self.write4bits(self.LCD_DISPLAYCONTROL | displaycontrol, False)
    def clear(self):
        """Blank the display and reset the cursor and the cached image buffer."""
        # Set cursor back to 0,0
        self.setCursor(0,0)
        self.curposition = (0,0)
        self.curimage = Image.new("1",(self.cols,self.rows))
        # And then clear the screen
        self.write4bits(self.LCD_CLEARDISPLAY) # command to clear display
        self.delayMicroseconds(2000) # 2000 microsecond sleep, clearing the display takes a long time
def setCursor(self, col_char, row_char):
if row_char > self.rows_char or col_char > self.cols_char:
raise IndexError
if (row_char > self.rows_char):
row = self.rows_char - 1 # we count rows starting w/0
self.write4bits(self.LCD_SETDDRAMADDR | (col_char + self.row_offsets[row_char]))
self.curposition = (col_char, row_char)
    def loadcustomchars(self, char, fontdata):
        """Load glyph bitmaps into CGRAM starting at custom slot `char`.

        Raises IndexError if the glyphs would overflow the 8 available slots.
        """
        # Load custom characters
        # Verify that there is room in the display
        # Only 8 special characters allowed
        if len(fontdata) + char > 8:
            logging.debug("Can not load fontset at position {0}. Not enough room left".format(char))
            raise IndexError
        # Set pointer to position char in CGRAM
        self.write4bits(self.LCD_SETCGRAMADDR+(char*8))
        # Need a short sleep for display to stablize
        time.sleep(.01)
        # For each font in fontdata
        for font in fontdata:
            for byte in font:
                self.write4bits(byte, True)
def message(self, text, row_char=0, col_char=0):
    '''Send string to LCD starting at (col_char, row_char).

    A newline moves to the start of the next row (clamped at the bottom).
    Characters are run through the European-charset translation table
    before being written; untranslatable characters are dropped.
    Raises IndexError when the start position is off-screen.
    '''
    if row_char > self.rows_char or col_char > self.cols_char:
        raise IndexError
    self.setCursor(col_char, row_char)
    for char in text:
        if char == '\n':
            # BUGFIX: the original tested an uninitialized local `row`
            # here, raising NameError on the first newline.  Use the
            # tracked cursor row instead, keeping the original clamp
            # comparison against rows_char.
            current_row = self.curposition[1]
            row = current_row + 1 if current_row < self.rows_char else current_row
            self.setCursor(0, row)
        else:
            # Translate incoming character into correct value for European charset
            # and then send it to display. Use space if character is out of range.
            c = ord(char)
            if c > 255: c = 32
            ct = self.character_translation[c]
            # BUGFIX: accept translation value 0 as well — elsewhere in this
            # driver `>= 0` marks a valid translation and negatives mark
            # untranslatable characters.
            if ct >= 0:
                self.write4bits(ct, True)
def cleanup(self):
    # Release every GPIO pin claimed by this driver back to the OS.
    GPIO.cleanup()
def msgtest(self, text, wait=1.5):
    '''Clear the display, show ``text``, then pause ``wait`` seconds.'''
    self.clear()
    # BUGFIX: was `lcd.message(text)`, which relied on the global `lcd`
    # that only exists when this module is run as a script.
    self.message(text)
    time.sleep(wait)
if __name__ == '__main__':
import getopt,sys
try:
opts, args = getopt.getopt(sys.argv[1:],"hr:c:",["row=","col=","rs=","e=","d4=","d5=","d6=", "d7="])
except getopt.GetoptError:
print 'hd44780.py -r <rows> -c <cols> --rs <rs> --e <e> --d4 <d4> --d5 <d5> --d6 <d6> --d7 <d7>'
sys.exit(2)
# Set defaults
# These are for the wiring used by a Raspdac V3
rows = 16
cols = 80
rs = 7
e = 8
d4 = 25
d5 = 24
d6 = 23
d7 = 27
for opt, arg in opts:
if opt == '-h':
print 'hd44780.py -r <rows> -c <cols> --rs <rs> --e <e> --d4 <d4> --d5 <d5> --d6 <d6> --d7 <d7>'
sys.exit()
elif opt in ("-r", "--rows"):
rows = int(arg)
elif opt in ("-c", "--cols"):
cols = int(arg)
elif opt in ("--rs"):
rs = int(arg)
elif opt in ("--e"):
e = int(arg)
elif opt in ("--d4"):
d4 = int(arg)
elif opt in ("--d5"):
d5 = int(arg)
elif opt in ("--d6"):
d6 = int(arg)
elif opt in ("--d7"):
d7 = int(arg)
try:
pins = [d4, d5, d6, d7]
print "HD44780 LCD Display Test"
print "ROWS={0}, COLS={1}, RS={2}, E={3}, Pins={4}".format(rows,cols,rs,e,pins)
lcd = hd44780(rows,cols,rs,e,[d4, d5, d6, d7])
lcd.clear()
lcd.message("HD44780 LCD\nPi Powered")
time.sleep(4)
lcd.clear()
time.sleep(2)
except KeyboardInterrupt:
pass
finally:
try:
lcd.clear()
lcd.message("Goodbye!")
time.sleep(2)
lcd.clear()
except:
pass
time.sleep(.5)
GPIO.cleanup()
print u"LCD Display Test Complete"
| dhrone/pydKeg | displays/hd44780.py | Python | mit | 11,500 |
from __future__ import with_statement
from sympy.matrices.expressions.blockmatrix import (block_collapse, bc_matmul,
bc_block_plus_ident, BlockDiagMatrix, BlockMatrix, bc_dist, bc_matadd,
bc_transpose, blockcut, reblock_2x2, deblock)
from sympy.matrices.expressions import (MatrixSymbol, Identity, MatMul,
Inverse, trace, Transpose, det)
from sympy.matrices import Matrix, ImmutableMatrix
from sympy.core import Tuple, symbols, Expr
from sympy.functions import transpose
# Shared symbolic integer dimensions (i through n, plus p) used by the tests.
i, j, k, l, m, n, p = symbols('i:n, p', integer=True)
# Generic n x n matrix symbols shared across several tests.
A = MatrixSymbol('A', n, n)
B = MatrixSymbol('B', n, n)
C = MatrixSymbol('C', n, n)
D = MatrixSymbol('D', n, n)
G = MatrixSymbol('G', n, n)
H = MatrixSymbol('H', n, n)
# A 1x2 row of blocks and its 2x1 column counterpart.
b1 = BlockMatrix([[G, H]])
b2 = BlockMatrix([[G], [H]])
def test_bc_matmul():
    """bc_matmul collapses matrix products that involve BlockMatrices."""
    product = H * b1 * b2 * G
    expected = BlockMatrix([[(H*G*G + H*H*H)*G]])
    assert bc_matmul(product) == expected
def test_bc_matadd():
    """bc_matadd merges a sum of BlockMatrices blockwise."""
    lhs = BlockMatrix([[G, H]]) + BlockMatrix([[H, H]])
    expected = BlockMatrix([[G + H, H + H]])
    assert bc_matadd(lhs) == expected
def test_bc_transpose():
    """Transposing a BlockMatrix transposes the layout and each block."""
    blocked = BlockMatrix([[A, B], [C, D]])
    expected = BlockMatrix([[A.T, C.T], [B.T, D.T]])
    assert bc_transpose(Transpose(blocked)) == expected
def test_bc_dist_diag():
    """bc_dist folds a sum of BlockDiagMatrices into one BlockDiagMatrix."""
    blocks = [MatrixSymbol(name, dim, dim)
              for name, dim in zip('ABC', (n, m, l))]
    diag = BlockDiagMatrix(*blocks)
    doubled = BlockDiagMatrix(*[2 * blk for blk in blocks])
    assert bc_dist(diag + diag).equals(doubled)
def test_block_plus_ident():
    """Adding Identity(m+n) splits into per-block identities plus the matrix."""
    tl = MatrixSymbol('A', n, n)
    tr = MatrixSymbol('B', n, m)
    bl = MatrixSymbol('C', m, n)
    br = MatrixSymbol('D', m, m)
    blocked = BlockMatrix([[tl, tr], [bl, br]])
    expected = BlockDiagMatrix(Identity(n), Identity(m)) + blocked
    assert bc_block_plus_ident(blocked + Identity(m + n)) == expected
def test_BlockMatrix():
    """Core BlockMatrix behavior: construction, shape, transpose,
    interaction with ordinary MatrixExprs, and block_collapse."""
    A = MatrixSymbol('A', n, m)
    B = MatrixSymbol('B', n, k)
    C = MatrixSymbol('C', l, m)
    D = MatrixSymbol('D', l, k)
    M = MatrixSymbol('M', m + k, p)
    N = MatrixSymbol('N', l + n, k + m)
    X = BlockMatrix(Matrix([[A, B], [C, D]]))

    # Rebuilding from args round-trips.
    assert X.__class__(*X.args) == X

    # block_collapse does nothing on normal inputs
    E = MatrixSymbol('E', n, m)
    assert block_collapse(A + 2*E) == A + 2*E
    F = MatrixSymbol('F', m, m)
    assert block_collapse(E.T*A*F) == E.T*A*F

    assert X.shape == (l + n, k + m)
    assert X.blockshape == (2, 2)
    assert transpose(X) == BlockMatrix(Matrix([[A.T, C.T], [B.T, D.T]]))
    assert transpose(X).shape == X.shape[::-1]

    # Test that BlockMatrices and MatrixSymbols can still mix
    assert (X*M).is_MatMul
    assert X._blockmul(M).is_MatMul
    assert (X*M).shape == (n + l, p)
    assert (X + N).is_MatAdd
    assert X._blockadd(N).is_MatAdd
    assert (X + N).shape == X.shape

    # Block-column times block-row multiplies out to per-block sums.
    E = MatrixSymbol('E', m, 1)
    F = MatrixSymbol('F', k, 1)
    Y = BlockMatrix(Matrix([[E], [F]]))

    assert (X*Y).shape == (l + n, 1)
    assert block_collapse(X*Y).blocks[0, 0] == A*E + B*F
    assert block_collapse(X*Y).blocks[1, 0] == C*E + D*F

    # block_collapse passes down into container objects, transposes, and inverse
    assert block_collapse(transpose(X*Y)) == transpose(block_collapse(X*Y))
    assert block_collapse(Tuple(X*Y, 2*X)) == (
        block_collapse(X*Y), block_collapse(2*X))

    # Make sure that MatrixSymbols will enter 1x1 BlockMatrix if it simplifies
    Ab = BlockMatrix([[A]])
    Z = MatrixSymbol('Z', *A.shape)
    assert block_collapse(Ab + Z) == A + Z
def test_BlockMatrix_trace():
    """Trace of a block matrix is the sum of the diagonal blocks' traces."""
    A, B, C, D = [MatrixSymbol(name, 3, 3) for name in 'ABCD']
    blocked = BlockMatrix([[A, B], [C, D]])
    assert trace(blocked) == trace(A) + trace(D)
def test_BlockMatrix_Determinant():
    """det of a 2x2 block matrix uses the Schur complement of A when A
    is assumed invertible; the result is still a scalar Expr."""
    from sympy import assuming, Q
    A, B, C, D = [MatrixSymbol(name, 3, 3) for name in 'ABCD']
    blocked = BlockMatrix([[A, B], [C, D]])
    with assuming(Q.invertible(A)):
        assert det(blocked) == det(A) * det(D - C*A.I*B)
    assert isinstance(det(blocked), Expr)
def test_squareBlockMatrix():
    """Square BlockMatrix behavior: identity addition, mixing with plain
    MatrixSymbols, and blockwise inversion via Schur complements."""
    A = MatrixSymbol('A', n, n)
    B = MatrixSymbol('B', n, m)
    C = MatrixSymbol('C', m, n)
    D = MatrixSymbol('D', m, m)
    X = BlockMatrix([[A, B], [C, D]])
    Y = BlockMatrix([[A]])

    assert X.is_square

    # Identity folds into the diagonal blocks.
    assert (block_collapse(X + Identity(m + n)) ==
        BlockMatrix([[A + Identity(n), B], [C, D + Identity(m)]]))
    # Removed dead code: an unused local `Q = X + Identity(m + n)` was
    # assigned here and never read.
    assert (X + MatrixSymbol('Q', n + m, n + m)).is_MatAdd
    assert (X * MatrixSymbol('Q', n + m, n + m)).is_MatMul

    # A 1x1 block matrix inverts to its single block's inverse.
    assert block_collapse(Y.I) == A.I
    # 2x2 blockwise inverse via Schur complements.
    assert block_collapse(X.inverse()) == BlockMatrix([
        [(-B*D.I*C + A).I, -A.I*B*(D + -C*A.I*B).I],
        [-(D - C*A.I*B).I*C*A.I, (D - C*A.I*B).I]])

    assert isinstance(X.inverse(), Inverse)

    assert not X.is_Identity
    Z = BlockMatrix([[Identity(n), B], [C, D]])
    assert not Z.is_Identity
def test_BlockDiagMatrix():
    """BlockDiagMatrix structure, collapse rules, and MatrixExpr interop."""
    A = MatrixSymbol('A', n, n)
    B = MatrixSymbol('B', m, m)
    C = MatrixSymbol('C', l, l)
    M = MatrixSymbol('M', n + m + l, n + m + l)

    X = BlockDiagMatrix(A, B, C)
    Y = BlockDiagMatrix(A, 2*B, 3*C)

    assert X.blocks[1, 1] == B
    assert X.shape == (n + m + l, n + m + l)
    # Off-diagonal blocks are zero; diagonal blocks are exactly the inputs.
    assert all(X.blocks[i, j].is_ZeroMatrix if i != j else X.blocks[i, j] in [A, B, C]
            for i in range(3) for j in range(3))
    # Rebuilding from args round-trips.
    assert X.__class__(*X.args) == X

    assert isinstance(block_collapse(X.I * X), Identity)

    # Products and sums of BlockDiagMatrices stay block-diagonal.
    assert bc_matmul(X*X) == BlockDiagMatrix(A*A, B*B, C*C)
    assert block_collapse(X*X) == BlockDiagMatrix(A*A, B*B, C*C)
    #XXX: should be == ??
    assert block_collapse(X + X).equals(BlockDiagMatrix(2*A, 2*B, 2*C))
    assert block_collapse(X*Y) == BlockDiagMatrix(A*A, 2*B*B, 3*C*C)
    assert block_collapse(X + Y) == BlockDiagMatrix(2*A, 3*B, 4*C)

    # Ensure that BlockDiagMatrices can still interact with normal MatrixExprs
    assert (X*(2*M)).is_MatMul
    assert (X + (2*M)).is_MatAdd

    assert (X._blockmul(M)).is_MatMul
    assert (X._blockadd(M)).is_MatAdd
def test_blockcut():
    """blockcut partitions a matrix into a BlockMatrix along the given
    row-size and column-size tuples."""
    A = MatrixSymbol('A', n, m)
    B = blockcut(A, (n/2, n/2), (m/2, m/2))
    # Entry access agrees with the uncut matrix, even symbolically.
    assert A[i, j] == B[i, j]
    assert B == BlockMatrix([[A[:n/2, :m/2], A[:n/2, m/2:]],
                             [A[n/2:, :m/2], A[n/2:, m/2:]]])

    # Cutting an explicit matrix and reassembling round-trips.
    M = ImmutableMatrix(4, 4, range(16))
    B = blockcut(M, (2, 2), (2, 2))
    assert M == ImmutableMatrix(B)

    # Uneven partition sizes are allowed.
    B = blockcut(M, (1, 3), (2, 2))
    assert ImmutableMatrix(B.blocks[0, 1]) == ImmutableMatrix([[2, 3]])
def test_reblock_2x2():
    """reblock_2x2 regroups a 3x3 block layout into 2x2 without changing
    the overall shape or entries."""
    blocked = BlockMatrix([[MatrixSymbol('A_%d%d' % (row, col), 2, 2)
                            for col in range(3)]
                           for row in range(3)])
    assert blocked.blocks.shape == (3, 3)

    regrouped = reblock_2x2(blocked)
    assert regrouped.blocks.shape == (2, 2)
    assert regrouped.shape == blocked.shape
    assert regrouped.as_explicit() == blocked.as_explicit()
def test_deblock():
    """deblock inverts reblock_2x2 on a 4x4 grid of symbolic blocks."""
    blocked = BlockMatrix([[MatrixSymbol('A_%d%d' % (row, col), n, n)
                            for col in range(4)]
                           for row in range(4)])
    assert deblock(reblock_2x2(blocked)) == blocked
| lidavidm/mathics-heroku | venv/lib/python2.7/site-packages/sympy/matrices/expressions/tests/test_blockmatrix.py | Python | gpl-3.0 | 6,848 |
#!/usr/bin/python3
from PIL import Image, ImageDraw
import glob
import os
def calc_color_conversion( im, src, dest ) :
    '''
    For every palette index in ``src``, find the visually closest palette
    index in ``dest`` (nearest RGB by mean squared error).

    Returns a dict mapping each source index to its best destination index.
    ``im`` only needs to provide getpalette(): a flat [r, g, b, r, g, b, ...]
    list in RGB order.
    '''
    palette = im.getpalette()

    def rgb_of( idx ) :
        # Palette entries are packed three at a time (RGB, not BGR).
        base = idx * 3
        return ( palette[ base ], palette[ base + 1 ], palette[ base + 2 ] )

    def mean_sq_err( v1, v2 ) :
        return sum( (a - b) ** 2 for a, b in zip( v1, v2 ) ) / 3

    def nearest( idx ) :
        target = rgb_of( idx )
        best_idx = -1
        best_err = float( 'inf' )
        # Strict < keeps the earliest candidate on ties.
        for cand in dest :
            err = mean_sq_err( target, rgb_of( cand ) )
            if err < best_err :
                best_idx, best_err = cand, err
        return best_idx

    return { color : nearest( color ) for color in src }
def in_rect( x, y, rect ) :
    '''True if point (x, y) lies inside rect = (left, top, width, height).'''
    left, top, width, height = rect
    return left <= x < left + width and top <= y < top + height
def in_head_area( x, y ) :
    '''True when (x, y) falls in the sprite's head rectangle.'''
    return in_rect( x, y, (21, 6, 6, 5) )
def in_body_area( x, y ) :
    '''True when (x, y) falls in the sprite's body rectangle.'''
    return in_rect( x, y, (21, 18, 6, 11) )
def colorize( fname, ofname ) :
    '''Recolor one sprite: keep the head as player (house) color, turn the
    player-colored body into fire shades, and grey out any other player
    color, then save to ``ofname``.'''
    im = Image.open( fname )
    px = im.load() # pixel access
    # Map each house color (80-95) to the nearest grey (128-143) and the
    # nearest fire shade (200-207).
    # BUGFIX: the maps were built from src=(80, 95) — a 2-tuple of just the
    # endpoints — so any pixel with palette index 81..94 raised KeyError
    # below.  Build them over the full ranges, matching the other
    # *_colorize helpers in this file.
    conv_map_grey = calc_color_conversion( im, range( 80, 96 ), range( 128, 144 ) )
    conv_map_fire = calc_color_conversion( im, range( 80, 96 ), range( 200, 208 ) )
    # access individual pixels
    for j in range( im.height ) :
        for i in range( im.width ) :
            if in_head_area( i, j ) :
                # leave head as player color
                continue
            elif in_body_area( i, j ) :
                # make area of player color body fire color
                if 80 <= px[i, j] and px[i, j] <= 95 :
                    px[i, j] = conv_map_fire[ px[i, j] ]
            else :
                # make rest of the player color grey.
                if 80 <= px[i, j] and px[i, j] <= 95 :
                    px[i, j] = conv_map_grey[ px[i, j] ]
    # Lets save.
    im.save( ofname )
def remove_house_color( im ) :
    '''Replace every house-color pixel (palette 80-95) in-place with the
    nearest color from the rest of the palette.'''
    house = list( range( 80, 96 ) ) # house color range
    # Candidate targets: every palette index except the house colors.
    others = [ c for c in range( 256 ) if c not in house ]
    mapper = calc_color_conversion( im, house, others )
    px = im.load()
    for y in range( im.height ) :
        for x in range( im.width ) :
            if px[x, y] in house :
                px[x, y] = mapper[ px[x, y] ]
def house_colorize( im ) :
    '''Remap two fixed rectangles of the sprite into the house-color range
    (palette 80-95), leaving transparency (0) and shadow (4) untouched.'''
    # Every color is a candidate source except transparency and shadow.
    sources = [ c for c in range( 256 ) if c not in (0, 4) ]
    mapper = calc_color_conversion( im, sources, list( range( 80, 96 ) ) )
    regions = [ (46, 31, 13, 16), (8, 1, 23, 6) ]
    px = im.load()
    for y in range( im.height ) :
        for x in range( im.width ) :
            if px[x, y] in sources and any( in_rect( x, y, r ) for r in regions ) :
                px[x, y] = mapper[ px[x, y] ]
def bld_colorize( im ) :
    '''Grey-scale the sprite: map every color except transparency (0) and
    shadow (4) onto the grey ramp (palette 128-143).'''
    sources = [ c for c in range( 256 ) if c not in (0, 4) ]
    mapper = calc_color_conversion( im, sources, list( range( 128, 144 ) ) )
    px = im.load()
    for y in range( im.height ) :
        for x in range( im.width ) :
            if px[x, y] in sources :
                px[x, y] = mapper[ px[x, y] ]
if __name__ == "__main__" :
# Works on fire ant graphics.
# I've already segmented it so I can take advantage of it!
mapper1 = None
mapper2 = None
mapper3 = None
src1 = list( range( 128, 135+1 ) ) # bright to brighter
dest1 = list( range( 128, 131+1 ) )
src2 = list( range( 136, 143+1 ) ) # dark to darker
dest2 = list( range( 140, 143+1 ) )
src3 = list( range( 200, 207+1 ) ) # body to house color
dest3 = list( range( 80, 95+1 ) )
for fname in glob.glob( "ant*.png" ) :
print( fname )
im = Image.open( fname )
if not mapper1 :
mapper1 = calc_color_conversion( im, src1, dest1 )
mapper2 = calc_color_conversion( im, src2, dest2 )
mapper3 = calc_color_conversion( im, src3, dest3 )
px = im.load()
for j in range( im.height ) :
for i in range( im.width ) :
# First, move house color to grey area.
if 80 <= px[i, j] and px[i, j] <= 95 :
px[i, j] += 48
# We want to make grey area high contrast!
if px[i, j] in mapper1 :
px[i, j] = mapper1[ px[i, j ] ]
elif px[i, j] in mapper2 :
px[i, j] = mapper2[ px[i, j ] ]
elif px[i, j] in mapper3 :
px[i, j] = mapper3[ px[i, j ] ]
ofname = fname.replace( "ant1", "sant" )
im.save( ofname )
| forcecore/yupgi_alert0 | assets/shp/sant/make.py | Python | gpl-3.0 | 5,762 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.