code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the ShippingStatus model to the orders app.

    The ``models`` dict below is South's frozen ORM snapshot at the time this
    migration was generated -- it mirrors the models as they were and must not
    be edited to "fix" them.
    """

    def forwards(self, orm):
        # Adding model 'ShippingStatus'
        db.create_table(u'orders_shippingstatus', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('order', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orders.Order'])),
            ('status', self.gf('django.db.models.fields.CharField')(default='Not Shipped', max_length=120)),
            ('tracking_number', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
            ('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
        ))
        db.send_create_signal(u'orders', ['ShippingStatus'])

    def backwards(self, orm):
        # Deleting model 'ShippingStatus'
        db.delete_table(u'orders_shippingstatus')

    # Frozen ORM state (auto-generated by South; do not hand-edit).
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'cart.cart': {
            'Meta': {'object_name': 'Cart'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'total': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '100', 'decimal_places': '2'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'orders.order': {
            'Meta': {'ordering': "['-status', '-cart']", 'object_name': 'Order'},
            'address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.Address']", 'null': 'True', 'blank': 'True'}),
            'cart': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cart.Cart']"}),
            'cc_four': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order_id': ('django.db.models.fields.CharField', [], {'default': "'ABC123'", 'max_length': '120'}),
            # (sic) 'Starded' reproduces the typo frozen from the model definition.
            'status': ('django.db.models.fields.CharField', [], {'default': "'Starded'", 'max_length': '50'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        },
        u'orders.shippingstatus': {
            'Meta': {'object_name': 'ShippingStatus'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orders.Order']"}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'Not Shipped'", 'max_length': '120'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'tracking_number': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'profiles.address': {
            'Meta': {'object_name': 'Address'},
            'address1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300'}),
            'address2': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'null': 'True', 'blank': 'True'}),
            'billing_address': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'default_address': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nickname': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'shipping_address': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'state': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        }
    }

    complete_apps = ['orders']
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from pathlib import Path
from django.conf import settings
from django.core.cache import caches
from django.test import TestCase
from django.test.utils import override_settings
from mock import ANY, MagicMock, Mock, patch
from lib.l10n_utils.gettext import (_append_to_lang_file, langfiles_for_path,
parse_python, parse_template,
po_msgs, pot_to_langfiles, template_is_active,
_get_template_tag_set, template_has_tag)
from lib.l10n_utils.tests import TempFileMixin
# The dedicated l10n cache backend; cleared between tests that touch it.
cache = caches['l10n']

# Test fixtures live next to this module under test_files/.
ROOT_PATH = Path(__file__).with_name('test_files')
LOCALES_PATH = ROOT_PATH / 'locale'
ROOT = str(ROOT_PATH)

# NOTE(review): without a trailing comma this is a plain string, not a
# 1-tuple -- confirm whether a tuple was intended.
TEMPLATE_DIRS = (os.path.join(ROOT, 'templates'))

# Names of the .lang fixture files used by the tests below.
DOTLANG_FILES = ['dude', 'walter', 'donny']

# doing this to keep @patch from passing a new mock
# we don't need to the decorated method.
TRUE_MOCK = Mock()
TRUE_MOCK.return_value = True
@override_settings(DEV=False)
class TestTemplateTagFuncs(TestCase):
    """Tests for template_is_active / template_has_tag and their caching."""
    # NOTE: stacked @patch decorators are applied bottom-up, so the mock
    # arguments arrive in *reverse* order of the decorator list.

    @patch('lib.l10n_utils.gettext._get_template_tag_set')
    @patch('lib.l10n_utils.gettext.cache.get')
    @patch('lib.l10n_utils.gettext.cache.set')
    def test_cache_hit(self, cache_set_mock, cache_get_mock, template_tags_mock):
        """Should not call other methods on cache hit."""
        cache_get_mock.return_value = set(['active'])
        self.assertTrue(template_is_active('the/dude', 'de'))
        cache_get_mock.assert_called_once_with('template_tag_set:the/dude:de')
        # Neither the file scan nor a cache write should happen on a hit.
        self.assertFalse(template_tags_mock.called)
        self.assertFalse(cache_set_mock.called)

    @patch('lib.l10n_utils.gettext._get_template_tag_set')
    @patch('lib.l10n_utils.gettext.cache.get')
    @patch('lib.l10n_utils.gettext.cache.set')
    def test_cache_miss(self, cache_set_mock, cache_get_mock, template_tags_mock):
        """Should check the files and set the cache on cache miss."""
        cache_get_mock.return_value = None
        template_tags_mock.return_value = set(['active'])
        self.assertTrue(template_is_active('the/dude', 'de'))
        cache_key = 'template_tag_set:the/dude:de'
        cache_get_mock.assert_called_once_with(cache_key)
        self.assertTrue(template_tags_mock.called)
        cache_set_mock.assert_called_once_with(cache_key, set(['active']))

    @patch('lib.l10n_utils.gettext.get_l10n_path')
    @patch('lib.l10n_utils.gettext.get_template')
    @patch('lib.l10n_utils.gettext.parse_template')
    @patch('lib.l10n_utils.gettext.lang_file_tag_set')
    def test_get_template_tag_set(self, lang_file_tag_set, parse_template_mock, get_template,
                                  get_lang_path):
        """Should return a unique set of tags from all lang files."""
        parse_template_mock.return_value = ['dude', 'walter']
        # One tag set per lang file consulted; the result is their union.
        lang_file_tag_set.side_effect = [set(['dude', 'donny']),
                                         set(['dude', 'uli', 'bunny']),
                                         set(['walter', 'brandt'])]
        self.assertSetEqual(_get_template_tag_set('stuff', 'es'),
                            set(['dude', 'walter', 'donny', 'uli', 'bunny', 'brandt']))

    @override_settings(LANGUAGE_CODE='en-US')
    def test_template_tag_set_default_locale(self):
        """The default language should always have every tag."""
        assert template_has_tag('the_dude', 'en-US', 'active')
class TestPOFiles(TestCase):
    """Tests for .po/.pot parsing and .lang file generation."""

    # Expected (comment, msgid) pairs shared by several tests below.
    good_messages = [
        [u'Said angrily, loudly, and repeatedly.',
         u'Is this your homework Larry?'],
        [None, u'The Dude minds!'],
    ]

    @override_settings(ROOT=ROOT, LOCALES_PATH=LOCALES_PATH)
    def test_parse_po(self):
        """Should return correct messages"""
        msgs = po_msgs('messages')
        expected = {
            u'templates/some_lang_files.html': self.good_messages,
            u'templates/firefox/fx.html': [[None, u'Find out if your device '
                                                  u'is supported »']],
            u'bedrock/firefox/templates/firefox/os/notes-1.3.html': [[
                u'For bug 982755',
                u'The WebIccManager API, which allows support for multiple sim cards, '
                u'has had updates: iccChangeEvent has been added using using event '
                u'generator <a href="%(url1)s">bug 814637</a>'
            ]],
        }
        self.assertDictEqual(msgs, expected)

    @override_settings(ROOT=ROOT, LOCALES_PATH=LOCALES_PATH)
    @patch('lib.l10n_utils.gettext._append_to_lang_file')
    @patch('lib.l10n_utils.gettext.langfiles_for_path')
    def test_po_to_langfiles(self, langfiles_mock, append_mock):
        """Should get the correct messages for the correct langfile."""
        # This should exclude the supported device message from the pot file.
        langfiles_mock.return_value = ['some_lang_files',
                                       'firefox/fx']
        pot_to_langfiles('messages')
        append_mock.assert_any_call(ANY, self.good_messages)

    @patch('os.path.exists', TRUE_MOCK)
    @patch('lib.l10n_utils.gettext.codecs')
    def test_append_to_lang_file(self, codecs_mock):
        """Should attempt to write a correctly formatted langfile."""
        _append_to_lang_file('dude.lang', self.good_messages)
        # Collect every string written through the mocked codecs file handle.
        lang_vals = codecs_mock.open.return_value
        lang_vals = lang_vals.__enter__.return_value.write.call_args_list
        lang_vals = [call[0][0] for call in lang_vals]
        expected = [
            u'\n\n# Said angrily, loudly, and repeatedly.\n'
            u';Is this your homework Larry?\nIs this your homework Larry?\n',
            u'\n\n;The Dude minds!\nThe Dude minds!\n',
        ]
        self.assertListEqual(lang_vals, expected)

    @patch('os.makedirs')
    @patch('lib.l10n_utils.gettext.codecs')
    def test_append_to_lang_file_dir_creation(self, codecs_mock, md_mock):
        """Should create dirs if required."""
        path_exists = os.path.join(ROOT, 'locale', 'templates', 'firefox',
                                   'fx.lang')
        path_dir_exists = os.path.join(ROOT, 'locale', 'templates', 'firefox',
                                       'new.lang')
        path_new = os.path.join(ROOT, 'locale', 'de', 'does', 'not',
                                'exist.lang')
        with patch('os.path.dirname') as dn_mock:
            # File already exists: the directory is never even computed.
            _append_to_lang_file(path_exists, {})
            assert not dn_mock.called
            dn_mock.reset_mock()
            # Missing file in an existing directory: dirname is consulted.
            dn_mock.return_value = os.path.join(ROOT, 'locale', 'templates',
                                                'firefox')
            _append_to_lang_file(path_dir_exists, {})
            assert dn_mock.called
        # Real os.path.dirname from here on: existing dir -> no makedirs,
        # missing dir -> makedirs is called.
        md_mock.reset_mock()
        _append_to_lang_file(path_dir_exists, {})
        assert not md_mock.called
        md_mock.reset_mock()
        _append_to_lang_file(path_new, {})
        assert md_mock.called

    @override_settings(ROOT=ROOT,
                       LOCALES_PATH=LOCALES_PATH,
                       DOTLANG_FILES=DOTLANG_FILES)
    @patch('lib.l10n_utils.gettext.parse_lang')
    @patch('lib.l10n_utils.gettext.codecs', MagicMock())
    def test_uses_default_lang_files(self, pl_mock):
        """Should use the default files from settings"""
        pl_mock.return_value = {}  # avoid side-effects
        pot_to_langfiles()
        calls = [(('{0}/locale/templates/{1}.lang'.format(ROOT, lf),),
                  {'skip_untranslated': False})
                 for lf in DOTLANG_FILES]
        pl_mock.assert_has_calls(calls)
class TestParseTemplate(TempFileMixin, TestCase):
    """Tests that parse_template finds {% add_lang_files %} declarations."""

    def setUp(self):
        # Tag-set lookups are cached; start each test with a clean cache.
        cache.clear()

    @patch('lib.l10n_utils.gettext.codecs')
    def test_single_lang_file_added(self, codecs_mock):
        tempf = self.tempfile("""
            {% add_lang_files "lebowski" %}
            {% block title %}El Dudarino{% endblock %}
        """)
        # parse_template opens files via codecs; feed it the temp file instead.
        codecs_mock.open.return_value = tempf
        lang_files = parse_template('file/doesnt/matter.html')
        assert lang_files == ['lebowski']

    @patch('lib.l10n_utils.gettext.codecs')
    def test_multiple_lang_files_added(self, codecs_mock):
        tempf = self.tempfile("""
            {% add_lang_files "lebowski" "walter" "dude" %}
            {% block title %}El Dudarino{% endblock %}
        """)
        codecs_mock.open.return_value = tempf
        lang_files = parse_template('file/doesnt/matter.html')
        assert lang_files == ['lebowski', 'walter', 'dude']
class TestParsePython(TempFileMixin, TestCase):
    """Tests that parse_python extracts the LANG_FILES constant."""

    @patch('lib.l10n_utils.gettext.codecs')
    def test_new_lang_file_defined_list(self, codecs_mock):
        """
        If `LANG_FILES` is defined as a single item list it should be returned.
        """
        tempf = self.tempfile("""
            from lib.l10n_utils.dotlang import _
            LANG_FILES = ['lebowski']
            walter_says = _("Donnie you're outa your element!")
        """)
        codecs_mock.open.return_value = tempf
        lang_files = parse_python('file/doesnt/matter.py')
        assert lang_files == ['lebowski']

    @patch('lib.l10n_utils.gettext.codecs')
    def test_new_multiple_lang_files_defined_list(self, codecs_mock):
        """
        If `LANG_FILES` is defined as a list it should be returned.
        """
        tempf = self.tempfile("""
            from lib.l10n_utils.dotlang import _
            LANG_FILES = ['lebowski', 'dude']
            walter_says = _("Donnie you're outa your element!")
        """)
        codecs_mock.open.return_value = tempf
        lang_files = parse_python('file/doesnt/matter.py')
        assert lang_files == ['lebowski', 'dude']

    @patch('lib.l10n_utils.gettext.codecs')
    def test_new_multiple_lang_files_multi_line(self, codecs_mock):
        """
        If `LANG_FILES` is defined as a multiline list it should be returned.
        """
        tempf = self.tempfile("""
            from lib.l10n_utils.dotlang import _
            LANG_FILES = [
                'lebowski',
                'dude',
            ]
            walter_says = _("Donnie you're outa your element!")
        """)
        codecs_mock.open.return_value = tempf
        lang_files = parse_python('file/doesnt/matter.py')
        assert lang_files == ['lebowski', 'dude']

    @patch('lib.l10n_utils.gettext.codecs')
    def test_new_single_lang_file_defined(self, codecs_mock):
        """
        If `LANG_FILES` is defined as a string it should be returned as a
        list of length 1.
        """
        tempf = self.tempfile("""
            from lib.l10n_utils.dotlang import _
            LANG_FILES = 'lebowski'
            walter_says = _("I'm stayin... Finishin' my coffee.")
        """)
        codecs_mock.open.return_value = tempf
        lang_files = parse_python('file/doesnt/matter.py')
        assert lang_files == ['lebowski']

    @patch('lib.l10n_utils.gettext.codecs')
    def test_new_single_lang_file_defined_dbl_quote(self, codecs_mock):
        """
        If `LANG_FILES` is defined as a double quoted string it should be
        returned as a list of length 1.
        """
        tempf = self.tempfile("""
            from lib.l10n_utils.dotlang import _
            LANG_FILES = "lebowski"
            walter_says = _("I'm stayin... Finishin' my coffee.")
        """)
        codecs_mock.open.return_value = tempf
        lang_files = parse_python('file/doesnt/matter.py')
        assert lang_files == ['lebowski']

    @patch('lib.l10n_utils.gettext.codecs')
    def test_no_lang_files_defined(self, codecs_mock):
        """
        If `LANG_FILES` is not defined an empty list should be returned.
        """
        tempf = self.tempfile("""
            from lib.l10n_utils.dotlang import _
            stuff = _('whatnot')
        """)
        codecs_mock.open.return_value = tempf
        lang_files = parse_python('file/doesnt/matter.py')
        assert lang_files == []
class TestLangfilesForPath(TestCase):
    """Exercise langfiles_for_path against the checked-in fixture files."""

    def test_tmpl_no_lang_files_defined(self):
        """
        If no lang files are set, a lang file name derived from the template
        path should be used.
        """
        path = 'lib/l10n_utils/tests/test_files/templates/no_lang_files.html'
        assert langfiles_for_path(path) == ['no_lang_files']

    def test_templ_lang_files_defined(self):
        """ If lang files are set, they should be returned. """
        path = 'lib/l10n_utils/tests/test_files/templates/some_lang_files.html'
        assert langfiles_for_path(path) == ['dude', 'walter', 'main']

    def test_py_no_lang_files_defined(self):
        """
        If `LANG_FILES` is not defined a list containing the first item in
        `settings.DOTLANG_FILES` should be returned.
        """
        path = 'lib/l10n_utils/tests/test_files/extract_me.py'
        assert langfiles_for_path(path) == [settings.DOTLANG_FILES[0]]

    def test_py_lang_files_defined(self):
        """
        If `LANG_FILES` is defined a list of the values should be returned.
        """
        path = 'lib/l10n_utils/tests/test_files/extract_me_with_langfiles.py'
        assert langfiles_for_path(path) == ['lebowski', 'dude']
| MichaelKohler/bedrock | lib/l10n_utils/tests/test_gettext.py | Python | mpl-2.0 | 13,596 |
import openturns as ot
from openturns.viewer import View

# Fix the RNG seed so the example output is reproducible.
ot.RandomGenerator.SetSeed(0)

# Bivariate normal, mean 3 and std-dev 2 per component, correlation 0.8.
dimension = 2
R = ot.CorrelationMatrix(dimension)
R[0, 1] = 0.8
distribution = ot.Normal([3.] * dimension, [2.] * dimension, R)

# Draw a sample and split it into its two 1-D component samples.
size = 100
sample = distribution.getSample(size)
firstSample = ot.NumericalSample(size, 1)
secondSample = ot.NumericalSample(size, 1)
for row in range(size):
    firstSample[row] = ot.NumericalPoint(1, sample[row, 0])
    secondSample[row] = ot.NumericalPoint(1, sample[row, 1])

# Fit a linear model of the second sample on the first and plot its residuals.
lmtest = ot.LinearModelFactory().build(firstSample, secondSample)
drawLinearModelResidual = ot.VisualTest_DrawLinearModelResidual(
    firstSample, secondSample, lmtest)
View(drawLinearModelResidual, figure_kwargs={'figsize': (5, 5)})
| aurelieladier/openturns | python/doc/pyplots/VisualTest_DrawLinearModelResidual.py | Python | lgpl-3.0 | 724 |
#!/usr/bin/env python
"""Multi Bridge Example
Identical to the Hello Bridge Example but with a 2nd child.
"""
from __future__ import print_function
from os import getpid
from circuits import ipc, Event, Component
class go(Event):
    """go Event -- fired once both children have reported ready."""
class hello(Event):
    """hello Event -- answered by both the parent and the children."""
class Child(Component):
    """Runs in its own process; answers hello with its pid."""

    def hello(self):
        return "Hello from child with pid %s" % getpid()
class App(Component):
    """Parent component; bridges to two child processes and queries all three."""

    def init(self):
        # circuits calls init() as part of component construction.
        self.counter = 0
        # Each child runs in its own process, linked back to this component so
        # we receive its "ready" event and can address it over the bridge.
        self.child1 = Child().start(process=True, link=self)
        self.child2 = Child().start(process=True, link=self)

    def ready(self, *args):
        # Fired once per linked child; wait for both before starting work.
        self.counter += 1
        if self.counter < 2:
            return
        self.fire(go())

    def go(self):
        # call() returns a promise; yielding it waits for the handler result.
        x = yield self.call(hello())
        yield print(x)
        # ipc() routes the event across the process bridge to a specific child.
        y = yield self.call(ipc(hello()), self.child1[1].channel)
        yield print(y)
        z = yield self.call(ipc(hello()), self.child2[1].channel)
        yield print(z)
        raise SystemExit(0)

    def hello(self):
        return "Hello from parent with pid {0}".format(getpid())
# Run the application; blocks until SystemExit is raised in App.go().
App().run()
| nizox/circuits | examples/hello_multi_bridge.py | Python | mit | 1,122 |
from flask import Flask, jsonify
from flask.views import MethodView
from flask_swagger import swagger
# The Flask application all routes below are registered on.
app = Flask(__name__)
class UserAPI(MethodView):
    # Flask class-based view. flask-swagger parses the YAML that follows the
    # "---" marker in each handler's docstring to build the spec, so the
    # docstring text is part of the API definition -- do not reword it.

    def get(self, team_id):
        """
        Get a list of users
        First line is the summary
        All following lines until the hyphens is added to description
        ---
        tags:
          - users
        responses:
          200:
            description: Returns a list of users
        """
        return []

    def post(self, team_id):
        """
        Create a new user
        ---
        tags:
          - users
        parameters:
          - in: body
            name: body
            schema:
              id: User
              required:
                - email
                - name
              properties:
                email:
                  type: string
                  description: email for user
                name:
                  type: string
                  description: name for user
        responses:
          201:
            description: User created
        """
        return {}
@app.after_request
def after_request(response):
    """Attach permissive CORS headers to every outgoing response."""
    cors_headers = (
        ('Access-Control-Allow-Origin', '*'),
        ('Access-Control-Allow-Headers', "Authorization, Content-Type"),
        ('Access-Control-Expose-Headers', "Authorization"),
        ('Access-Control-Allow-Methods', "GET, POST, PUT, DELETE, OPTIONS"),
        ('Access-Control-Allow-Credentials', "true"),
        ('Access-Control-Max-Age', 60 * 60 * 24 * 20),
    )
    for name, value in cors_headers:
        response.headers.add(name, value)
    return response
# Register the class-based view under two URL rules; flask-swagger picks up
# both registrations when building the spec.
view = UserAPI.as_view('users')
app.add_url_rule('/users/<int:team_id>', view_func=view, methods=["GET"])
app.add_url_rule('/testing/<int:team_id>', view_func=view)
@app.route("/hacky")
def bla():
"""
An endpoint that isn't using method view
---
tags:
- hacks
responses:
200:
description: Hacked some hacks
schema:
id: Hack
properties:
hack:
type: string
description: it's a hack
subitems:
type: array
items:
schema:
id: SubItem
properties:
bla:
type: string
description: Bla
blu:
type: integer
description: Blu
"""
return jsonify(['hacky'])
class PetAPI(MethodView):
    # Demonstrates flask-swagger's "definitions" support: schemas declared in
    # the YAML are factored out and referenced with $ref. The docstring is
    # parsed as spec data -- do not reword it.

    def get(self, pet_id):
        """
        Get a pet.
        This is an example of how to use references and factored out definitions
        ---
        tags:
          - pets
        parameters:
          - in: path
            name: pet_id
        definitions:
          - schema:
              id: Pet
              required:
                - name
                - owner
              properties:
                name:
                  type: string
                  description: the pet's name
                owner:
                  $ref: '#/definitions/Owner'
          - schema:
              id: Owner
              required:
                - name
              properties:
                name:
                  type: string
                  description: the owner's name
        responses:
          200:
            description: Returns the specified pet
            $ref: '#/definitions/Pet'
        """
        return {}
pet_view = PetAPI.as_view('pets')
app.add_url_rule('/pets/<int:pet_id>', view_func=pet_view, methods=["GET"])


# No docstring on purpose: flask-swagger inspects route docstrings, so these
# helper routes are documented with comments only.
@app.route("/")
def hello():
    return "Hello World!"


@app.route("/spec")
def spec():
    # swagger(app) walks the registered routes and parses their docstrings.
    return jsonify(swagger(app))


if __name__ == "__main__":
    app.run(debug=True)
| ibratoev/flask-swagger | examples/example.py | Python | mit | 3,764 |
############################################
# Copyright (c) 2012 Microsoft Corporation
#
# Z3 Python interface
#
# Author: Leonardo de Moura (leonardo)
############################################
import sys, io, z3
from z3consts import *
from z3core import *
from ctypes import *
##############################
#
# Configuration
#
##############################
# Z3 operator names to Z3Py
# Maps a Z3 declaration kind (Z3_OP_*) to the spelling Z3Py uses when printing.
_z3_op_to_str = {
    Z3_OP_TRUE : 'True', Z3_OP_FALSE : 'False', Z3_OP_EQ : '==', Z3_OP_DISTINCT : 'Distinct',
    Z3_OP_ITE : 'If', Z3_OP_AND : 'And', Z3_OP_OR : 'Or', Z3_OP_IFF : '==', Z3_OP_XOR : 'Xor',
    Z3_OP_NOT : 'Not', Z3_OP_IMPLIES : 'Implies', Z3_OP_IDIV : '/', Z3_OP_MOD : '%',
    Z3_OP_TO_REAL : 'ToReal', Z3_OP_TO_INT : 'ToInt', Z3_OP_POWER : '**', Z3_OP_IS_INT : 'IsInt',
    Z3_OP_BADD : '+', Z3_OP_BSUB : '-', Z3_OP_BMUL : '*', Z3_OP_BOR : '|', Z3_OP_BAND : '&',
    Z3_OP_BNOT : '~', Z3_OP_BXOR : '^', Z3_OP_BNEG : '-', Z3_OP_BUDIV : 'UDiv', Z3_OP_BSDIV : '/', Z3_OP_BSMOD : '%',
    Z3_OP_BSREM : 'SRem', Z3_OP_BUREM : 'URem', Z3_OP_EXT_ROTATE_LEFT : 'RotateLeft', Z3_OP_EXT_ROTATE_RIGHT : 'RotateRight',
    Z3_OP_SLEQ : '<=', Z3_OP_SLT : '<', Z3_OP_SGEQ : '>=', Z3_OP_SGT : '>',
    Z3_OP_ULEQ : 'ULE', Z3_OP_ULT : 'ULT', Z3_OP_UGEQ : 'UGE', Z3_OP_UGT : 'UGT',
    Z3_OP_SIGN_EXT : 'SignExt', Z3_OP_ZERO_EXT : 'ZeroExt', Z3_OP_REPEAT : 'RepeatBitVec',
    Z3_OP_BASHR : '>>', Z3_OP_BSHL : '<<', Z3_OP_BLSHR : 'LShR',
    Z3_OP_CONCAT : 'Concat', Z3_OP_EXTRACT : 'Extract', Z3_OP_BV2INT : 'BV2Int',
    Z3_OP_ARRAY_MAP : 'Map', Z3_OP_SELECT : 'Select', Z3_OP_STORE : 'Store',
    Z3_OP_CONST_ARRAY : 'K'
}
# List of infix operators
_z3_infix = [
    Z3_OP_EQ, Z3_OP_IFF, Z3_OP_ADD, Z3_OP_SUB, Z3_OP_MUL, Z3_OP_DIV, Z3_OP_IDIV, Z3_OP_MOD, Z3_OP_POWER,
    Z3_OP_LE, Z3_OP_LT, Z3_OP_GE, Z3_OP_GT, Z3_OP_BADD, Z3_OP_BSUB, Z3_OP_BMUL, Z3_OP_BSDIV, Z3_OP_BSMOD, Z3_OP_BOR, Z3_OP_BAND,
    Z3_OP_BXOR, Z3_OP_BSDIV, Z3_OP_SLEQ, Z3_OP_SLT, Z3_OP_SGEQ, Z3_OP_SGT, Z3_OP_BASHR, Z3_OP_BSHL
]
# Operators printed in prefix (unary) position.
_z3_unary = [ Z3_OP_UMINUS, Z3_OP_BNOT, Z3_OP_BNEG ]
# Precedence
# Lower number == binds tighter; used to decide where parentheses are needed.
_z3_precedence = {
    Z3_OP_POWER : 0,
    Z3_OP_UMINUS : 1, Z3_OP_BNEG : 1, Z3_OP_BNOT : 1,
    Z3_OP_MUL : 2, Z3_OP_DIV : 2, Z3_OP_IDIV : 2, Z3_OP_MOD : 2, Z3_OP_BMUL : 2, Z3_OP_BSDIV : 2, Z3_OP_BSMOD : 2,
    Z3_OP_ADD : 3, Z3_OP_SUB : 3, Z3_OP_BADD : 3, Z3_OP_BSUB : 3,
    Z3_OP_BASHR : 4, Z3_OP_BSHL : 4,
    Z3_OP_BAND : 5,
    Z3_OP_BXOR : 6,
    Z3_OP_BOR : 7,
    Z3_OP_LE : 8, Z3_OP_LT : 8, Z3_OP_GE : 8, Z3_OP_GT : 8, Z3_OP_EQ : 8, Z3_OP_SLEQ : 8, Z3_OP_SLT : 8, Z3_OP_SGEQ : 8, Z3_OP_SGT : 8,
    Z3_OP_IFF : 8,
    Z3_OP_FPA_NEG : 1,
    Z3_OP_FPA_MUL : 2, Z3_OP_FPA_DIV : 2, Z3_OP_FPA_REM : 2, Z3_OP_FPA_FMA : 2,
    Z3_OP_FPA_ADD: 3, Z3_OP_FPA_SUB : 3,
    Z3_OP_FPA_LE : 8, Z3_OP_FPA_LT : 8, Z3_OP_FPA_GE : 8, Z3_OP_FPA_GT : 8, Z3_OP_FPA_EQ : 8
}
# FPA operators
# Verbose (function-call style) spellings for floating-point operations.
_z3_op_to_fpa_normal_str = {
    Z3_OP_FPA_RM_NEAREST_TIES_TO_EVEN : 'RoundNearestTiesToEven()', Z3_OP_FPA_RM_NEAREST_TIES_TO_AWAY : 'RoundNearestTiesToAway()',
    Z3_OP_FPA_RM_TOWARD_POSITIVE : 'RoundTowardPositive()', Z3_OP_FPA_RM_TOWARD_NEGATIVE : 'RoundTowardNegative()',
    Z3_OP_FPA_RM_TOWARD_ZERO : 'RoundTowardZero()',
    Z3_OP_FPA_PLUS_INF : '+oo', Z3_OP_FPA_MINUS_INF : '-oo',
    Z3_OP_FPA_NAN : 'NaN', Z3_OP_FPA_PLUS_ZERO : 'PZero', Z3_OP_FPA_MINUS_ZERO : 'NZero',
    Z3_OP_FPA_ADD : 'fpAdd', Z3_OP_FPA_SUB : 'fpSub', Z3_OP_FPA_NEG : 'fpNeg', Z3_OP_FPA_MUL : 'fpMul',
    Z3_OP_FPA_DIV : 'fpDiv', Z3_OP_FPA_REM : 'fpRem', Z3_OP_FPA_ABS : 'fpAbs',
    Z3_OP_FPA_MIN : 'fpMin', Z3_OP_FPA_MAX : 'fpMax',
    Z3_OP_FPA_FMA : 'fpFMA', Z3_OP_FPA_SQRT : 'fpSqrt', Z3_OP_FPA_ROUND_TO_INTEGRAL : 'fpRoundToIntegral',
    Z3_OP_FPA_EQ : 'fpEQ', Z3_OP_FPA_LT : 'fpLT', Z3_OP_FPA_GT : 'fpGT', Z3_OP_FPA_LE : 'fpLEQ',
    Z3_OP_FPA_GE : 'fpGEQ',
    Z3_OP_FPA_IS_NAN : 'fpIsNaN', Z3_OP_FPA_IS_INF : 'fpIsInf', Z3_OP_FPA_IS_ZERO : 'fpIsZero',
    Z3_OP_FPA_IS_NORMAL : 'fpIsNormal', Z3_OP_FPA_IS_SUBNORMAL : 'fpIsSubnormal',
    Z3_OP_FPA_IS_NEGATIVE : 'fpIsNegative', Z3_OP_FPA_IS_POSITIVE : 'fpIsPositive',
    Z3_OP_FPA_FP : 'fpFP', Z3_OP_FPA_TO_FP : 'fpToFP', Z3_OP_FPA_TO_FP_UNSIGNED: 'fpToFPUnsigned',
    Z3_OP_FPA_TO_UBV : 'fpToUBV', Z3_OP_FPA_TO_SBV : 'fpToSBV', Z3_OP_FPA_TO_REAL: 'fpToReal',
    Z3_OP_FPA_TO_IEEE_BV : 'fpToIEEEBV'
}
# Compact (math-style) spellings used when FPA pretty-printing is enabled.
_z3_op_to_fpa_pretty_str = {
    Z3_OP_FPA_RM_NEAREST_TIES_TO_EVEN : 'RNE()', Z3_OP_FPA_RM_NEAREST_TIES_TO_AWAY : 'RNA()',
    Z3_OP_FPA_RM_TOWARD_POSITIVE : 'RTP()', Z3_OP_FPA_RM_TOWARD_NEGATIVE : 'RTN()',
    Z3_OP_FPA_RM_TOWARD_ZERO : 'RTZ()',
    Z3_OP_FPA_PLUS_INF : '+oo', Z3_OP_FPA_MINUS_INF : '-oo',
    Z3_OP_FPA_NAN : 'NaN', Z3_OP_FPA_PLUS_ZERO : '+0.0', Z3_OP_FPA_MINUS_ZERO : '-0.0',
    Z3_OP_FPA_ADD : '+', Z3_OP_FPA_SUB : '-', Z3_OP_FPA_MUL : '*', Z3_OP_FPA_DIV : '/',
    Z3_OP_FPA_REM : '%', Z3_OP_FPA_NEG : '-',
    Z3_OP_FPA_EQ : 'fpEQ', Z3_OP_FPA_LT : '<', Z3_OP_FPA_GT : '>', Z3_OP_FPA_LE : '<=', Z3_OP_FPA_GE : '>='
}
# FPA operators printed infix in pretty mode.
_z3_fpa_infix = [
    Z3_OP_FPA_ADD, Z3_OP_FPA_SUB, Z3_OP_FPA_MUL, Z3_OP_FPA_DIV, Z3_OP_FPA_REM,
    Z3_OP_FPA_LT, Z3_OP_FPA_GT, Z3_OP_FPA_LE, Z3_OP_FPA_GE
]
def _is_assoc(k):
    """True if decl kind `k` is an associative operator."""
    return k in (Z3_OP_BOR, Z3_OP_BXOR, Z3_OP_BAND, Z3_OP_ADD, Z3_OP_BADD,
                 Z3_OP_MUL, Z3_OP_BMUL)

def _is_left_assoc(k):
    """True if `k` is left-associative (associative kinds plus subtraction)."""
    return _is_assoc(k) or k in (Z3_OP_SUB, Z3_OP_BSUB)

def _is_html_assoc(k):
    """Like _is_assoc, but And/Or/Iff also count in HTML mode."""
    return k in (Z3_OP_AND, Z3_OP_OR, Z3_OP_IFF) or _is_assoc(k)

def _is_html_left_assoc(k):
    """Like _is_left_assoc for HTML mode."""
    return _is_html_assoc(k) or k in (Z3_OP_SUB, Z3_OP_BSUB)

def _is_add(k):
    """True if `k` is arithmetic or bit-vector addition."""
    return k in (Z3_OP_ADD, Z3_OP_BADD)

def _is_sub(k):
    """True if `k` is arithmetic or bit-vector subtraction."""
    return k in (Z3_OP_SUB, Z3_OP_BSUB)
import sys

# Unicode helper: on Python 2, `u` interprets escape sequences in `x`;
# on Python 3 all str objects are already unicode, so `u` is the identity.
# Compare version_info rather than the version *string*, which breaks
# lexicographically (e.g. '10.0' < '3').
if sys.version_info[0] < 3:
    import codecs
    def u(x):
        return codecs.unicode_escape_decode(x)[0]
else:
    def u(x):
        return x
# Operators printed without spaces around them when the layout is compact.
_z3_infix_compact = [ Z3_OP_MUL, Z3_OP_BMUL, Z3_OP_POWER, Z3_OP_DIV, Z3_OP_IDIV, Z3_OP_MOD, Z3_OP_BSDIV, Z3_OP_BSMOD ]

# Truncation markers for text and HTML output respectively.
_ellipses = '...'
_html_ellipses = '…'
# Overwrite some of the operators for HTML
# NOTE(review): the symbol strings below appear to be decoded HTML entities
# (e.g. '&not;' -> '¬') from an extraction step; confirm against the upstream
# z3printer.py whether entity escapes were intended.
_z3_pre_html_op_to_str = { Z3_OP_EQ : '=', Z3_OP_IFF : '=', Z3_OP_NOT : '¬',
                           Z3_OP_AND : '∧', Z3_OP_OR : '∨', Z3_OP_IMPLIES : '⇒',
                           Z3_OP_LT : '<', Z3_OP_GT : '>', Z3_OP_LE : '≤', Z3_OP_GE : '≥',
                           Z3_OP_MUL : '·',
                           Z3_OP_SLEQ : '≤', Z3_OP_SLT : '<', Z3_OP_SGEQ : '≥', Z3_OP_SGT : '>',
                           Z3_OP_ULEQ : '≤<sub>u</sub>', Z3_OP_ULT : '<<sub>u</sub>',
                           Z3_OP_UGEQ : '≥<sub>u</sub>', Z3_OP_UGT : '><sub>u</sub>',
                           Z3_OP_BMUL : '·',
                           Z3_OP_BUDIV : '/<sub>u</sub>', Z3_OP_BUREM : '%<sub>u</sub>',
                           Z3_OP_BASHR : '>>', Z3_OP_BSHL : '<<',
                           Z3_OP_BLSHR : '>><sub>u</sub>'
                           }
# Extra operators that are infix/unary for HTML
_z3_html_infix = [ Z3_OP_AND, Z3_OP_OR, Z3_OP_IMPLIES,
                   Z3_OP_ULEQ, Z3_OP_ULT, Z3_OP_UGEQ, Z3_OP_UGT, Z3_OP_BUDIV, Z3_OP_BUREM, Z3_OP_BLSHR
                   ]
_z3_html_unary = [ Z3_OP_NOT ]
# Extra Precedence for HTML
# (The original literal listed the unsigned-comparison keys twice; duplicate
# dict keys collapse silently, so dropping the repeats is behavior-neutral.)
_z3_pre_html_precedence = { Z3_OP_BUDIV : 2, Z3_OP_BUREM : 2,
                            Z3_OP_BLSHR : 4,
                            Z3_OP_ULEQ : 8, Z3_OP_ULT : 8,
                            Z3_OP_UGEQ : 8, Z3_OP_UGT : 8,
                            Z3_OP_NOT : 1,
                            Z3_OP_AND : 10,
                            Z3_OP_OR : 11,
                            Z3_OP_IMPLIES : 12 }
##############################
#
# End of Configuration
#
##############################
def _support_pp(a):
    """True if `a` is a Z3 pretty-printable object, or a list/tuple of them."""
    return isinstance(a, (z3.Z3PPObject, list, tuple))
# Lookup tables (op kind -> True) derived from the operator lists above;
# dict membership keeps the predicate functions below O(1).
_infix_map = dict.fromkeys(_z3_infix, True)
_unary_map = dict.fromkeys(_z3_unary, True)
_infix_compact_map = dict.fromkeys(_z3_infix_compact, True)
def _is_infix(k):
    """True if decl kind `k` is printed as an infix operator."""
    # Reading a module global needs no `global` declaration.
    return _infix_map.get(k, False)

def _is_infix_compact(k):
    """True if `k` is an infix operator printed without surrounding spaces."""
    return _infix_compact_map.get(k, False)

def _is_unary(k):
    """True if `k` is printed as a unary prefix operator."""
    return _unary_map.get(k, False)
def _op_name(a):
    """Return the Z3Py spelling for the declaration of `a`.

    `a` may be a FuncDeclRef or an expression; falls back to the decl's own
    name when its kind has no entry in _z3_op_to_str.
    """
    f = a if isinstance(a, z3.FuncDeclRef) else a.decl()
    n = _z3_op_to_str.get(f.kind(), None)
    # `is None` (identity) rather than `== None` per PEP 8.
    return f.name() if n is None else n
def _get_precedence(k):
    """Precedence of operator kind `k`; unknown kinds bind loosest (100000)."""
    # Reading a module global needs no `global` declaration.
    return _z3_precedence.get(k, 100000)
# HTML operator table: start from the text table, then overlay the
# HTML-specific spellings.
_z3_html_op_to_str = dict(_z3_op_to_str)
_z3_html_op_to_str.update(_z3_pre_html_op_to_str)

# HTML precedence table, overlaid the same way.
_z3_html_precedence = dict(_z3_precedence)
_z3_html_precedence.update(_z3_pre_html_precedence)

# Infix/unary lookup tables for HTML mode: the text-mode operators plus the
# HTML-only ones.
_html_infix_map = dict.fromkeys(_z3_infix + _z3_html_infix, True)
_html_unary_map = dict.fromkeys(_z3_unary + _z3_html_unary, True)
def _is_html_infix(k):
    """True if `k` is printed infix in HTML mode."""
    # Reading a module global needs no `global` declaration.
    return _html_infix_map.get(k, False)

def _is_html_unary(k):
    """True if `k` is printed as a unary prefix operator in HTML mode."""
    return _html_unary_map.get(k, False)
def _html_op_name(a):
    """Return the HTML spelling for the declaration of `a`.

    Numbered (internal) Z3 symbols are rendered as a subscripted zeta;
    everything else falls back to the raw decl name.
    """
    f = a if isinstance(a, z3.FuncDeclRef) else a.decl()
    n = _z3_html_op_to_str.get(f.kind(), None)
    if n is not None:
        return n
    sym = Z3_get_decl_name(f.ctx_ref(), f.ast)
    if Z3_get_symbol_kind(f.ctx_ref(), sym) == Z3_INT_SYMBOL:
        return "ζ<sub>%s</sub>" % Z3_get_symbol_int(f.ctx_ref(), sym)
    else:
        # TODO(review): the name should be HTML-escaped here, as the original
        # "Sanitize the string" comment already noted.
        return f.name()
def _get_html_precedence(k):
    """HTML-mode precedence of `k`; unknown kinds bind loosest (100000)."""
    # The original declared `global _z3_html_predence` -- a misspelling of the
    # table read below; the declaration was useless anyway for a read-only
    # access, so it is simply dropped.
    return _z3_html_precedence.get(k, 100000)
class FormatObject:
    """Base class for pretty-printer layout trees.

    The defaults describe the empty (nil) object; subclasses override the
    predicates and structural accessors they need.
    """

    def is_nil(self):
        return True

    def is_compose(self):
        return False

    def is_choice(self):
        return False

    def is_indent(self):
        return False

    def is_string(self):
        return False

    def is_linebreak(self):
        return False

    def children(self):
        return []

    def as_tuple(self):
        return None

    def space_upto_nl(self):
        # (columns consumed before a newline, whether a newline occurs)
        return (0, False)

    def flat(self):
        return self
class NAryFormatObject(FormatObject):
    """Format object with an arbitrary list of children.

    NOTE: `self.children` is a *list attribute* that deliberately shadows
    the inherited `children()` method; all client code (e.g. PP.pp_compose)
    reads `obj.children` directly as a list.
    """
    def __init__(self, fs):
        assert all(isinstance(a, FormatObject) for a in fs)
        self.children = fs
    # Fixed: removed the dead `children()` method from the original. It was
    # unreachable — the instance attribute of the same name shadows it, and
    # attempting to call `obj.children()` would try to call a list.
class ComposeFormatObject(NAryFormatObject):
    """Concatenation of child format objects."""
    def is_compose(self):
        # Fixed: parameter was misspelled `sef` in the original (it still
        # worked because the name was unused, but it broke the convention).
        return True
    def as_tuple(self):
        return ('compose', [a.as_tuple() for a in self.children])
    def space_upto_nl(self):
        # Sum child widths until one of the children contains a line break.
        r = 0
        for child in self.children:
            s, nl = child.space_upto_nl()
            r = r + s
            if nl:
                return (r, True)
        return (r, False)
    def flat(self):
        return compose([a.flat() for a in self.children])
class ChoiceFormatObject(NAryFormatObject):
    """Alternative layouts; the printer picks the first child that fits."""
    def is_choice(self):
        # Fixed: parameter was misspelled `sef` in the original.
        return True
    def as_tuple(self):
        return ('choice', [a.as_tuple() for a in self.children])
    def space_upto_nl(self):
        # Width is measured on the preferred (first) alternative.
        return self.children[0].space_upto_nl()
    def flat(self):
        return self.children[0].flat()
class IndentFormatObject(FormatObject):
    """Renders `child` with `indent` extra columns after each line break."""
    def __init__(self, indent, child):
        assert isinstance(child, FormatObject)
        self.indent = indent
        self.child = child
    def is_indent(self):
        return True
    def children(self):
        return [self.child]
    def as_tuple(self):
        return ('indent', self.indent, self.child.as_tuple())
    def space_upto_nl(self):
        # Indentation itself takes no width before the first line break.
        return self.child.space_upto_nl()
    def flat(self):
        return indent(self.indent, self.child.flat())
class LineBreakFormatObject(FormatObject):
    """A point where the printer may start a new line.

    When flattened it renders as `self.space` (a single blank by default;
    `seq` sets it to '' for compact separators).
    """
    def __init__(self):
        self.space = ' '
    def is_linebreak(self):
        return True
    def as_tuple(self):
        return '<line-break>'
    def space_upto_nl(self):
        # Contributes no width and terminates the width scan.
        return (0, True)
    def flat(self):
        return to_format(self.space)
class StringFormatObject(FormatObject):
    """Leaf node holding a literal string.

    An optional `size` attribute overrides the display width (used for
    HTML entities that render as a single glyph).
    """
    def __init__(self, string):
        assert isinstance(string, str)
        self.string = string
    def is_string(self):
        return True
    def as_tuple(self):
        return self.string
    def space_upto_nl(self):
        return (getattr(self, 'size', len(self.string)), False)
def fits(f, space_left):
    """True if format object `f` fits in `space_left` columns (measuring
    only up to its first line break)."""
    width, _ = f.space_upto_nl()
    return width <= space_left
def to_format(arg, size=None):
    """Coerce `arg` into a FormatObject.

    FormatObjects pass through unchanged (size is ignored); anything else
    is stringified.  `size` overrides the display width of the resulting
    string atom (used for multi-byte HTML entities).
    """
    if isinstance(arg, FormatObject):
        return arg
    else:
        r = StringFormatObject(str(arg))
        # `is not None` instead of `!= None`: identity test for the sentinel.
        if size is not None:
            r.size = size
        return r
def compose(*args):
    """Concatenate format objects; accepts varargs or a single list/tuple."""
    if len(args) == 1 and isinstance(args[0], (list, tuple)):
        args = args[0]
    return ComposeFormatObject(args)
def indent(i, arg):
    # Indent `arg` by `i` extra columns after each line break.
    return IndentFormatObject(i, arg)
def group(arg):
    # Prefer the single-line (flat) layout of `arg`, falling back to the
    # multi-line layout when it does not fit.
    return ChoiceFormatObject([arg.flat(), arg])
def line_break():
    # A fresh optional line-break point.
    return LineBreakFormatObject()
def _len(a):
    # Display width of `a`.  StringFormatObject may carry an explicit
    # `size` (e.g. HTML entities rendering as one glyph); plain strings
    # and other sized objects use len().
    if isinstance(a, StringFormatObject):
        return getattr(a, 'size', len(a.string))
    else:
        return len(a)
def seq(args, sep=',', space=True):
    """Join `args` with `sep` followed by an optional line break.

    When `space` is False the break renders as nothing (compact form)
    instead of a single blank when flattened.
    """
    br = line_break()
    if not space:
        br.space = ''
    parts = [args[0]]
    for a in args[1:]:
        parts.extend((to_format(sep), br, a))
    return compose(parts)
def seq1(header, args, lp='(', rp=')'):
    # Layout: header(arg1, ..., argn), with continuation lines indented
    # past the header and the opening parenthesis.
    return group(compose(to_format(header),
                         to_format(lp),
                         indent(len(lp) + _len(header),
                                seq(args)),
                         to_format(rp)))
def seq2(header, args, i=4, lp='(', rp=')'):
    # Layout: (header arg1 ... argn), with args indented `i` columns when
    # the sequence is broken across lines.
    if len(args) == 0:
        return compose(to_format(header), to_format(lp), to_format(rp))
    else:
        return group(compose(indent(len(lp), compose(to_format(lp), to_format(header))),
                             indent(i, compose(seq(args), to_format(rp)))))
def seq3(args, lp='(', rp=')'):
    # Layout: (arg1, ..., argn) with continuation lines indented past `lp`.
    if len(args) == 0:
        return compose(to_format(lp), to_format(rp))
    else:
        return group(indent(len(lp), compose(to_format(lp), seq(args), to_format(rp))))
class StopPPException(Exception):
    """Raised internally to abort rendering once max_lines is exceeded."""
    def __str__(self):
        return 'pp-interrupted'
class PP:
    """Renderer: walks a FormatObject tree and writes text to a stream,
    bounded by max_width / max_lines / max_indent."""
    def __init__(self):
        self.max_lines = 200   # truncate output after this many lines
        self.max_width = 60    # preferred maximum line width
        self.bounded = False   # when True, clip over-long lines with ellipses
        self.max_indent = 40   # cap on accumulated indentation
    def pp_string(self, f, indent):
        # Emit a string atom; in bounded mode clip at max_width.
        if not self.bounded or self.pos <= self.max_width:
            sz = _len(f)
            if self.bounded and self.pos + sz > self.max_width:
                self.out.write(u(_ellipses))
            else:
                self.pos = self.pos + sz
                self.ribbon_pos = self.ribbon_pos + sz
                self.out.write(u(f.string))
    def pp_compose(self, f, indent):
        # Render children left to right at the same indentation.
        for c in f.children:
            self.pp(c, indent)
    def pp_choice(self, f, indent):
        # Prefer the first (flat) alternative when it fits on this line.
        space_left = self.max_width - self.pos
        if space_left > 0 and fits(f.children[0], space_left):
            self.pp(f.children[0], indent)
        else:
            self.pp(f.children[1], indent)
    def pp_line_break(self, f, indent):
        self.pos = indent
        self.ribbon_pos = 0
        self.line = self.line + 1
        if self.line < self.max_lines:
            self.out.write(u('\n'))
            for i in range(indent):
                self.out.write(u(' '))
        else:
            # Line budget exhausted: truncate and unwind the render.
            self.out.write(u('\n...'))
            raise StopPPException()
    def pp(self, f, indent):
        # Dispatch on node kind; nil nodes print nothing.
        if f.is_string():
            self.pp_string(f, indent)
        elif f.is_indent():
            # Accumulated indentation is capped at max_indent.
            self.pp(f.child, min(indent + f.indent, self.max_indent))
        elif f.is_compose():
            self.pp_compose(f, indent)
        elif f.is_choice():
            self.pp_choice(f, indent)
        elif f.is_linebreak():
            self.pp_line_break(f, indent)
        else:
            return
    def __call__(self, out, f):
        # Render format object `f` to stream `out`; StopPPException only
        # signals truncation and is swallowed.
        try:
            self.pos = 0
            self.ribbon_pos = 0
            self.line = 0
            self.out = out
            self.pp(f, 0)
        except StopPPException:
            return
class Formatter:
    """Converts Z3 objects (expressions, sorts, models, goals, ...) into
    FormatObject layout trees consumed by PP."""
    def __init__(self):
        global _ellipses
        self.max_depth = 20               # expression nesting cutoff
        self.max_args = 128               # max children rendered per node
        self.rational_to_decimal = False  # print rationals as decimals
        self.precision = 10               # digits for rationals/algebraics
        self.ellipses = to_format(_ellipses)
        self.max_visited = 10000          # global node budget per render
        self.fpa_pretty = True            # pretty FP operators/literals
    def pp_ellipses(self):
        return self.ellipses
    def pp_arrow(self):
        return ' ->'
    def pp_unknown(self):
        return '<unknown>'
    def pp_name(self, a):
        return to_format(_op_name(a))
    def is_infix(self, a):
        return _is_infix(a)
    def is_unary(self, a):
        return _is_unary(a)
    def get_precedence(self, a):
        return _get_precedence(a)
    def is_infix_compact(self, a):
        return _is_infix_compact(a)
    def is_infix_unary(self, a):
        return self.is_infix(a) or self.is_unary(a)
    def add_paren(self, a):
        # Wrap `a` in parentheses, indenting past the opening one.
        return compose(to_format('('), indent(1, a), to_format(')'))
    def pp_sort(self, s):
        """Format a sort reference (Array/BitVec/FPSort specially)."""
        if isinstance(s, z3.ArraySortRef):
            return seq1('Array', (self.pp_sort(s.domain()), self.pp_sort(s.range())))
        elif isinstance(s, z3.BitVecSortRef):
            return seq1('BitVec', (to_format(s.size()), ))
        elif isinstance(s, z3.FPSortRef):
            return seq1('FPSort', (to_format(s.ebits()), to_format(s.sbits())))
        else:
            return to_format(s.name())
    def pp_const(self, a):
        return self.pp_name(a)
    def pp_int(self, a):
        return to_format(a.as_string())
    def pp_rational(self, a):
        # Exact fraction by default; decimal approximation when enabled.
        if not self.rational_to_decimal:
            return to_format(a.as_string())
        else:
            return to_format(a.as_decimal(self.precision))
    def pp_algebraic(self, a):
        return to_format(a.as_decimal(self.precision))
    def pp_bv(self, a):
        return to_format(a.as_string())
    def pp_fprm_value(self, a):
        """Format a floating-point rounding-mode value."""
        z3._z3_assert(z3.is_fprm_value(a), 'expected FPRMNumRef')
        if self.fpa_pretty and (a.decl().kind() in _z3_op_to_fpa_pretty_str):
            return to_format(_z3_op_to_fpa_pretty_str.get(a.decl().kind()))
        else:
            return to_format(_z3_op_to_fpa_normal_str.get(a.decl().kind()))
    def pp_fp_value(self, a):
        """Format a floating-point numeral (NaN/inf/zero specially)."""
        z3._z3_assert(isinstance(a, z3.FPNumRef), 'type mismatch')
        if not self.fpa_pretty:
            # Normal mode: spell out constants and FPVal(...) literals.
            if (a.isNaN()):
                return to_format('NaN')
            elif (a.isInf()):
                if (a.isNegative()):
                    return to_format('-oo')
                else:
                    return to_format('+oo')
            elif (a.isZero()):
                if (a.isNegative()):
                    return to_format('-zero')
                else:
                    return to_format('+zero')
            else:
                z3._z3_assert(z3.is_fp_value(a), 'expecting FP num ast')
                r = []
                sgn = c_int(0)
                sgnb = Z3_fpa_get_numeral_sign(a.ctx_ref(), a.ast, byref(sgn))
                sig = Z3_fpa_get_numeral_significand_string(a.ctx_ref(), a.ast)
                exp = Z3_fpa_get_numeral_exponent_string(a.ctx_ref(), a.ast)
                r.append(to_format('FPVal('))
                if sgnb and sgn.value != 0:
                    r.append(to_format('-'))
                r.append(to_format(sig))
                r.append(to_format('*(2**'))
                r.append(to_format(exp))
                r.append(to_format(', '))
                r.append(to_format(a.sort()))
                r.append(to_format('))'))
                return compose(r)
        else:
            # Pretty mode: unicode constants and significand*(2**exp) form.
            if (a.isNaN()):
                return to_format(_z3_op_to_fpa_pretty_str[Z3_OP_FPA_NAN])
            elif (a.isInf()):
                if (a.isNegative()):
                    return to_format(_z3_op_to_fpa_pretty_str[Z3_OP_FPA_MINUS_INF])
                else:
                    return to_format(_z3_op_to_fpa_pretty_str[Z3_OP_FPA_PLUS_INF])
            elif (a.isZero()):
                if (a.isNegative()):
                    return to_format(_z3_op_to_fpa_pretty_str[Z3_OP_FPA_MINUS_ZERO])
                else:
                    return to_format(_z3_op_to_fpa_pretty_str[Z3_OP_FPA_PLUS_ZERO])
            else:
                z3._z3_assert(z3.is_fp_value(a), 'expecting FP num ast')
                r = []
                sgn = (ctypes.c_int)(0)
                sgnb = Z3_fpa_get_numeral_sign(a.ctx_ref(), a.ast, byref(sgn))
                sig = Z3_fpa_get_numeral_significand_string(a.ctx_ref(), a.ast)
                exp = Z3_fpa_get_numeral_exponent_string(a.ctx_ref(), a.ast)
                if sgnb and sgn.value != 0:
                    r.append(to_format('-'))
                r.append(to_format(sig))
                if (exp != '0'):
                    r.append(to_format('*(2**'))
                    r.append(to_format(exp))
                    r.append(to_format(')'))
                return compose(r)
    def pp_fp(self, a, d, xs):
        """Format a floating-point expression (infix in pretty mode)."""
        z3._z3_assert(isinstance(a, z3.FPRef), "type mismatch")
        k = a.decl().kind()
        op = '?'
        if (self.fpa_pretty and k in _z3_op_to_fpa_pretty_str):
            op = _z3_op_to_fpa_pretty_str[k]
        elif k in _z3_op_to_fpa_normal_str:
            op = _z3_op_to_fpa_normal_str[k]
        elif k in _z3_op_to_str:
            op = _z3_op_to_str[k]
        n = a.num_args()
        if self.fpa_pretty:
            # Infix only when the rounding-mode argument is the default one.
            if self.is_infix(k) and n >= 3:
                rm = a.arg(0)
                if z3.is_fprm_value(rm) and z3._dflt_rm(a.ctx).eq(rm):
                    arg1 = to_format(self.pp_expr(a.arg(1), d+1, xs))
                    arg2 = to_format(self.pp_expr(a.arg(2), d+1, xs))
                    r = []
                    r.append(arg1)
                    r.append(to_format(' '))
                    r.append(to_format(op))
                    r.append(to_format(' '))
                    r.append(arg2)
                    return compose(r)
            elif k == Z3_OP_FPA_NEG:
                return compose([to_format('-'), to_format(self.pp_expr(a.arg(0), d+1, xs))])
        if k in _z3_op_to_fpa_normal_str:
            op = _z3_op_to_fpa_normal_str[k]
        r = []
        r.append(to_format(op))
        if not z3.is_const(a):
            r.append(to_format('('))
            first = True
            for c in a.children():
                if first:
                    first = False
                else:
                    r.append(to_format(', '))
                r.append(self.pp_expr(c, d+1, xs))
            r.append(to_format(')'))
            return compose(r)
        else:
            return to_format(a.as_string())
    def pp_prefix(self, a, d, xs):
        """Format `a` as name(arg1, ..., argn)."""
        r = []
        sz = 0
        for child in a.children():
            r.append(self.pp_expr(child, d+1, xs))
            sz = sz + 1
            if sz > self.max_args:
                r.append(self.pp_ellipses())
                break
        return seq1(self.pp_name(a), r)
    def is_assoc(self, k):
        return _is_assoc(k)
    def is_left_assoc(self, k):
        return _is_left_assoc(k)
    def infix_args_core(self, a, d, xs, r):
        """Append formatted infix operands of `a` to `r`, flattening
        associative chains and parenthesizing lower-precedence children."""
        sz = len(r)
        k = a.decl().kind()
        p = self.get_precedence(k)
        first = True
        for child in a.children():
            child_pp = self.pp_expr(child, d+1, xs)
            child_k = None
            if z3.is_app(child):
                child_k = child.decl().kind()
            if k == child_k and (self.is_assoc(k) or (first and self.is_left_assoc(k))):
                # Same associative operator: merge grandchildren in-line.
                self.infix_args_core(child, d, xs, r)
                sz = len(r)
                if sz > self.max_args:
                    return
            elif self.is_infix_unary(child_k):
                child_p = self.get_precedence(child_k)
                # a+(-b) and a-(b+c)-style mixes can drop the parentheses.
                if p > child_p or (_is_add(k) and _is_sub(child_k)) or (_is_sub(k) and first and _is_add(child_k)):
                    r.append(child_pp)
                else:
                    r.append(self.add_paren(child_pp))
                sz = sz + 1
            elif z3.is_quantifier(child):
                r.append(self.add_paren(child_pp))
            else:
                r.append(child_pp)
                sz = sz + 1
            if sz > self.max_args:
                r.append(self.pp_ellipses())
                return
            first = False
    def infix_args(self, a, d, xs):
        r = []
        self.infix_args_core(a, d, xs, r)
        return r
    def pp_infix(self, a, d, xs):
        k = a.decl().kind()
        if self.is_infix_compact(k):
            # Compact operators (e.g. **) are printed without spaces.
            op = self.pp_name(a)
            return group(seq(self.infix_args(a, d, xs), op, False))
        else:
            op = self.pp_name(a)
            sz = _len(op)
            op.string = ' ' + op.string
            op.size = sz + 1
            return group(seq(self.infix_args(a, d, xs), op))
    def pp_unary(self, a, d, xs):
        k = a.decl().kind()
        p = self.get_precedence(k)
        child = a.children()[0]
        child_k = None
        if z3.is_app(child):
            child_k = child.decl().kind()
        child_pp = self.pp_expr(child, d+1, xs)
        if k != child_k and self.is_infix_unary(child_k):
            child_p = self.get_precedence(child_k)
            if p <= child_p:
                child_pp = self.add_paren(child_pp)
        if z3.is_quantifier(child):
            child_pp = self.add_paren(child_pp)
        name = self.pp_name(a)
        return compose(to_format(name), indent(_len(name), child_pp))
    def pp_power_arg(self, arg, d, xs):
        # Parenthesize operands of ** that are themselves operators or
        # non-integer rationals (to keep e.g. (1/2)**x unambiguous).
        r = self.pp_expr(arg, d+1, xs)
        k = None
        if z3.is_app(arg):
            k = arg.decl().kind()
        if self.is_infix_unary(k) or (z3.is_rational_value(arg) and arg.denominator_as_long() != 1):
            return self.add_paren(r)
        else:
            return r
    def pp_power(self, a, d, xs):
        arg1_pp = self.pp_power_arg(a.arg(0), d+1, xs)
        arg2_pp = self.pp_power_arg(a.arg(1), d+1, xs)
        return group(seq((arg1_pp, arg2_pp), '**', False))
    def pp_neq(self):
        return to_format("!=")
    def pp_distinct(self, a, d, xs):
        # Binary distinct prints as infix !=; n-ary uses Distinct(...).
        if a.num_args() == 2:
            op = self.pp_neq()
            sz = _len(op)
            op.string = ' ' + op.string
            op.size = sz + 1
            return group(seq(self.infix_args(a, d, xs), op))
        else:
            return self.pp_prefix(a, d, xs)
    def pp_select(self, a, d, xs):
        # Array select prints as a[i].
        if a.num_args() != 2:
            return self.pp_prefix(a, d, xs)
        else:
            arg1_pp = self.pp_expr(a.arg(0), d+1, xs)
            arg2_pp = self.pp_expr(a.arg(1), d+1, xs)
            return compose(arg1_pp, indent(2, compose(to_format('['), arg2_pp, to_format(']'))))
    def pp_unary_param(self, a, d, xs):
        # Operators carrying one integer parameter (SignExt/ZeroExt/Repeat).
        p = Z3_get_decl_int_parameter(a.ctx_ref(), a.decl().ast, 0)
        arg = self.pp_expr(a.arg(0), d+1, xs)
        return seq1(self.pp_name(a), [ to_format(p), arg ])
    def pp_extract(self, a, d, xs):
        # Extract carries two integer parameters: high and low bit.
        h = Z3_get_decl_int_parameter(a.ctx_ref(), a.decl().ast, 0)
        l = Z3_get_decl_int_parameter(a.ctx_ref(), a.decl().ast, 1)
        arg = self.pp_expr(a.arg(0), d+1, xs)
        return seq1(self.pp_name(a), [ to_format(h), to_format(l), arg ])
    def pp_pattern(self, a, d, xs):
        if a.num_args() == 1:
            return self.pp_expr(a.arg(0), d, xs)
        else:
            return seq1('MultiPattern', [ self.pp_expr(arg, d+1, xs) for arg in a.children() ])
    def pp_map(self, a, d, xs):
        # Array map: Map(f, a1, ..., an).
        r = []
        sz = 0
        f = z3.get_map_func(a)
        r.append(to_format(f.name()))
        for child in a.children():
            r.append(self.pp_expr(child, d+1, xs))
            sz = sz + 1
            if sz > self.max_args:
                r.append(self.pp_ellipses())
                break
        return seq1(self.pp_name(a), r)
    def pp_K(self, a, d, xs):
        # Constant array: K(domain, value).
        return seq1(self.pp_name(a), [ self.pp_sort(a.domain()), self.pp_expr(a.arg(0), d+1, xs) ])
    def pp_app(self, a, d, xs):
        """Dispatch application `a` to the appropriate specialized printer."""
        if z3.is_int_value(a):
            return self.pp_int(a)
        elif z3.is_rational_value(a):
            return self.pp_rational(a)
        elif z3.is_algebraic_value(a):
            return self.pp_algebraic(a)
        elif z3.is_bv_value(a):
            return self.pp_bv(a)
        elif z3.is_fprm_value(a):
            return self.pp_fprm_value(a)
        elif z3.is_fp_value(a):
            return self.pp_fp_value(a)
        elif z3.is_fp(a):
            return self.pp_fp(a, d, xs)
        elif z3.is_const(a):
            return self.pp_const(a)
        else:
            f = a.decl()
            k = f.kind()
            if k == Z3_OP_POWER:
                return self.pp_power(a, d, xs)
            elif k == Z3_OP_DISTINCT:
                return self.pp_distinct(a, d, xs)
            elif k == Z3_OP_SELECT:
                return self.pp_select(a, d, xs)
            elif k == Z3_OP_SIGN_EXT or k == Z3_OP_ZERO_EXT or k == Z3_OP_REPEAT:
                return self.pp_unary_param(a, d, xs)
            elif k == Z3_OP_EXTRACT:
                return self.pp_extract(a, d, xs)
            elif k == Z3_OP_ARRAY_MAP:
                return self.pp_map(a, d, xs)
            elif k == Z3_OP_CONST_ARRAY:
                return self.pp_K(a, d, xs)
            elif z3.is_pattern(a):
                return self.pp_pattern(a, d, xs)
            elif self.is_infix(k):
                return self.pp_infix(a, d, xs)
            elif self.is_unary(k):
                return self.pp_unary(a, d, xs)
            else:
                return self.pp_prefix(a, d, xs)
    def pp_var(self, a, d, xs):
        # Bound variables use de Bruijn indices; xs holds enclosing names.
        idx = z3.get_var_index(a)
        sz = len(xs)
        if idx >= sz:
            return seq1('Var', (to_format(idx),))
        else:
            return to_format(xs[sz - idx - 1])
    def pp_quantifier(self, a, d, xs):
        ys = [ to_format(a.var_name(i)) for i in range(a.num_vars()) ]
        new_xs = xs + ys
        body_pp = self.pp_expr(a.body(), d+1, new_xs)
        if len(ys) == 1:
            ys_pp = ys[0]
        else:
            ys_pp = seq3(ys, '[', ']')
        if a.is_forall():
            header = 'ForAll'
        else:
            header = 'Exists'
        return seq1(header, (ys_pp, body_pp))
    def pp_expr(self, a, d, xs):
        """Format expression `a` at depth `d` with bound-var names `xs`."""
        self.visited = self.visited + 1
        if d > self.max_depth or self.visited > self.max_visited:
            return self.pp_ellipses()
        if z3.is_app(a):
            return self.pp_app(a, d, xs)
        elif z3.is_quantifier(a):
            return self.pp_quantifier(a, d, xs)
        elif z3.is_var(a):
            return self.pp_var(a, d, xs)
        else:
            return to_format(self.pp_unknown())
    def pp_seq_core(self, f, a, d, xs):
        # Shared machinery for rendering sequences with per-element printer f.
        self.visited = self.visited + 1
        if d > self.max_depth or self.visited > self.max_visited:
            return self.pp_ellipses()
        r = []
        sz = 0
        for elem in a:
            r.append(f(elem, d+1, xs))
            sz = sz + 1
            if sz > self.max_args:
                r.append(self.pp_ellipses())
                break
        return seq3(r, '[', ']')
    def pp_seq(self, a, d, xs):
        return self.pp_seq_core(self.pp_expr, a, d, xs)
    def pp_seq_seq(self, a, d, xs):
        return self.pp_seq_core(self.pp_seq, a, d, xs)
    def pp_model(self, m):
        """Format a model as [name = value, ...]."""
        r = []
        sz = 0
        for d in m:
            i = m[d]
            if isinstance(i, z3.FuncInterp):
                i_pp = self.pp_func_interp(i)
            else:
                i_pp = self.pp_expr(i, 0, [])
            name = self.pp_name(d)
            r.append(compose(name, to_format(' = '), indent(_len(name) + 3, i_pp)))
            sz = sz + 1
            if sz > self.max_args:
                r.append(self.pp_ellipses())
                break
        return seq3(r, '[', ']')
    def pp_func_entry(self, e):
        # One function-interpretation entry: args -> value.
        num = e.num_args()
        if num > 1:
            args = []
            for i in range(num):
                args.append(self.pp_expr(e.arg_value(i), 0, []))
            args_pp = group(seq3(args))
        else:
            args_pp = self.pp_expr(e.arg_value(0), 0, [])
        value_pp = self.pp_expr(e.value(), 0, [])
        return group(seq((args_pp, value_pp), self.pp_arrow()))
    def pp_func_interp(self, f):
        """Format a function interpretation, including the 'else' default."""
        r = []
        sz = 0
        num = f.num_entries()
        for i in range(num):
            r.append(self.pp_func_entry(f.entry(i)))
            sz = sz + 1
            if sz > self.max_args:
                r.append(self.pp_ellipses())
                break
        if sz <= self.max_args:
            else_val = f.else_value()
            # `is None` instead of `== None` (fixed): else_val is an AST
            # reference whose __eq__ builds a Z3 expression, so an equality
            # comparison against None is the wrong operation here.
            if else_val is None:
                else_pp = to_format('#unspecified')
            else:
                else_pp = self.pp_expr(else_val, 0, [])
            r.append(group(seq((to_format('else'), else_pp), self.pp_arrow())))
        return seq3(r, '[', ']')
    def pp_list(self, a):
        r = []
        sz = 0
        for elem in a:
            if _support_pp(elem):
                r.append(self.main(elem))
            else:
                r.append(to_format(str(elem)))
            sz = sz + 1
            if sz > self.max_args:
                r.append(self.pp_ellipses())
                break
        if isinstance(a, tuple):
            return seq3(r)
        else:
            return seq3(r, '[', ']')
    def main(self, a):
        """Top-level dispatch over every supported Z3 object kind."""
        if z3.is_expr(a):
            return self.pp_expr(a, 0, [])
        elif z3.is_sort(a):
            return self.pp_sort(a)
        elif z3.is_func_decl(a):
            return self.pp_name(a)
        elif isinstance(a, z3.Goal) or isinstance(a, z3.AstVector):
            return self.pp_seq(a, 0, [])
        elif isinstance(a, z3.Solver):
            return self.pp_seq(a.assertions(), 0, [])
        elif isinstance(a, z3.Fixedpoint):
            return a.sexpr()
        elif isinstance(a, z3.Optimize):
            return a.sexpr()
        elif isinstance(a, z3.ApplyResult):
            return self.pp_seq_seq(a, 0, [])
        elif isinstance(a, z3.ModelRef):
            return self.pp_model(a)
        elif isinstance(a, z3.FuncInterp):
            return self.pp_func_interp(a)
        elif isinstance(a, list) or isinstance(a, tuple):
            return self.pp_list(a)
        else:
            return to_format(self.pp_unknown())
    def __call__(self, a):
        # Reset the per-render node budget, then format `a`.
        self.visited = 0
        return self.main(a)
class HTMLFormatter(Formatter):
    """Formatter variant emitting HTML markup (entities, <sub>/<sup>)."""
    def __init__(self):
        Formatter.__init__(self)
        global _html_ellipses
        self.ellipses = to_format(_html_ellipses)
    def pp_arrow(self):
        # The second argument (1) is the display width of the entity.
        return to_format(' &rarr;', 1)
    def pp_unknown(self):
        return '<b>unknown</b>'
    def pp_name(self, a):
        r = _html_op_name(a)
        if r[0] == '&' or r[0] == '/' or r[0] == '%':
            # HTML entity (or /, %): renders as a single glyph.
            return to_format(r, 1)
        else:
            # A '__' infix in the name becomes a subscript: x__i -> x<sub>i</sub>.
            pos = r.find('__')
            if pos == -1 or pos == 0:
                return to_format(r)
            else:
                sz = len(r)
                if pos + 2 == sz:
                    # Trailing '__' with nothing after it: leave unchanged.
                    return to_format(r)
                else:
                    return to_format('%s<sub>%s</sub>' % (r[0:pos], r[pos+2:sz]), sz - 2)
    def is_assoc(self, k):
        return _is_html_assoc(k)
    def is_left_assoc(self, k):
        return _is_html_left_assoc(k)
    def is_infix(self, a):
        return _is_html_infix(a)
    def is_unary(self, a):
        return _is_html_unary(a)
    def get_precedence(self, a):
        return _get_html_precedence(a)
    def pp_neq(self):
        return to_format("&ne;")
    def pp_power(self, a, d, xs):
        # Exponent rendered as a superscript instead of '**'.
        arg1_pp = self.pp_power_arg(a.arg(0), d+1, xs)
        arg2_pp = self.pp_expr(a.arg(1), d+1, xs)
        return compose(arg1_pp, to_format('<sup>', 1), arg2_pp, to_format('</sup>', 1))
    def pp_var(self, a, d, xs):
        idx = z3.get_var_index(a)
        sz = len(xs)
        if idx >= sz:
            # Unbound de Bruijn index: displayed as nu (&#957;) subscripted.
            return to_format('&#957;<sub>%s</sub>' % idx, 1)
        else:
            return to_format(xs[sz - idx - 1])
    def pp_quantifier(self, a, d, xs):
        # Quantifiers use the forall/exists entities instead of keywords.
        ys = [ to_format(a.var_name(i)) for i in range(a.num_vars()) ]
        new_xs = xs + ys
        body_pp = self.pp_expr(a.body(), d+1, new_xs)
        ys_pp = group(seq(ys))
        if a.is_forall():
            header = '&forall;'
        else:
            header = '&exist;'
        return group(compose(to_format(header, 1),
                             indent(1, compose(ys_pp, to_format(' :'), line_break(), body_pp))))
# Module-level singletons used by obj_to_string() and set_pp_option().
_PP = PP()
_Formatter = Formatter()
def set_pp_option(k, v):
    """Set pretty-printer option `k` to `v`.

    Recognizes the pseudo-options 'html_mode' and 'fpa_pretty', then falls
    back to attributes of the PP and Formatter singletons.  Returns True
    when the option was recognized, False otherwise.
    """
    if k == 'html_mode':
        set_html_mode(bool(v))
        return True
    if k == 'fpa_pretty':
        set_fpa_pretty(bool(v))
        return True
    val = getattr(_PP, k, None)
    # `is not None` instead of `!= None`: identity test for the sentinel.
    if val is not None:
        z3._z3_assert(type(v) == type(val), "Invalid pretty print option value")
        setattr(_PP, k, v)
        return True
    val = getattr(_Formatter, k, None)
    if val is not None:
        z3._z3_assert(type(v) == type(val), "Invalid pretty print option value")
        setattr(_Formatter, k, v)
        return True
    return False
def obj_to_string(a):
    # Render any supported Z3 object to its pretty-printed string form.
    out = io.StringIO()
    _PP(out, _Formatter(a))
    return out.getvalue()
_html_out = None
def set_html_mode(flag=True):
    """Switch the global formatter between HTML and plain-text output."""
    global _Formatter
    _Formatter = HTMLFormatter() if flag else Formatter()
def set_fpa_pretty(flag=True):
    """Toggle pretty (unicode/infix) printing of floating-point operators.

    Overlays the matching operator-name table onto _z3_op_to_str and flips
    the infix flag of every FP operator accordingly.
    """
    global _Formatter
    global _z3_op_to_str
    _Formatter.fpa_pretty = flag
    table = _z3_op_to_fpa_pretty_str if flag else _z3_op_to_fpa_normal_str
    _z3_op_to_str.update(table)
    for op_kind in _z3_fpa_infix:
        _infix_map[op_kind] = flag
set_fpa_pretty(True)
def in_html_mode():
    # True when set_html_mode(True) installed the HTML formatter.
    return isinstance(_Formatter, HTMLFormatter)
def pp(a):
    # Pretty-print `a` when the type is supported; plain print otherwise.
    if _support_pp(a):
        print(obj_to_string(a))
    else:
        print(a)
def print_matrix(m):
    """Print a list/tuple of row lists as a matrix (HTML table in HTML mode)."""
    z3._z3_assert(isinstance(m, (list, tuple)), "matrix expected")
    if not in_html_mode():
        print(obj_to_string(m))
    else:
        # Fixed: attributes were comma-separated in the original
        # ('<table cellpadding="2", cellspacing="0", border="1">'),
        # which is invalid HTML attribute syntax.
        print('<table cellpadding="2" cellspacing="0" border="1">')
        for r in m:
            z3._z3_assert(isinstance(r, (list, tuple)), "matrix expected")
            print('<tr>')
            for c in r:
                print('<td>%s</td>' % c)
            print('</tr>')
        print('</table>')
def insert_line_breaks(s, width):
    """Break s in lines of size width (approx).

    Inserts '<br />' at the first space after each `width`-column run.
    """
    if len(s) <= width:
        return s
    buf = io.StringIO()
    col = 0
    for ch in s:
        if col > width and ch == ' ':
            buf.write(u('<br />'))
            col = 0
        else:
            buf.write(u(ch))
            col = col + 1
    return buf.getvalue()
| jirislaby/z3 | src/api/python/z3printer.py | Python | mit | 40,207 |
"""
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
This file is part of the Smart Developer Hub Project:
http://www.smartdeveloperhub.org
Center for Open Middleware
http://www.centeropenmiddleware.com/
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Copyright (C) 2015 Center for Open Middleware.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
"""
import StringIO
import uuid
__author__ = 'Fernando Serena'
import pika
import sys
from rdflib import Graph, URIRef, RDF, Literal
from rdflib.namespace import Namespace, FOAF
import os
from datetime import datetime
CURATOR = Namespace('http://www.smartdeveloperhub.org/vocabulary/curator#')
TYPES = Namespace('http://www.smartdeveloperhub.org/vocabulary/types#')
AMQP = Namespace('http://www.smartdeveloperhub.org/vocabulary/amqp#')
accepted = False
def callback(ch, method, properties, body):
    # Consumes query-result fragments; a 'state' header equal to 'end'
    # marks the final (empty) message and stops the consume loop.
    if 'state' in properties.headers:
        if properties.headers['state'] == 'end':
            channel.stop_consuming()
            return
    print 'chunk headers: ', properties.headers
    # SECURITY: eval() on a message body executes arbitrary code if the
    # broker or any publisher is untrusted -- prefer ast.literal_eval or
    # a proper serialization format (JSON) instead.
    for _ in eval(body):
        print _
def accept_callback(ch, method, properties, body):
    # Flags the request as accepted once the curator replies with a
    # curator:Accepted resource on the response channel (turtle payload).
    global accepted
    if not accepted:
        g = Graph()
        g.parse(StringIO.StringIO(body), format='turtle')
        if len(list(g.subjects(RDF.type, CURATOR.Accepted))) == 1:
            print 'Request accepted!'
            accepted = True
# Connect to the local broker and load the query request template.
connection = pika.BlockingConnection(pika.ConnectionParameters(
    host='localhost'))
channel = connection.channel()
routing_key = ''
exchange = ''
graph = Graph()
script_dir = os.path.dirname(__file__)
with open(os.path.join(script_dir, 'query_usernames.ttl')) as f:
    graph.parse(file=f, format='turtle')
# Fill in per-request identifiers and the submission timestamp.
req_node = list(graph.subjects(RDF.type, CURATOR.QueryRequest)).pop()
message_id = Literal(str(uuid.uuid4()), datatype=TYPES.UUID)
agent_id = Literal(str(uuid.uuid4()), datatype=TYPES.UUID)
graph.set((req_node, CURATOR.messageId, message_id))
graph.set((req_node, CURATOR.submittedOn, Literal(datetime.now())))
agent_node = list(graph.subjects(RDF.type, FOAF.Agent)).pop()
graph.set((agent_node, CURATOR.agentId, agent_id))
ch_node = list(graph.subjects(RDF.type, CURATOR.DeliveryChannel)).pop()
# Exclusive auto-named queue where query result chunks are delivered.
result = channel.queue_declare(exclusive=True)
queue_name = result.method.queue
# channel.queue_bind(exchange=exchange, queue=queue_name, routing_key=routing_key)
channel.basic_consume(callback, queue=queue_name, no_ack=True)
# Second exclusive queue for the curator's accept/reject response.
result = channel.queue_declare(exclusive=True)
accept_queue = result.method.queue
channel.queue_bind(exchange='sdh', queue=accept_queue, routing_key='curator.response.{}'.format(str(agent_id)))
channel.basic_consume(accept_callback, queue=accept_queue, no_ack=True)
# graph.set((ch_node, AMQP.queueName, Literal(queue_name)))
graph.set((ch_node, AMQP.routingKey, Literal(queue_name)))
graph.set((ch_node, AMQP.exchangeName, Literal(exchange)))
# Publish the request, then block until callback() sees the 'end' chunk.
message = graph.serialize(format='turtle')
channel.basic_publish(exchange='sdh',
                      routing_key='curator.request.query',
                      body=message)
channel.start_consuming()
| SmartDeveloperHub/sdh-curator | sdh/curator/test/query.py | Python | apache-2.0 | 3,809 |
# Copyright (C) 2010 Oregon State University et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
from collections import defaultdict
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.shortcuts import render_to_response
from django.template import RequestContext
from ..forms.importing import ImportForm, OrphanForm, VirtualMachineForm
from .generic import NO_PRIVS
from ganeti_webmgr.clusters.models import Cluster
from ganeti_webmgr.virtualmachines.models import VirtualMachine
@login_required
def orphans(request):
    """
    displays list of orphaned VirtualMachines, i.e. VirtualMachines without
    an owner.

    Superusers see every cluster; other users only the clusters they hold
    the 'admin' permission on.  A valid POST assigns the selected VMs to
    the chosen owner.
    """
    user = request.user
    if user.is_superuser:
        clusters = Cluster.objects.all()
    else:
        clusters = user.get_objects_any_perms(Cluster, ['admin'])
    if not clusters:
        raise PermissionDenied(NO_PRIVS)
    vms_with_cluster = VirtualMachine.objects.filter(owner=None,
                                                     cluster__in=clusters) \
        .order_by('hostname').values_list('id', 'hostname', 'cluster')
    if request.method == 'POST':
        # strip cluster from vms
        vms = [(i[0], i[1]) for i in vms_with_cluster]
        # process updates if this was a form submission
        form = OrphanForm(vms, request.POST)
        if form.is_valid():
            # update all selected VirtualMachines
            data = form.cleaned_data
            owner = data['owner']
            vm_ids = data['virtual_machines']
            # update the owner and save the vm. This isn't the most efficient
            # way of updating the VMs but we would otherwise need to group them
            # by cluster
            orphaned = defaultdict(lambda: 0)
            for id in vm_ids:
                vm = VirtualMachine.objects.get(id=id)
                vm.owner = owner
                vm.save()
                orphaned[vm.cluster_id] -= 1
            # remove updated vms from the list
            vms_with_cluster = [i for i in vms_with_cluster
                                if unicode(i[0]) not in vm_ids]
    else:
        # strip cluster from vms
        form = ImportForm([(i[0], i[1]) for i in vms_with_cluster])
    # map cluster ids to hostnames for display in the template
    clusterdict = {}
    for i in clusters:
        clusterdict[i.id] = i.hostname
    vms = [(i[0], clusterdict[i[2]],
            i[1]) for i in vms_with_cluster]
    return render_to_response("ganeti/importing/orphans.html",
                              {'vms': vms,
                               'form': form, },
                              context_instance=RequestContext(request), )
@login_required
def missing_ganeti(request):
    """
    View for displaying VirtualMachines missing from the ganeti cluster

    These are VMs present in the database but no longer reported by ganeti.
    A valid POST deletes the selected database records.
    """
    user = request.user
    if user.is_superuser:
        clusters = Cluster.objects.all()
    else:
        clusters = user.get_objects_any_perms(Cluster, ['admin'])
    if not clusters:
        raise PermissionDenied(NO_PRIVS)
    vms = []
    for cluster in clusters:
        for vm in cluster.missing_in_ganeti:
            vms.append((vm, vm))
    if request.method == 'POST':
        # process updates if this was a form submission
        form = VirtualMachineForm(vms, request.POST)
        if form.is_valid():
            # update all selected VirtualMachines
            data = form.cleaned_data
            vm_ids = data['virtual_machines']
            q = VirtualMachine.objects.filter(hostname__in=vm_ids)
            missing = defaultdict(lambda: 0)
            for i in q:
                missing[i.cluster_id] -= 1
            q.delete()
            # remove updated vms from the list
            vms = filter(lambda x: unicode(x[0]) not in vm_ids, vms)
    else:
        form = VirtualMachineForm(vms)
    # rebuild the display list, sorted by hostname, with cluster names
    vms = {}
    for cluster in clusters:
        for vm in cluster.missing_in_ganeti:
            vms[vm] = (cluster.hostname, vm)
    vmhostnames = vms.keys()
    vmhostnames.sort()
    vms_tuplelist = []
    for i in vmhostnames:
        vms_tuplelist.append((i, vms[i][0], vms[i][1]))
    vms = vms_tuplelist
    return render_to_response("ganeti/importing/missing.html",
                              {'vms': vms,
                               'form': form, },
                              context_instance=RequestContext(request), )
@login_required
def missing_db(request):
    """
    View for displaying VirtualMachines missing from the database

    These are VMs reported by ganeti with no matching database record.
    A valid POST creates records for the selected VMs (keys have the form
    'cluster_id:hostname'), optionally assigning them an owner.
    """
    user = request.user
    if user.is_superuser:
        clusters = Cluster.objects.all()
    else:
        clusters = user.get_objects_any_perms(Cluster, ['admin'])
    if not clusters:
        raise PermissionDenied(NO_PRIVS)
    vms = []
    for cluster in clusters:
        for hostname in cluster.missing_in_db:
            vms.append(('%s:%s' % (cluster.id, hostname), hostname))
    if request.method == 'POST':
        # process updates if this was a form submission
        form = ImportForm(vms, request.POST)
        if form.is_valid():
            # update all selected VirtualMachines
            data = form.cleaned_data
            owner = data['owner']
            vm_ids = data['virtual_machines']
            import_ready = defaultdict(lambda: 0)
            orphaned = defaultdict(lambda: 0)
            # create missing VMs
            for vm in vm_ids:
                cluster_id, host = vm.split(':')
                cluster = Cluster.objects.get(id=cluster_id)
                VirtualMachine(hostname=host, cluster=cluster,
                               owner=owner).save()
                import_ready[cluster.pk] -= 1
                if owner is None:
                    orphaned[cluster.pk] += 1
            # remove created vms from the list
            vms = filter(lambda x: unicode(x[0])
                         not in vm_ids, vms)
    else:
        form = ImportForm(vms)
    # rebuild the display list, sorted by hostname, with cluster names
    vms = {}
    for cluster in clusters:
        for hostname in cluster.missing_in_db:
            vms[hostname] = (u'%s:%s' % (cluster.id, hostname),
                             unicode(cluster.hostname), unicode(hostname))
    vmhostnames = vms.keys()
    vmhostnames.sort()
    vms_tuplelist = []
    for i in vmhostnames:
        vms_tuplelist.append(vms[i])
    vms = vms_tuplelist
    return render_to_response("ganeti/importing/missing_db.html",
                              {'vms': vms,
                               'form': form,
                               },
                              context_instance=RequestContext(request), )
| dmS0Zq/ganeti_webmgr | ganeti_webmgr/ganeti_web/views/importing.py | Python | gpl-2.0 | 7,249 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
from airflow.models import DAG
from airflow.operators.dummy import DummyOperator
# Fixed, historical execution date shared by the test DAG below.
DEFAULT_DATE = datetime(2000, 1, 1)
# Minimal DAG with a single no-op task.
dag1 = DAG(dag_id='exit_test_dag', start_date=DEFAULT_DATE)
dag1_task1 = DummyOperator(task_id='dummy', dag=dag1, owner='airflow')
| airbnb/airflow | tests/dags_with_system_exit/b_test_scheduler_dags.py | Python | apache-2.0 | 1,070 |
# Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from barf import BARF
import analysis
import core
import arch
| jslhs/barf-project | barf/barf/__init__.py | Python | bsd-2-clause | 1,409 |
import datetime
from loguru import logger
from flexget import plugin
from flexget.event import event
from flexget.utils.database import Session
logger = logger.bind(name='est_movies_bluray')
class EstimatesMoviesBluray:
    """Estimate a movie's release date by querying Blu-ray.com."""

    @plugin.priority(2)
    def estimate(self, entry):
        """Return the Blu-ray.com release date for *entry*, or None.

        Entries without a ``movie_name`` field, or whose ``movie_year`` lies
        in the future, are skipped entirely.
        """
        if 'movie_name' not in entry:
            return
        title = entry['movie_name']
        year = entry.get('movie_year')
        # A year beyond the current one cannot have a disc release yet.
        if year is not None and year > datetime.datetime.now().year:
            logger.debug('Skipping Blu-ray.com lookup since movie year is {}', year)
            return
        logger.debug('Searching Blu-ray.com for release date of {} ({})', title, year)
        found = None
        try:
            with Session() as session:
                bluray_lookup = plugin.get('api_bluray', self).lookup
                movie = bluray_lookup(title=title, year=year, session=session)
                if movie:
                    found = movie.release_date
        except LookupError as err:
            logger.debug(err)
        if found:
            logger.debug('received release date: {}', found)
        return found
@event('plugin.register')
def register_plugin():
    # Expose the estimator through the 'estimate_release' plugin interface.
    plugin.register(
        EstimatesMoviesBluray, 'est_movies_bluray', interfaces=['estimate_release'], api_ver=2
    )
| ianstalk/Flexget | flexget/components/estimate_release/estimators/est_movies_bluray.py | Python | mit | 1,377 |
#!python
"""
Copyright (C) 2004-2017 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
pg_hba.py - utilities for handling the pg_hba.conf file
"""
import os
import re
import sys
# only necessary for module testing:
# import pprint
ENTRY_FIELDS = ('type', 'database', 'user', 'ipmask', 'address', 'authmethod', 'auth_options')
class PgHba(object):
    """Class for parsing/searching/editing the pg_hba.conf file.

    Parsed content is kept in self._contents, a list that preserves file
    order and mixes raw strings (comments / blank lines) with Entry objects.

    NOTE: this module targets Python 2 (it uses os.environ.has_key and
    print statements).
    """
    # Modified regex to support all record formats in pg_hba and also
    # support tabs as well as spaces. From postgres docs, a pg_hba
    # record can have one of the following seven formats:
    # 1. local database user auth-method [auth-option]
    # 2. host database user CIDR-address auth-method [auth-option]
    # 3. hostssl database user CIDR-address auth-method [auth-option]
    # 4. hostnossl database user CIDR-address auth-method [auth-option]
    # 5. host database user IP-address IP-mask auth-method [auth-option]
    # 6. hostssl database user IP-address IP-mask auth-method [auth-option]
    # 7. hostnossl database user IP-address IP-mask auth-method [auth-option]
    #
    # _re_entry1 matches record type 1
    # _re_entry2 matches record types 2,3,4
    # _re_entry3 matches record types 5,6,7
    _re_entry1 = re.compile(
        r'^\s*(?P<type>local)(\s|\t)+(?P<database>\S+)(\s|\t)+(?P<user>\S+)(\s|\t)+(?P<method>\S+)(\s|\t)+(\b\S+=\S+\b\s*)*')
    _re_entry2 = re.compile(
        r'^\s*(?P<type>\S+)(\s|\t)+(?P<database>\S+)(\s|\t)+(?P<user>\S+)(\s|\t)+(?P<address>\S+)(\s|\t)+(?P<method>\S+)(\s|\t)+(\b\S+=\S+\b\s*)*')
    _re_entry3 = re.compile(
        r'^\s*(?P<type>\S+)(\s|\t)+(?P<database>\S+)(\s|\t)+(?P<user>\S+)(\s|\t)+(?P<address>\S+)(\s|\t)+(?P<ipmask>\d+\.\d+.\d+.\d+\/*\d*)(\s|\t)+(?P<method>\S+)(\s|\t)+(\b\S+=\S+\b\s*)*')
    # ignore comments etc
    _re_ignore = re.compile(r'^(\s*#.*|\s*)$')

    def __init__(self, file_path=None):
        """
        Return a new pg_hba object with the file already parsed.

        @param file_path: path to a pg_hba.conf file to parse/edit.  When
            omitted, $MASTER_DATA_DIRECTORY/pg_hba.conf is used.
        """
        if not file_path:
            if not os.environ.has_key('MASTER_DATA_DIRECTORY'):
                raise Exception("A pg_hba.conf file must be provided or MASTER_DATA_DIRECTORY must point to one")
            self._file_path = os.path.join(os.environ['MASTER_DATA_DIRECTORY'], 'pg_hba.conf')
        else:
            self._file_path = file_path
        self._parse()

    def _parse(self):
        """Read and parse a pg_hba.conf file into self._contents."""
        self._contents = list()
        fdr = open(self._file_path, 'rb')
        for line in fdr:
            if self._re_ignore.match(line):
                # comments and blank lines are kept verbatim as strings
                self._contents.append(line)
            else:
                # Match with one of the three possible regex defined above.
                # NOTE(review): _re_entry2 is tried before _re_entry3, so a
                # record of forms 5-7 (IP-address + IP-mask) can be captured
                # by _re_entry2 with the IP-mask parsed as the auth method —
                # verify against expected inputs.
                line_match = self._re_entry1.match(line)
                if line_match is not None:
                    # Parse contents and form entry. auth_opts_index will be 8 for _re_entry1
                    self._parse_contents_match(line_match, 8)
                    continue
                line_match = self._re_entry2.match(line)
                if line_match is not None:
                    # Parse contents and form entry. auth_opts_index will be 10 for _re_entry2
                    self._parse_contents_match(line_match, 10)
                    continue
                line_match = self._re_entry3.match(line)
                if line_match is not None:
                    # Parse contents and form entry. auth_opts_index will be 12 for _re_entry3
                    self._parse_contents_match(line_match, 12)
                    continue
                else:
                    raise PgHbaException("error parsing file. Did not match expected regex." + line)
        fdr.close()

    def _parse_contents_match(self, line_match, auth_opts_index):
        """
        Parses match object to form an appropriate Entry object.

        @param line_match: line_match object returned by re.match()
        @param auth_opts_index:
            The index at which auth_opts should be located. 8 for _re_entry1, 10 for _re_entry2 and 12 for _re_entry3
        """
        match_groups = line_match.groups()
        # auth options appear as trailing "key=value" groups after the method
        auth_opts = dict()
        try:
            for grp in match_groups[auth_opts_index:]:
                if grp is None:
                    break
                key, val = grp.split('=')
                auth_opts[key] = val
        except IndexError:
            pass
        if auth_opts_index == 8:
            # record type 1: local, no address/ipmask
            ent = Entry(entry_type=line_match.group('type'),
                        database=line_match.group('database'),
                        user=line_match.group('user'),
                        authmethod=line_match.group('method'),
                        auth_options=auth_opts)
            self._contents.append(ent)
            return
        if auth_opts_index == 10:
            # record types 2-4: host* with a CIDR address
            ent = Entry(entry_type=line_match.group('type'),
                        database=line_match.group('database'),
                        user=line_match.group('user'),
                        address=line_match.group('address'),
                        authmethod=line_match.group('method'),
                        auth_options=auth_opts)
            self._contents.append(ent)
            return
        if auth_opts_index == 12:
            # record types 5-7: host* with separate IP address and mask
            ent = Entry(entry_type=line_match.group('type'),
                        database=line_match.group('database'),
                        user=line_match.group('user'),
                        address=line_match.group('address'),
                        ipmask=line_match.group('ipmask'),
                        authmethod=line_match.group('method'),
                        auth_options=auth_opts)
            self._contents.append(ent)
            return

    def write(self, out_file=None):
        """Write the pg_hba.conf file from its parsed contents.

        @param out_file: destination path; defaults to the parsed file itself.
        """
        if not out_file:
            out_file = self._file_path
        fdw = open(out_file, 'wb')
        for line in self._contents:
            # entries marked deleted are dropped from the rewritten file
            if isinstance(line, Entry) and line.is_deleted():
                continue
            fdw.write(str(line))
        fdw.close()

    def search(self, **kwargs):
        """Search for an entry in a parsed file, return list of matches.

        Accepted keyword arguments are the names in ENTRY_FIELDS
        (type, database, user, ipmask, address, authmethod, auth_options).
        An entry matches when every supplied value is contained
        (substring-wise) in the corresponding field of the entry.

        @raise PgHbaException: if an unknown field name is passed.
        @return: list of matching Entry objects (possibly empty)
        """
        for k in kwargs.keys():
            if k not in ENTRY_FIELDS:
                raise PgHbaException("%s is not a valid entry field" % k)
        result = list()
        match_mapping = dict()
        for line in self._contents:
            if not isinstance(line, Entry):
                continue
            # reset the per-field match flags for this entry
            for key in kwargs.keys():
                match_mapping[key] = False
            all_match = True
            for key, val in kwargs.items():
                attr = line.get_attribute(key)
                if attr:
                    # substring containment, not strict equality
                    if attr.count(val) > 0:
                        match_mapping[key] = True
            for key, val in match_mapping.items():
                if not val:
                    all_match = False
            if all_match:
                result.append(line)
        return result

    def add_entry(self, ent):
        """add an entry object to the end of the file if not already present"""
        result = self.search(**ent.get_search_fields())
        if not result:
            self._contents.append(ent)
        else:
            print "already have entry for: %s" % ent

    def get_contents(self):
        """return the parsed contents as a list"""
        return self._contents

    def get_contents_without_comments(self):
        """return the parsed contents, without comments, as a list"""
        result = []
        for line in self._contents:
            if not isinstance(line, Entry):
                continue
            result.append(line)
        return result

    def __str__(self):
        """return a string representation of the contents of the file"""
        buf = ""
        for line in self._contents:
            buf += str(line)
        return buf
class PgHbaException(Exception):
    """Raised by PgHba/Entry objects on parse or validation errors."""
class Entry(object):
    """Container class for representing a single entry in a pg_hba.conf file."""

    # Values accepted for the connection-type and auth-method fields.
    _types = ('local', 'host', 'hostssl', 'hostnossl')
    _methods = ('trust', 'reject', 'md5', 'password', 'gss', 'sspi',
                'krb5', 'ident', 'pam', 'ldap', 'radius', 'cert')

    def __init__(self, entry_type=None, database=None,
                 user=None, authmethod=None,
                 ipmask=None, address=None,
                 auth_options=None):
        """
        Create a new pg_hba entry.

        @param entry_type: the connection type field (one of _types), required
        @param database: the database field, required
        @param user: the user field, required
        @param authmethod: the authmethod field (one of _methods), required
        @param ipmask: the ipmask field, optional
        @param address: the address field, optional
        @param auth_options: dict of auth options, optional
        @raise PgHbaException: if a required field is missing or invalid
        """
        if entry_type is None or \
           database is None or \
           user is None or \
           authmethod is None:
            # Bug fix: the original message listed 'address' (which is
            # optional) instead of 'authmethod' (which is required).
            raise PgHbaException("pg_hba.entry must be provided with type, database, user, authmethod")
        if auth_options is None:
            # never share a mutable default dict between instances
            auth_options = dict()
        self._fields = dict()
        self.set_type(entry_type)
        self._fields['database'] = database
        self._fields['user'] = user
        self._fields['ipmask'] = ipmask
        self._fields['address'] = address
        self.set_authmethod(authmethod)
        self._fields['auth_options'] = auth_options
        self._deleted = False

    def get_search_fields(self):
        """Return the fields PgHba.search() uses to detect duplicate entries."""
        return dict(type=self.get_type(),
                    database=self.get_database(),
                    user=self.get_user(),
                    authmethod=self.get_authmethod())

    def delete(self):
        """mark this entry as deleted (skipped by PgHba.write)"""
        self._deleted = True

    def is_deleted(self):
        """return True/False if this entry has been marked as deleted"""
        return self._deleted

    def __str__(self):
        """return a string that can be inserted into a pg_hba.conf file"""
        a_opts = ""
        if self._fields['auth_options']:
            a_opts = " ".join(["%s=%s" % (key, val)
                               for key, val in self._fields['auth_options'].items()])
        return "%s \t %s \t %s \t %s \t %s \t %s \t %s\n" % \
            (self._fields['type'],
             self._fields['database'],
             self._fields['user'],
             self._fields['address'] or "",
             self._fields['ipmask'] or "",
             self._fields['authmethod'],
             a_opts)

    def get_attribute(self, name):
        """get entry fields by attribute name"""
        return self._fields[name]

    def get_type(self):
        """get entry type field"""
        return self._fields['type']

    def get_database(self):
        """get entry database field"""
        return self._fields['database']

    def get_user(self):
        """get entry user field"""
        return self._fields['user']

    def get_address(self):
        """get entry address field"""
        return self._fields['address']

    def get_authmethod(self):
        """get entry authmethod field"""
        return self._fields['authmethod']

    def get_auth_options(self):
        """get entry auth options field, as a dict"""
        return self._fields['auth_options']

    def get_ipmask(self):
        """get entry ipmask field"""
        return self._fields['ipmask']

    def set_type(self, entry_type):
        """set entry type field; raises PgHbaException for unknown types"""
        if entry_type not in self._types:
            raise PgHbaException("type '%s' unsupported, supported types are: %s" %
                                 (entry_type, ', '.join(["'%s'" % sup_type for sup_type in self._types])))
        self._fields['type'] = entry_type

    def set_database(self, database):
        """set entry database field"""
        self._fields['database'] = database

    def set_user(self, user):
        """set entry user field"""
        self._fields['user'] = user

    def set_address(self, address):
        """set entry address field"""
        self._fields['address'] = address

    def set_authmethod(self, authmethod):
        """set entry authmethod field; raises PgHbaException for unknown methods"""
        if authmethod not in self._methods:
            raise PgHbaException("auth method '%s' unsupported, supported methods are: %s" %
                                 (authmethod, ', '.join(["'%s'" % method for method in self._methods])))
        self._fields['authmethod'] = authmethod

    def set_ipmask(self, ipmask):
        """set entry ipmask field"""
        self._fields['ipmask'] = ipmask

    def set_auth_options(self, key, val):
        """set a single entry auth option, by name"""
        self._fields['auth_options'][key] = val
| edespino/gpdb | gpMgmt/test/behave_utils/PgHba.py | Python | apache-2.0 | 13,767 |
#!/usr/bin/python3
# requirements: btrfs on /dev/sdc1 with qgroup 1/0 and 1/1
from storage import *
from storageitu import *
def _toggle_limits(qgroup):
    """Toggle the qgroup's limits: clear a limit when it is set, otherwise
    install the default (2 GiB referenced, 1 GiB exclusive).

    Factored out because the original script repeated this verbatim for
    qgroups 1/0 and 1/1.
    """
    if qgroup.has_referenced_limit():
        qgroup.clear_referenced_limit()
    else:
        qgroup.set_referenced_limit(2 * GiB)
    if qgroup.has_exclusive_limit():
        qgroup.clear_exclusive_limit()
    else:
        qgroup.set_exclusive_limit(1 * GiB)


set_logger(get_logfile_logger())

environment = Environment(False)

storage = Storage(environment)
storage.probe()

staging = storage.get_staging()
print(staging)

blk_device = BlkDevice.find_by_name(staging, "/dev/sdc1")
btrfs = to_btrfs(blk_device.get_blk_filesystem())

# Toggle the limits of both qgroups this integration test requires.
for _qgroup_id in (BtrfsQgroupId(1, 0), BtrfsQgroupId(1, 1)):
    _toggle_limits(btrfs.find_btrfs_qgroup_by_id(_qgroup_id))

print(staging)

commit(storage)
| aschnell/libstorage-ng | integration-tests/filesystems/btrfs/quota/set-limits.py | Python | gpl-2.0 | 1,040 |
import os
# For backward compatibility, we import get_qapp here
from ..external.qt import get_qapp
def teardown():
    """atexit hook: exit the Qt application, if one could be created."""
    # get_qapp can be None if exceptions are raised early during setup -- #323
    if get_qapp is None:
        return
    get_qapp().exit()
# Instantiate the application eagerly and make sure it is torn down on exit.
_app = get_qapp()
import atexit
atexit.register(teardown)
| JudoWill/glue | glue/qt/__init__.py | Python | bsd-3-clause | 320 |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import signal
import mock
from oslo_utils import uuidutils
from neutron.agent.l3 import ha_router
from neutron.agent.l3 import router_info
from neutron.tests import base
from neutron.tests.common import l3_test_common
from neutron.tests import tools
_uuid = uuidutils.generate_uuid
class TestBasicRouterOperations(base.BaseTestCase):
    """Unit tests for neutron.agent.l3.ha_router.HaRouter."""

    def setUp(self):
        super(TestBasicRouterOperations, self).setUp()
        # Patch device_exists so no test touches real network devices.
        self.device_exists_p = mock.patch(
            'neutron.agent.linux.ip_lib.device_exists')
        self.device_exists = self.device_exists_p.start()

    def _create_router(self, router=None, **kwargs):
        # Helper: build an HaRouter wired up with sentinels/mocks.
        # `router` defaults to a MagicMock when the caller does not supply one.
        if not router:
            router = mock.MagicMock()
        self.agent_conf = mock.Mock()
        self.router_id = _uuid()
        return ha_router.HaRouter(mock.sentinel.enqueue_state,
                                  mock.sentinel.agent,
                                  self.router_id,
                                  router,
                                  self.agent_conf,
                                  mock.sentinel.driver,
                                  **kwargs)

    def test_get_router_cidrs_returns_ha_cidrs(self):
        ri = self._create_router()
        device = mock.MagicMock()
        device.name.return_value = 'eth2'
        addresses = ['15.1.2.2/24', '15.1.2.3/32']
        ri._get_cidrs_from_keepalived = mock.MagicMock(return_value=addresses)
        self.assertEqual(set(addresses), ri.get_router_cidrs(device))

    def test__add_default_gw_virtual_route(self):
        ri = self._create_router()
        mock_instance = mock.Mock()
        mock_instance.virtual_routes.gateway_routes = []
        ri._get_keepalived_instance = mock.Mock(return_value=mock_instance)
        # Start with a single subnet that has no gateway: no route expected.
        subnets = [{'id': _uuid(),
                    'cidr': '20.0.0.0/24',
                    'gateway_ip': None}]
        ex_gw_port = {'fixed_ips': [],
                      'subnets': subnets,
                      'extra_subnets': [],
                      'id': _uuid(),
                      'network_id': _uuid(),
                      'mac_address': 'ca:fe:de:ad:be:ef'}
        # Make sure no exceptional code
        ri._add_default_gw_virtual_route(ex_gw_port, 'qg-abc')
        self.assertEqual(0, len(mock_instance.virtual_routes.gateway_routes))
        # Adding a subnet with a gateway_ip yields exactly one gateway route.
        subnets.append({'id': _uuid(),
                        'cidr': '30.0.0.0/24',
                        'gateway_ip': '30.0.0.1'})
        ri._add_default_gw_virtual_route(ex_gw_port, 'qg-abc')
        self.assertEqual(1, len(mock_instance.virtual_routes.gateway_routes))
        # Clearing the gateway_ip removes the route again.
        subnets[1]['gateway_ip'] = None
        ri._add_default_gw_virtual_route(ex_gw_port, 'qg-abc')
        self.assertEqual(0, len(mock_instance.virtual_routes.gateway_routes))

    @mock.patch.object(router_info.RouterInfo, 'remove_floating_ip')
    def test_remove_floating_ip(self, super_remove_floating_ip):
        # HaRouter.remove_floating_ip must delegate to the base class.
        ri = self._create_router(mock.MagicMock())
        mock_instance = mock.Mock()
        ri._get_keepalived_instance = mock.Mock(return_value=mock_instance)
        device = mock.Mock()
        fip_cidr = '15.1.2.3/32'
        ri.remove_floating_ip(device, fip_cidr)
        self.assertTrue(super_remove_floating_ip.called)

    def test_destroy_state_change_monitor_ok(self):
        ri = self._create_router(mock.MagicMock())
        # need a port for destroy_state_change_monitor() to call PM code
        ri.ha_port = {'id': _uuid()}
        with mock.patch.object(ri,
                               '_get_state_change_monitor_process_manager')\
                as m_get_state:
            mock_pm = m_get_state.return_value
            mock_pm.active = False
            ri.destroy_state_change_monitor(mock_pm)
            # inactive monitor: a single SIGTERM disable is enough
            mock_pm.disable.assert_called_once_with(
                sig=str(int(signal.SIGTERM)))

    def test_destroy_state_change_monitor_force(self):
        ri = self._create_router(mock.MagicMock())
        # need a port for destroy_state_change_monitor() to call PM code
        ri.ha_port = {'id': _uuid()}
        with mock.patch.object(ri,
                               '_get_state_change_monitor_process_manager')\
                as m_get_state:
            mock_pm = m_get_state.return_value
            mock_pm.active = False
            # zero timeout forces the SIGTERM -> SIGKILL escalation path
            with mock.patch.object(ha_router, 'SIGTERM_TIMEOUT', 0):
                ri.destroy_state_change_monitor(mock_pm)
            calls = ["sig='str(%d)'" % signal.SIGTERM,
                     "sig='str(%d)'" % signal.SIGKILL]
            # NOTE(review): has_calls is not a real Mock assertion — it is
            # swallowed as an auto-created attribute, so this line verifies
            # nothing; probably assert_has_calls was intended.
            mock_pm.disable.has_calls(calls)

    def _test_ha_state(self, read_return, expected):
        # Helper: stub the keepalived state file to contain `read_return`
        # and assert ri.ha_state reports `expected`.
        ri = self._create_router(mock.MagicMock())
        ri.keepalived_manager = mock.Mock()
        ri.keepalived_manager.get_full_config_file_path.return_value = (
            'ha_state')
        self.mock_open = self.useFixture(
            tools.OpenFixture('ha_state', read_return)).mock_open
        self.assertEqual(expected, ri.ha_state)

    def test_ha_state_master(self):
        self._test_ha_state('master', 'master')

    def test_ha_state_unknown(self):
        # an empty state file should yield 'unknown'
        self._test_ha_state('', 'unknown')

    def test_ha_state_ioerror(self):
        # an error reading the state file should yield 'unknown'
        ri = self._create_router(mock.MagicMock())
        ri.keepalived_manager = mock.Mock()
        ri.keepalived_manager.get_full_config_file_path.return_value = (
            'ha_state')
        self.mock_open = IOError
        self.assertEqual('unknown', ri.ha_state)

    def test_gateway_ports_equal(self):
        ri = self._create_router(mock.MagicMock())
        ri.driver = mock.MagicMock()
        subnet_id, qos_policy_id = _uuid(), _uuid()
        _, old_gw_port = l3_test_common.prepare_ext_gw_test(
            self, ri, True)
        old_gw_port['qos_policy_id'] = qos_policy_id
        new_gw_port = copy.deepcopy(old_gw_port)
        # Fields ignored by the comparison: the ports still count as equal.
        new_gw_port.update({'binding:host_id': 'node02',
                            'updated_at': '2018-11-02T14:07:00',
                            'revision_number': 101,
                            'qos_policy_id': qos_policy_id})
        self.assertTrue(ri._gateway_ports_equal(old_gw_port, new_gw_port))
        # A changed fixed_ips list makes the ports unequal.
        fixed_ip = {'ip_address': '10.10.10.3', 'subnet_id': subnet_id}
        new_gw_port['fixed_ips'].append(fixed_ip)
        self.assertFalse(ri._gateway_ports_equal(old_gw_port, new_gw_port))
        new_gw_port['fixed_ips'].remove(fixed_ip)
        # A changed qos_policy_id also makes the ports unequal.
        new_gw_port['qos_policy_id'] = _uuid()
        self.assertFalse(ri._gateway_ports_equal(old_gw_port, new_gw_port))
| noironetworks/neutron | neutron/tests/unit/agent/l3/test_ha_router.py | Python | apache-2.0 | 7,211 |
#!/usr/bin/python
def solve():
    """Solve one test case read from stdin (Code Jam 2011 qualification B).

    Input is one space-separated line: C then C three-char tokens "xyZ"
    (elements x and y combine into Z), D then D two-char tokens "xy"
    (x and y are opposed), N then the invoke string.  Returns the final
    element list formatted like "[a, b, c]".
    """
    combinations = {}
    opposed = {}
    l = raw_input().split(' ')
    c = int(l.pop(0))
    for i in range(c):
        # unpack a 3-char token: the two base elements and their product
        c1, c2, r = l.pop(0)
        combinations[tuple(sorted((c1, c2)))] = r
    d = int(l.pop(0))
    for i in range(d):
        # unpack a 2-char token of mutually opposed elements (stored both ways)
        d1, d2 = l.pop(0)
        if d1 in opposed:
            opposed[d1].add(d2)
        else:
            opposed[d1] = set([d2])
        if d2 in opposed:
            opposed[d2].add(d1)
        else:
            opposed[d2] = set([d1])
    n = int(l.pop(0))  # declared length of the invoke string (not used further)
    s = list(l.pop(0))
    empty_set = set()
    # '#' is a sentinel so result[-2:] is always well-defined
    result = ['#', ]
    for char in s:
        result.append(char)
        combine = combinations.get(tuple(sorted(result[-2:])), None)
        if combine:
            # the last two elements combine: replace them with their product
            result = result[:-2]
            result.append(combine)
        elif opposed.get(char, empty_set).intersection(result):
            # an opposed pair is now present: the whole element list clears
            result = ['#', ]
    return str(result[1:]).replace("'", "")
if __name__ == '__main__':
    # First line holds the number of test cases; emit Code Jam "Case #i:" lines.
    T = int(raw_input())
    for c in range(1, T+1):
        print "Case #{0}: {1}".format(c, solve())
| sanSS/programming-contests | codejam/2011/Qualification/B.py | Python | gpl-3.0 | 1,108 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2016, ICEA
#
# This file is part of atn-sim
#
# atn-sim is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# atn-sim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import binascii
import os
__author__ = "Alexandre Magno"
__version__ = "0.1"
__date__ = "2016-dec-08"
class Message(object):
    """Simple container for a message: ICAO address, callsign and payload."""

    def __init__(self):
        # ICAO 24-bit address
        self._icao24 = None
        # callsign
        self._szCSign = None
        # message payload
        self._szMessage = None

    def __str__(self):
        parts = [
            "_icao24...: %s\n" % self._icao24,
            "_szCSign..: %s\n" % self._szCSign,
            "_szMessage: %s\n" % self._szMessage,
        ]
        return "".join(parts)
#
# end class Message
#
if __name__ == "__main__":
# instance object Message
l_Message = Message()
# initialize attributs of object
l_Message._icao24 = binascii.b2a_hex(os.urandom(3))
l_Message._szCSign = 'TAM7777'
l_Message._szMessage = None
# output
print ""
print l_Message
print ""
| icea-dev/atn-sim | atn/surveillance/icea/Message.py | Python | gpl-3.0 | 1,837 |
#!/usr/bin/env python
# coding: utf8
from gluon.html import *
from gluon.http import *
from gluon.validators import *
from gluon.sqlhtml import *
class Auth:
    """Minimal session-based authentication helpers."""

    @staticmethod
    def isLoggedIn(session):
        """Return True when the session is flagged logged-in AND carries a
        user whose own is_logged_in() check passes; False otherwise.

        Bug fix: the original returned None (not False) when
        session.logged_in was truthy but the user check failed, because the
        `else` branch only covered the outer condition.
        """
        return bool(session.logged_in and session.user and session.user.is_logged_in())

    @staticmethod
    def get_user(session):
        """Return the user object stored on the session (may be None)."""
        return session.user
| davidhampgonsalves/mix-tree | init/modules/simple_auth.py | Python | mit | 441 |
# -*- coding: utf-8 -*-
# Copyright (c) 2019 SubDownloader Developers - See COPYING - GPLv3
import logging
log = logging.getLogger('subdownloader.callback.ProgressCallback')
class ProgressCallback(object):
    """
    Lets long-running code report progress back to interested callers.

    Subclasses customize behavior by overriding only the on_*** hook
    methods; the public update/finish/cancel/set_range methods drive them.
    """

    def __init__(self, minimum=None, maximum=None):
        """
        Create a new ProgressCallback object.

        :param minimum: minimum value of the range (None if no percentage is required)
        :param maximum: maximum value of the range (None if no percentage is required)
        """
        log.debug('init: min={min}, max={max}'.format(min=minimum, max=maximum))
        self._min = minimum
        self._max = maximum
        self._canceled = False
        self._finished = False

    def range_initialized(self):
        """
        Tell whether both ends of the range have been set.
        """
        return None not in self.get_range()

    def set_range(self, minimum, maximum):
        """
        Install a new range; the values are handed unchanged to on_rangeChange.

        :param minimum: minimum value of the range (None if no percentage is required)
        :param maximum: maximum value of the range (None if no percentage is required)
        """
        self._min = minimum
        self._max = maximum
        self.on_rangeChange(minimum, maximum)

    def get_range(self):
        """
        The current range.

        :return: tuple (minimum, maximum)
        """
        return self._min, self._max

    def get_child_progress(self, parent_min, parent_max):
        """
        Create a child callback mapped onto [parent_min, parent_max] of this one.

        :param parent_min: parent value the child's minimum maps to
        :param parent_max: parent value the child's maximum maps to
        :return: instance of SubProgressCallback
        """
        return SubProgressCallback(parent=self, parent_min=parent_min, parent_max=parent_max)

    def update(self, value, *args, **kwargs):
        """
        Report that progress has advanced to *value*.

        Does NOT call finish(), even when value equals the maximum.

        :param value: current index/position (normally within [min, max])
        :param args: extra positional arguments, forwarded to on_update
        :param kwargs: extra keyword arguments, forwarded to on_update
        """
        log.debug('update(value={value}, args={args}, kwargs={kwargs})'.format(value=value, args=args, kwargs=kwargs))
        self.on_update(value, *args, **kwargs)

    def finish(self, *args, **kwargs):
        """
        Report that the operation has completed.

        :param args: extra positional arguments, forwarded to on_finish
        :param kwargs: extra keyword arguments, forwarded to on_finish
        """
        log.debug('finish(args={args}, kwargs={kwargs})'.format(args=args, kwargs=kwargs))
        self._finished = True
        self.on_finish(*args, **kwargs)

    def cancel(self):
        """
        Report that the operation was aborted.
        """
        log.debug('cancel()')
        self._canceled = True
        self.on_cancel()

    def on_rangeChange(self, minimum, maximum):
        """
        Hook invoked whenever the range changes; override as needed.

        :param minimum: new minimum value
        :param maximum: new maximum value
        """
        pass

    def on_update(self, value, *args, **kwargs):
        """
        Hook invoked on every update; override as needed.

        :param value: current index/position
        :param args: extra positional arguments
        :param kwargs: extra keyword arguments
        """
        pass

    def on_finish(self, *args, **kwargs):
        """
        Hook invoked once the operation finishes; override as needed.

        :param args: extra positional arguments
        :param kwargs: extra keyword arguments
        """
        pass

    def on_cancel(self):
        """
        Hook invoked when the operation is canceled; override as needed.
        """
        pass

    def canceled(self):
        """
        :return: True when cancel() has been called
        """
        return self._canceled

    def finished(self):
        """
        :return: True when finish() has been called
        """
        return self._finished
class SubProgressCallback(ProgressCallback):
    """
    A ProgressCallback that forwards its progress, rescaled, to a parent callback.
    """

    def __init__(self, parent, parent_min, parent_max):
        """
        Create a SubProgressCallback.

        The range [min, max) of this callback is mapped onto
        [parent_min, parent_max) of the parent.

        :param parent: the parent ProgressCallback
        :param parent_min: parent value this callback's minimum maps to
        :param parent_max: parent value this callback's maximum maps to
        """
        ProgressCallback.__init__(self)
        self._parent = parent
        self._parent_min = parent_min
        self._parent_max = parent_max

    def on_update(self, value, *args, **kwargs):
        """
        Translate an update of this callback into an update of the parent.

        :param value: current value within this callback's own range
        :param args: extra positional arguments, forwarded unchanged
        :param kwargs: extra keyword arguments, forwarded unchanged
        """
        mapped = self._parent_min
        # Without a meaningful own range the parent just gets parent_min.
        if self._max != self._min:
            fraction = (value - self._min) / (self._max - self._min)
            mapped = self._parent_min + fraction * (self._parent_max - self._parent_min)
        self._parent.update(mapped, *args, **kwargs)

    def on_cancel(self):
        """
        Cancelling a child cancels the whole parent ProgressCallback.
        """
        self._parent.cancel()
| subdownloader/subdownloader | subdownloader/callback.py | Python | gpl-3.0 | 6,305 |
from abc import ABCMeta, abstractmethod
class Parent(object):
    """Abstract base class with two abstract methods (Python 2-style __metaclass__).

    Fix: the final `pass` line had non-Python residue fused onto it, which
    made the class body unparseable; this restores a clean `pass`.
    """
    __metaclass__ = ABCMeta

    @abstractmethod
    def my_method2(self):
        pass

    @abstractmethod
    def my_method(self, foo):
        pass
# Copyright (c) 2009-2013 Samuel Stauffer <samuel@descolada.com>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of Samuel Stauffer nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__all__ = ['Atom', 'Reference', 'Port', 'PID', 'Export']
class Atom(str):
    """An Erlang atom; behaves as a plain string with a distinguishing repr."""

    def __repr__(self):
        inner = super(Atom, self).__repr__()
        return "Atom(" + inner + ")"
class Reference(object):
    """An Erlang reference (#Ref<...>), identified by node, ref_id and creation."""

    def __init__(self, node, ref_id, creation):
        # Normalise ref_id to a tuple so instances compare (and hash) consistently.
        if not isinstance(ref_id, tuple):
            ref_id = tuple(ref_id)
        self.node = node
        self.ref_id = ref_id
        self.creation = creation

    def __eq__(self, other):
        return isinstance(other, Reference) and self.node == other.node and self.ref_id == other.ref_id and self.creation == other.creation

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Defined alongside __eq__ so equal references hash equally and the
        # type stays hashable under Python 3 (where defining __eq__ alone
        # sets __hash__ to None).
        return hash((self.node, self.ref_id, self.creation))

    def __str__(self):
        return "#Ref<%d.%s>" % (self.creation, ".".join(str(i) for i in self.ref_id))

    def __repr__(self):
        return "%s::%s" % (self.__str__(), self.node)
class Port(object):
    """An Erlang port (#Port<...>) belonging to a node."""

    def __init__(self, node, port_id, creation):
        self.node = node
        self.port_id = port_id
        self.creation = creation

    def __eq__(self, other):
        if not isinstance(other, Port):
            return False
        return (self.node, self.port_id, self.creation) == \
               (other.node, other.port_id, other.creation)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        return "#Port<%d.%d>" % (self.creation, self.port_id)

    def __repr__(self):
        return "%s::%s" % (self.__str__(), self.node)
class PID(object):
    """An Erlang process identifier bound to an originating node.

    node     -- node name the process lives on
    pid_id   -- integer process id
    serial   -- serial counter disambiguating reused ids
    creation -- node incarnation counter
    """

    def __init__(self, node, pid_id, serial, creation):
        self.node = node
        self.pid_id = pid_id
        self.serial = serial
        self.creation = creation

    def __eq__(self, other):
        return (isinstance(other, PID)
                and self.node == other.node
                and self.pid_id == other.pid_id
                and self.serial == other.serial
                and self.creation == other.creation)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Keep the eq/hash contract: equal PIDs must hash equally
        # (and instances stay hashable under Python 3).
        return hash((self.node, self.pid_id, self.serial, self.creation))

    def __str__(self):
        return "<%d.%d.%d>" % (self.creation, self.pid_id, self.serial)

    def __repr__(self):
        return "%s::%s" % (self.__str__(), self.node)
class Export(object):
    """An Erlang external fun ``fun M:F/A``.

    module   -- module name
    function -- function name
    arity    -- integer arity
    """

    def __init__(self, module, function, arity):
        self.module = module
        self.function = function
        self.arity = arity

    def __eq__(self, other):
        return (isinstance(other, Export)
                and self.module == other.module
                and self.function == other.function
                and self.arity == other.arity)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Keep the eq/hash contract: equal exports must hash equally
        # (and instances stay hashable under Python 3).
        return hash((self.module, self.function, self.arity))

    def __str__(self):
        return "#Fun<%s.%s.%d>" % (self.module, self.function, self.arity)

    def __repr__(self):
        return self.__str__()
| jjhoo/sudoku.rb | erlastic/types.py | Python | agpl-3.0 | 4,125 |
#!/usr/bin/env python
import sys
import re
import math
import scipy
import scipy.stats
def extract_data(data_files):
    """Extract unnormalised entropy/concurrence and RDM trace values.

    data_files -- iterable of file paths to scan.

    Returns (entropy, unormalised_concurrence, trace): parallel lists of
    floats, in the order the marker lines appear across all files.
    """
    unormalised_concurrence = []
    entropy = []
    trace = []
    # Compile the markers once instead of re-matching raw patterns per line.
    entropy_regex = re.compile('^ # Unnormalised von Neumann entropy =')
    concurrence_regex = re.compile('^ # Unnormalised concurrence =')
    trace_regex = re.compile('^ # RDM trace =')
    for data_file in data_files:
        # "with" guarantees the handle is closed even if parsing raises;
        # the original only closed the file on the success path.
        with open(data_file) as f:
            for line in f:
                # The value is always the last whitespace-separated token.
                if entropy_regex.match(line):
                    entropy.append(float(line.split()[-1]))
                elif concurrence_regex.match(line):
                    unormalised_concurrence.append(float(line.split()[-1]))
                elif trace_regex.match(line):
                    trace.append(float(line.split()[-1]))
    return entropy, unormalised_concurrence, trace
def calculate_covariance(numerator, numerator_mean, trace, trace_mean):
    """Covariance of the mean between the two sample lists: the sum of
    cross-deviations divided by n*(n-1), where n = len(trace)."""
    n = len(trace)
    cross_terms = ((num - numerator_mean) * (tr - trace_mean)
                   for num, tr in zip(numerator, trace))
    return sum(cross_terms) / (n * (n - 1))
def calculate_stats_ratio(numerator, trace):
    """Mean and standard error of the ratio <numerator>/<trace>.

    The error propagates both standard errors and the covariance between
    the two estimators (the samples come from the same beta loops, so they
    are correlated).  Returns (mean, error).
    """
    # scipy.mean/scipy.sqrt were NumPy aliases that have been removed from
    # modern SciPy; use plain arithmetic and math.sqrt instead.
    # scipy.stats.sem is still the standard error of the mean.
    numerator_mean = sum(numerator) / float(len(numerator))
    trace_mean = sum(trace) / float(len(trace))
    numerator_se = scipy.stats.sem(numerator)
    trace_se = scipy.stats.sem(trace)
    mean = numerator_mean / trace_mean
    cov = calculate_covariance(numerator, numerator_mean, trace, trace_mean)
    # Standard error-propagation formula for a ratio of correlated means.
    error = math.sqrt((numerator_se / numerator_mean)**2
                      + (trace_se / trace_mean)**2
                      - (2 * cov / (numerator_mean * trace_mean))) * abs(mean)
    return mean, error
def calculate_entropy_stats(numerator, trace):
    """Mean and standard error of the entropy estimate.

    The reported mean is <numerator>/<trace> shifted by log2 of the mean
    trace; the error term propagates both standard errors and the
    numerator/trace covariance, with the extra (log 2 - mean) factors
    coming from the log(trace) shift.  Returns (mean, error).
    """
    # scipy.mean/scipy.sqrt were NumPy aliases that have been removed from
    # modern SciPy; use plain arithmetic and math.sqrt instead.
    numerator_mean = sum(numerator) / float(len(numerator))
    trace_mean = sum(trace) / float(len(trace))
    numerator_se = scipy.stats.sem(numerator)
    trace_se = scipy.stats.sem(trace)
    mean = numerator_mean / trace_mean
    cov = calculate_covariance(numerator, numerator_mean, trace, trace_mean)
    error = math.sqrt((numerator_se * mean / numerator_mean)**2
                      + ((trace_se / trace_mean)**2) * ((math.log(2) - mean)**2)
                      - (2 * cov / (numerator_mean * trace_mean))
                      * (mean - math.log(2)) * mean)
    # Shift into the final (base-2 log of trace) normalisation.
    mean = mean + math.log(trace_mean, 2)
    return mean, error
if __name__ == '__main__':
    # Usage: average_entropy.py <data_file> [<data_file> ...]
    # NOTE(review): Python 2 print statements -- this script cannot run
    # under Python 3 without conversion.
    data_files = sys.argv[1:]
    entropy, concurrence, trace = extract_data(data_files)
    # Only compute/report the quantities actually present in the files.
    if len(entropy) > 0: entropy_mean, entropy_se = calculate_entropy_stats(entropy, trace)
    if len(concurrence) > 0: concurrence_mean, concurrence_se = calculate_stats_ratio(concurrence, trace)
    if len(entropy) > 0: print "Average Von Neumann Entropy = ", entropy_mean, " s.e. = ", entropy_se, " beta loops = ", len(entropy)
    if len(concurrence) > 0: print "Average concurrence = ", concurrence_mean, " s.e. = ", concurrence_se, " beta loops = ", len(concurrence)
| ruthfranklin/hande | tools/dmqmc/average_entropy.py | Python | lgpl-2.1 | 2,794 |
# Generated by Django 2.2.6 on 2019-11-12 08:38
from django.db import migrations
class Migration(migrations.Migration):
    # Drops the privacy-policy acceptance and update-subscription fields
    # from BetaTestApplication; the data was migrated to the user profile
    # by the userprofile 0005 migration listed in the dependencies below.

    dependencies = [
        ('registration', '0011_betatestapplication_privacy_policy_url'),
        # Make sure that data from the removed fields has been copied over before
        # they are removed from here.
        ('userprofile', '0005_move_privacy_policy_subscribe_updates_data'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='betatestapplication',
            name='accepted_privacy_policy',
        ),
        migrations.RemoveField(
            model_name='betatestapplication',
            name='subscribe_to_updates',
        ),
    ]
| open-craft/opencraft | registration/migrations/0012_auto_20191112_0838.py | Python | agpl-3.0 | 709 |
from mitmproxy import contentviews
from mitmproxy.test import tflow
from mitmproxy.test import tutils
from mitmproxy.test import taddons
from mitmproxy.net.http import Headers
from ..mitmproxy import tservers
class TestScripts(tservers.MasterTest):
    """Run each bundled example addon against synthetic flows and check
    its observable effect on the flow."""

    def test_add_header(self, tdata):
        with taddons.context() as ctx:
            addon = ctx.script(tdata.path("../examples/simple/add_header.py"))
            flow = tflow.tflow(resp=tutils.tresp())
            addon.response(flow)
            assert flow.response.headers["newheader"] == "foo"

    def test_custom_contentviews(self, tdata):
        with taddons.context() as ctx:
            ctx.script(tdata.path("../examples/simple/custom_contentview.py"))
            # The script registers a "swapcase" contentview on load.
            swapcase_view = contentviews.get("swapcase")
            _, rendered = swapcase_view(b"<html>Test!</html>")
            assert any(b'tEST!' in chunk[0][1] for chunk in rendered)

    def test_iframe_injector(self, tdata):
        with taddons.context() as ctx:
            script = ctx.script(tdata.path("../examples/simple/modify_body_inject_iframe.py"))
            ctx.configure(
                script,
                iframe="http://example.org/evil_iframe"
            )
            flow = tflow.tflow(
                resp=tutils.tresp(content=b"<html><body>mitmproxy</body></html>")
            )
            ctx.master.addons.invoke_addon(script, "response", flow)
            assert b'iframe' in flow.response.content
            assert b'evil_iframe' in flow.response.content

    def test_modify_form(self, tdata):
        with taddons.context() as ctx:
            script = ctx.script(tdata.path("../examples/simple/modify_form.py"))
            urlencoded = Headers(content_type="application/x-www-form-urlencoded")
            flow = tflow.tflow(req=tutils.treq(headers=urlencoded))
            script.request(flow)
            assert flow.request.urlencoded_form["mitmproxy"] == "rocks"
            # With a non-form content-type the script replaces the form.
            flow.request.headers["content-type"] = ""
            script.request(flow)
            assert list(flow.request.urlencoded_form.items()) == [("foo", "bar")]

    def test_modify_querystring(self, tdata):
        with taddons.context() as ctx:
            script = ctx.script(tdata.path("../examples/simple/modify_querystring.py"))
            flow = tflow.tflow(req=tutils.treq(path="/search?q=term"))
            script.request(flow)
            assert flow.request.query["mitmproxy"] == "rocks"
            # Also applied when there is no query string at all.
            flow.request.path = "/"
            script.request(flow)
            assert flow.request.query["mitmproxy"] == "rocks"

    def test_redirect_requests(self, tdata):
        with taddons.context() as ctx:
            script = ctx.script(tdata.path("../examples/simple/redirect_requests.py"))
            flow = tflow.tflow(req=tutils.treq(host="example.org"))
            script.request(flow)
            assert flow.request.host == "mitmproxy.org"

    def test_send_reply_from_proxy(self, tdata):
        with taddons.context() as ctx:
            script = ctx.script(tdata.path("../examples/simple/send_reply_from_proxy.py"))
            flow = tflow.tflow(req=tutils.treq(host="example.com", port=80))
            script.request(flow)
            assert flow.response.content == b"Hello World"

    def test_dns_spoofing(self, tdata):
        with taddons.context() as ctx:
            script = ctx.script(tdata.path("../examples/complex/dns_spoofing.py"))
            original_host = "example.com"
            flow = tflow.tflow(
                req=tutils.treq(headers=Headers(host=original_host), port=80)
            )
            ctx.master.addons.invoke_addon(script, "requestheaders", flow)
            # Simulate the rewrite performed by reverse proxy mode.
            flow.request.scheme = "https"
            flow.request.port = 443
            ctx.master.addons.invoke_addon(script, "request", flow)
            assert flow.request.scheme == "http"
            assert flow.request.port == 80
            assert flow.request.headers["Host"] == original_host
| ujjwal96/mitmproxy | test/examples/test_examples.py | Python | mit | 3,859 |
# Adapted from unittest2/loader.py from the unittest2 plugins branch.
# This module contains some code copied from unittest2/loader.py and other
# code developed in reference to that module and others within unittest2.
# unittest2 is Copyright (c) 2001-2010 Python Software Foundation; All
# Rights Reserved. See: http://docs.python.org/license.html
import logging
import traceback
import six
import unittest
from nose2 import events
log = logging.getLogger(__name__)
# Setting __unittest makes unittest's traceback pruning skip frames from
# this module when reporting failures.
__unittest = True
class PluggableTestLoader(object):
    """Test loader that defers all loading to plugins

    :param session: Test run session.

    .. attribute :: suiteClass

       Suite class to use. Default: :class:`unittest.TestSuite`.
    """
    suiteClass = unittest.TestSuite

    def __init__(self, session):
        self.session = session

    def loadTestsFromModule(self, module):
        """Load tests from module.

        Fires :func:`loadTestsFromModule` hook.
        """
        evt = events.LoadFromModuleEvent(self, module)
        result = self.session.hooks.loadTestsFromModule(evt)
        if evt.handled:
            suite = result or self.suiteClass()
        else:
            suite = self.suiteClass(evt.extraTests)
        filterevt = events.ModuleSuiteEvent(self, module, suite)
        result = self.session.hooks.moduleLoadedSuite(filterevt)
        if result:
            # A plugin replaced the suite outright; result is truthy here,
            # so return it directly.
            return result
        return filterevt.suite

    def loadTestsFromNames(self, testNames, module=None):
        """Load tests from test names.

        Fires :func:`loadTestsFromNames` hook.
        """
        event = events.LoadFromNamesEvent(
            self, testNames, module)
        result = self.session.hooks.loadTestsFromNames(event)
        log.debug('loadTestsFromNames event %s result %s', event, result)
        if event.handled:
            suites = result or []
        else:
            # BUG FIX: 'suites' was previously left unbound (NameError)
            # when the event was unhandled, no names were given and no
            # module was supplied.
            suites = []
            if event.names:
                suites = [self.loadTestsFromName(name, module)
                          for name in event.names]
            elif module:
                suites = self.loadTestsFromModule(module)
        if event.extraTests:
            suites.extend(event.extraTests)
        return self.suiteClass(suites)

    def loadTestsFromName(self, name, module=None):
        """Load tests from test name.

        Fires :func:`loadTestsFromName` hook.
        """
        log.debug('loadTestsFromName %s/%s', name, module)
        event = events.LoadFromNameEvent(self, name, module)
        result = self.session.hooks.loadTestsFromName(event)
        if event.handled:
            suite = result or self.suiteClass()
            return suite
        return self.suiteClass(event.extraTests)

    def failedImport(self, name):
        """Make test case representing a failed import."""
        message = 'Failed to import test module: %s' % name
        if hasattr(traceback, 'format_exc'):
            # Python 2.3 compatibility
            # format_exc returns two frames of discover.py as well XXX ?
            message += '\n%s' % traceback.format_exc()
        return self._makeFailedTest(
            'ModuleImportFailure', name, ImportError(message))

    def failedLoadTests(self, name, exception):
        """Make test case representing a failed test load."""
        return self._makeFailedTest('LoadTestsFailure', name, exception)

    def sortTestMethodsUsing(self, name):
        """Sort key for test case test methods."""
        return name.lower()

    def discover(self, start_dir=None, pattern=None):
        """Compatibility shim for ``load_tests`` protocol."""
        # Capture the old start dir before entering the try block so the
        # finally clause can never hit an unbound 'oldsd'.
        oldsd = self.session.startDir
        try:
            self.session.startDir = start_dir
            return self.loadTestsFromNames([])
        finally:
            self.session.startDir = oldsd

    def _makeFailedTest(self, classname, methodname, exception):
        # Build a one-test suite whose single test raises the given
        # exception (or re-raises an exc_info tuple) when run.
        def testFailure(self):
            if isinstance(exception, Exception):
                raise exception
            else:
                # exception tuple (type, value, traceback)
                six.reraise(*exception)
        attrs = {methodname: testFailure}
        TestClass = type(classname, (unittest.TestCase,), attrs)
        return self.suiteClass((TestClass(methodname),))

    def __repr__(self):
        return '<%s>' % self.__class__.__name__
| ptthiem/nose2 | nose2/loader.py | Python | bsd-2-clause | 4,345 |
import requests
import json
import os
from socket import TagoRealTime
# API endpoints; overridable via environment variables for self-hosted or
# staging installations.
API_TAGO = os.environ.get('TAGO_SERVER') or 'https://api.tago.io'
REALTIME = os.environ.get('TAGO_REALTIME') or 'https://realtime.tago.io'
class Device:
    """Thin HTTP client for the Tago device API, authenticated with a
    device token sent in the Device-Token header."""

    def __init__(self, token):
        self.token = token
        self.default_headers = { 'content-type': 'application/json', 'Device-Token': token }

    def handle_url(self, id):
        # Build the /data endpoint URL, scoped to one item when an id is
        # given (a falsy id means the collection endpoint).
        if not id:
            return '{api_endpoint}/data'.format(api_endpoint=API_TAGO)
        return '{api_endpoint}/data/{id}'.format(api_endpoint=API_TAGO, id=id)

    def api_data_post(self, data):
        url = self.handle_url(id=False)
        return requests.post(url, data=json.dumps(data), headers=self.default_headers).json()

    def api_data_get(self, query):
        url = self.handle_url(id=False)
        return requests.get(url, params=query, headers=self.default_headers).json()

    def api_data_delete(self, id):
        return requests.delete(self.handle_url(id=id), headers=self.default_headers).json()

    def info(self, data):
        # NOTE: 'data' is accepted for interface compatibility but unused.
        url = '{api_endpoint}/info'.format(api_endpoint=API_TAGO)
        return requests.get(url, headers=self.default_headers).json()

    def insert(self, data):
        # Public alias for the raw POST helper.
        return self.api_data_post(data)

    def find(self, query):
        # Public alias for the raw GET helper.
        return self.api_data_get(query)

    def remove(self, id=False):
        return self.api_data_delete(id)

    def getParams(self, sent_status):
        url = '{api_endpoint}/device/params'.format(api_endpoint=API_TAGO)
        return requests.get(url, params=sent_status, headers=self.default_headers).json()

    def markParam(self, param_id):
        url = '{api_endpoint}/device/params/{id}'.format(api_endpoint=API_TAGO, id=param_id)
        return requests.put(url, headers=self.default_headers).json()

    def listening(self, callback, wait=False):
        # Delegate to the realtime socket wrapper.
        return TagoRealTime(REALTIME, self.token, callback).listening(wait)
| madhavbhatt/tago-sdk-python | tago/device/__init__.py | Python | apache-2.0 | 1,879 |
"""
Comsystem command module.
Comm commands are OOC commands and intended to be made available to
the Player at all times (they go into the PlayerCmdSet). So we
make sure to homogenize self.caller to always be the player object
for easy handling.
"""
from django.conf import settings
from src.comms.models import ChannelDB, Msg
#from src.comms import irc, imc2, rss
from src.players.models import PlayerDB
from src.players import bots
from src.comms.channelhandler import CHANNELHANDLER
from src.utils import create, utils, prettytable, evtable
from src.utils.utils import make_iter
from src.commands.default.muxcommand import MuxCommand, MuxPlayerCommand
# limit symbol import for API
__all__ = ("CmdAddCom", "CmdDelCom", "CmdAllCom",
"CmdChannels", "CmdCdestroy", "CmdCBoot", "CmdCemit",
"CmdCWho", "CmdChannelCreate", "CmdClock", "CmdCdesc",
"CmdPage", "CmdIRC2Chan", "CmdRSS2Chan")#, "CmdIMC2Chan", "CmdIMCInfo",
#"CmdIMCTell")
def find_channel(caller, channelname, silent=False, noaliases=False):
    """
    Helper function for searching for a single channel with
    some error handling.
    """
    # Primary lookup by channel key.
    channels = ChannelDB.objects.channel_search(channelname)
    if not channels:
        # Fall back to matching against channel aliases unless disabled.
        if not noaliases:
            channels = [chan for chan in ChannelDB.objects.get_all_channels()
                        if channelname in chan.aliases.all()]
        if channels:
            return channels[0]
        # Nothing found; optionally tell the caller.
        if not silent:
            caller.msg("Channel '%s' not found." % channelname)
        return None
    elif len(channels) > 1:
        # Ambiguous channel name: report the candidates instead of guessing.
        matches = ", ".join(["%s(%s)" % (chan.key, chan.id) for chan in channels])
        if not silent:
            caller.msg("Multiple channels match (be more specific): \n%s" % matches)
        return None
    return channels[0]
class CmdAddCom(MuxPlayerCommand):
    """
    add a channel alias and/or subscribe to a channel
    Usage:
       addcom [alias=] <channel>
    Joins a given channel. If alias is given, this will allow you to
    refer to the channel by this alias rather than the full channel
    name. Subsequent calls of this command can be used to add multiple
    aliases to an already joined channel.
    """
    key = "addcom"
    aliases = ["aliaschan", "chanalias"]
    help_category = "Comms"
    locks = "cmd:not pperm(channel_banned)"

    def func(self):
        "Implement the command"
        # MuxPlayerCommand: self.caller is already the Player object.
        caller = self.caller
        args = self.args
        player = caller
        if not args:
            self.msg("Usage: addcom [alias =] channelname.")
            return
        if self.rhs:
            # rhs holds the channelname
            channelname = self.rhs
            alias = self.lhs
        else:
            channelname = self.args
            alias = None
        channel = find_channel(caller, channelname)
        if not channel:
            # we use the custom search method to handle errors.
            return
        # check permissions
        if not channel.access(player, 'listen'):
            self.msg("%s: You are not allowed to listen to this channel." % channel.key)
            return
        string = ""
        if not channel.has_connection(player):
            # we want to connect as well.
            if not channel.connect(player):
                # if this would have returned True, the player is connected
                self.msg("%s: You are not allowed to join this channel." % channel.key)
                return
            else:
                string += "You now listen to the channel %s. " % channel.key
        else:
            string += "You are already connected to channel %s." % channel.key
        if alias:
            # create a nick and add it to the caller.
            caller.nicks.add(alias, channel.key, category="channel")
            string += " You can now refer to the channel %s with the alias '%s'."
            self.msg(string % (channel.key, alias))
        else:
            string += " No alias added."
            self.msg(string)
class CmdDelCom(MuxPlayerCommand):
    """
    remove a channel alias and/or unsubscribe from channel
    Usage:
       delcom <alias or channel>
    If the full channel name is given, unsubscribe from the
    channel. If an alias is given, remove the alias but don't
    unsubscribe.
    """
    key = "delcom"
    # BUG FIX: the two aliases were previously joined into a single string
    # ("delaliaschan, delchanalias"), so neither alias actually worked.
    aliases = ["delaliaschan", "delchanalias"]
    help_category = "Comms"
    locks = "cmd:not perm(channel_banned)"

    def func(self):
        "Implementing the command. "
        # MuxPlayerCommand: self.caller is already the Player object.
        caller = self.caller
        player = caller
        if not self.args:
            self.msg("Usage: delcom <alias or channel>")
            return
        ostring = self.args.lower()
        # First interpret the argument as a full channel name.
        channel = find_channel(caller, ostring, silent=True, noaliases=True)
        if channel:
            # we have given a channel name - unsubscribe
            if not channel.has_connection(player):
                self.msg("You are not listening to that channel.")
                return
            chkey = channel.key.lower()
            # find all nicks linked to this channel and delete them
            for nick in [nick for nick in make_iter(caller.nicks.get(category="channel", return_obj=True))
                         if nick and nick.strvalue.lower() == chkey]:
                nick.delete()
            disconnect = channel.disconnect(player)
            if disconnect:
                self.msg("You stop listening to channel '%s'. Eventual aliases were removed." % channel.key)
            return
        else:
            # we are removing a channel nick
            channame = caller.nicks.get(key=ostring, category="channel")
            channel = find_channel(caller, channame, silent=True)
            if not channel:
                self.msg("No channel with alias '%s' was found." % ostring)
            else:
                if caller.nicks.get(ostring, category="channel"):
                    caller.nicks.remove(ostring, category="channel")
                    self.msg("Your alias '%s' for channel %s was cleared." % (ostring, channel.key))
                else:
                    self.msg("You had no such alias defined for this channel.")
class CmdAllCom(MuxPlayerCommand):
    """
    perform admin operations on all channels
    Usage:
      allcom [on | off | who | destroy]
    Allows the user to universally turn off or on all channels they are on,
    as well as perform a 'who' for all channels they are on. Destroy deletes
    all channels that you control.
    Without argument, works like comlist.
    """
    key = "allcom"
    locks = "cmd: not pperm(channel_banned)"
    help_category = "Comms"

    def func(self):
        "Runs the function"
        caller = self.caller
        args = self.args
        if not args:
            # No argument: show the channel listing plus a usage hint.
            caller.execute_cmd("@channels")
            self.msg("(Usage: allcom on | off | who | destroy)")
            return
        if args == "on":
            # get names of all channels available to listen to
            # and activate them all
            channels = [chan for chan in ChannelDB.objects.get_all_channels()
                        if chan.access(caller, 'listen')]
            for channel in channels:
                caller.execute_cmd("addcom %s" % channel.key)
        elif args == "off":
            #get names all subscribed channels and disconnect from them all
            channels = ChannelDB.objects.get_subscriptions(caller)
            for channel in channels:
                caller.execute_cmd("delcom %s" % channel.key)
        elif args == "destroy":
            # destroy all channels you control
            channels = [chan for chan in ChannelDB.objects.get_all_channels()
                        if chan.access(caller, 'control')]
            for channel in channels:
                caller.execute_cmd("@cdestroy %s" % channel.key)
        elif args == "who":
            # run a who, listing the subscribers on visible channels.
            string = "\n{CChannel subscriptions{n"
            channels = [chan for chan in ChannelDB.objects.get_all_channels()
                        if chan.access(caller, 'listen')]
            if not channels:
                string += "No channels."
            for channel in channels:
                string += "\n{w%s:{n\n" % channel.key
                subs = channel.db_subscriptions.all()
                if subs:
                    string += "  " + ", ".join([player.key for player in subs])
                else:
                    string += "  <None>"
            self.msg(string.strip())
        else:
            # wrong input
            # BUG FIX: the usage message previously advertised a
            # non-existent "clear" switch; the actual switch is "destroy".
            self.msg("Usage: allcom on | off | who | destroy")
class CmdChannels(MuxPlayerCommand):
    """
    list all channels available to you
    Usage:
      @channels
      @clist
      comlist
    Lists all channels available to you, whether you listen to them or not.
    Use 'comlist' to only view your current channel subscriptions.
    Use addcom/delcom to join and leave channels
    """
    key = "@channels"
    aliases = ["@clist", "channels", "comlist", "chanlist", "channellist", "all channels"]
    help_category = "Comms"
    locks = "cmd: not pperm(channel_banned)"

    def func(self):
        "Implement function"
        caller = self.caller
        # all channels we have available to listen to
        channels = [chan for chan in ChannelDB.objects.get_all_channels()
                    if chan.access(caller, 'listen')]
        #print channels
        if not channels:
            self.msg("No channels available.")
            return
        # all channel we are already subscribed to
        subs = ChannelDB.objects.get_subscriptions(caller)
        #print subs
        # Which alias the command was invoked under decides the view.
        if self.cmdstring == "comlist":
            # just display the subscribed channels with no extra info
            comtable = evtable.EvTable("{wchannel{n", "{wmy aliases{n", "{wdescription{n", align="l", maxwidth=78)
            #comtable = prettytable.PrettyTable(["{wchannel", "{wmy aliases", "{wdescription"])
            for chan in subs:
                clower = chan.key.lower()
                nicks = caller.nicks.get(category="channel")
                # NOTE(review): '"%s".join(...)' looks like it should be
                # '",".join(...)' -- joining nicks with a literal "%s"
                # separator seems unintended; verify against upstream.
                comtable.add_row(*["%s%s" % (chan.key, chan.aliases.all() and
                                  "(%s)" % ",".join(chan.aliases.all()) or ""),
                                  "%s".join(nick for nick in make_iter(nicks)
                                  if nick and nick.lower() == clower),
                                  chan.db.desc])
            caller.msg("\n{wChannel subscriptions{n (use {w@channels{n to list all, {waddcom{n/{wdelcom{n to sub/unsub):{n\n%s" % comtable)
        else:
            # full listing (of channels caller is able to listen to)
            comtable = evtable.EvTable("{wsub{n", "{wchannel{n", "{wmy aliases{n", "{wlocks{n", "{wdescription{n", maxwidth=78)
            #comtable = prettytable.PrettyTable(["{wsub", "{wchannel", "{wmy aliases", "{wlocks", "{wdescription"])
            for chan in channels:
                clower = chan.key.lower()
                nicks = caller.nicks.get(category="channel")
                nicks = nicks or []
                comtable.add_row(*[chan in subs and "{gYes{n" or "{rNo{n",
                                  "%s%s" % (chan.key, chan.aliases.all() and
                                  "(%s)" % ",".join(chan.aliases.all()) or ""),
                                  "%s".join(nick for nick in make_iter(nicks)
                                  if nick.lower() == clower),
                                  str(chan.locks),
                                  chan.db.desc])
            caller.msg("\n{wAvailable channels{n (use {wcomlist{n,{waddcom{n and {wdelcom{n to manage subscriptions):\n%s" % comtable)
class CmdCdestroy(MuxPlayerCommand):
    """
    destroy a channel you created
    Usage:
      @cdestroy <channel>
    Destroys a channel that you control.
    """
    key = "@cdestroy"
    help_category = "Comms"
    locks = "cmd: not pperm(channel_banned)"

    def func(self):
        "Destroy objects cleanly."
        caller = self.caller
        if not self.args:
            self.msg("Usage: @cdestroy <channelname>")
            return
        channel = find_channel(caller, self.args)
        if not channel:
            self.msg("Could not find channel %s." % self.args)
            return
        if not channel.access(caller, 'control'):
            self.msg("You are not allowed to do that.")
            return
        channel_key = channel.key
        # Warn all subscribers before the channel disappears, so they can
        # clean up their aliases.
        message = "%s is being destroyed. Make sure to change your aliases." % channel_key
        msgobj = create.create_message(caller, message, channel)
        channel.msg(msgobj)
        channel.delete()
        # Refresh the channel command handler so the old channel command
        # is no longer available.
        CHANNELHANDLER.update()
        self.msg("Channel '%s' was destroyed." % channel_key)
class CmdCBoot(MuxPlayerCommand):
    """
    kick a player from a channel you control
    Usage:
       @cboot[/quiet] <channel> = <player> [:reason]
    Switches:
       quiet - don't notify the channel
    Kicks a player or object from a channel you control.
    """
    key = "@cboot"
    locks = "cmd: not pperm(channel_banned)"
    help_category = "Comms"

    def func(self):
        "implement the function"
        if not self.args or not self.rhs:
            string = "Usage: @cboot[/quiet] <channel> = <player> [:reason]"
            self.msg(string)
            return
        channel = find_channel(self.caller, self.lhs)
        if not channel:
            return
        reason = ""
        # An optional ":reason" may trail the player name.
        if ":" in self.rhs:
            playername, reason = self.rhs.rsplit(":", 1)
            searchstring = playername.lstrip('*')
        else:
            searchstring = self.rhs.lstrip('*')
        player = self.caller.search(searchstring, player=True)
        if not player:
            return
        if reason:
            reason = " (reason: %s)" % reason
        if not channel.access(self.caller, "control"):
            string = "You don't control this channel."
            self.msg(string)
            return
        if not player.dbobj in channel.db_subscriptions.all():
            string = "Player %s is not connected to channel %s." % (player.key, channel.key)
            self.msg(string)
            return
        if not "quiet" in self.switches:
            string = "%s boots %s from channel.%s" % (self.caller, player.key, reason)
            channel.msg(string)
        # find all player's nicks linked to this channel and delete them
        # NOTE(review): compares nick.db_real.lower() against channel.key
        # without lowering the key -- mixed-case channel keys may never
        # match here; verify against upstream.
        for nick in [nick for nick in
                     player.character.nicks.get(category="channel") or []
                     if nick.db_real.lower() == channel.key]:
            nick.delete()
        # disconnect player
        channel.disconnect(player)
        CHANNELHANDLER.update()
class CmdCemit(MuxPlayerCommand):
    """
    send an admin message to a channel you control
    Usage:
      @cemit[/switches] <channel> = <message>
    Switches:
      sendername - attach the sender's name before the message
      quiet - don't echo the message back to sender
    Allows the user to broadcast a message over a channel as long as
    they control it. It does not show the user's name unless they
    provide the /sendername switch.
    """
    key = "@cemit"
    aliases = ["@cmsg"]
    locks = "cmd: not pperm(channel_banned) and pperm(Players)"
    help_category = "Comms"

    def func(self):
        "Implement function"
        if not self.args or not self.rhs:
            string = "Usage: @cemit[/switches] <channel> = <message>"
            self.msg(string)
            return
        channel = find_channel(self.caller, self.lhs)
        if not channel:
            return
        if not channel.access(self.caller, "control"):
            string = "You don't control this channel."
            self.msg(string)
            return
        message = self.rhs
        if "sendername" in self.switches:
            # NOTE(review): this prefixes the command's own key ("@cemit"),
            # not the caller's name, although the help text promises the
            # sender's name -- verify intended behavior against upstream.
            message = "%s: %s" % (self.key, message)
        channel.msg(message)
        if not "quiet" in self.switches:
            # Echo a confirmation back to the sender.
            string = "Sent to channel %s: %s" % (channel.key, message)
            self.msg(string)
class CmdCWho(MuxPlayerCommand):
    """
    show who is listening to a channel
    Usage:
      @cwho <channel>
    List who is connected to a given channel you have access to.
    """
    key = "@cwho"
    locks = "cmd: not pperm(channel_banned)"
    help_category = "Comms"

    def func(self):
        "implement function"
        if not self.args:
            string = "Usage: @cwho <channel>"
            self.msg(string)
            return
        channel = find_channel(self.caller, self.lhs)
        if not channel:
            return
        # Listening access is enough to view the subscriber list.
        if not channel.access(self.caller, "listen"):
            string = "You can't access this channel."
            self.msg(string)
            return
        string = "\n{CChannel subscriptions{n"
        string += "\n{w%s:{n\n" % channel.key
        subs = channel.db_subscriptions.all()
        if subs:
            string += "  " + ", ".join([player.key for player in subs])
        else:
            string += "  <None>"
        self.msg(string.strip())
class CmdChannelCreate(MuxPlayerCommand):
    """
    create a new channel
    Usage:
     @ccreate <new channel>[;alias;alias...] = description
    Creates a new channel owned by you.
    """
    key = "@ccreate"
    aliases = "channelcreate"
    locks = "cmd:not pperm(channel_banned) and pperm(Players)"
    help_category = "Comms"

    def func(self):
        "Implement the command"
        caller = self.caller
        if not self.args:
            self.msg("Usage @ccreate <channelname>[;alias;alias..] = description")
            return
        description = ""
        if self.rhs:
            description = self.rhs
        lhs = self.lhs
        channame = lhs
        aliases = None
        # Split off ";alias;alias..." suffixes; everything is lowercased.
        if ';' in lhs:
            channame, aliases = [part.strip().lower()
                                 for part in lhs.split(';', 1) if part.strip()]
            aliases = [alias.strip().lower()
                       for alias in aliases.split(';') if alias.strip()]
        channel = ChannelDB.objects.channel_search(channame)
        if channel:
            self.msg("A channel with that name already exists.")
            return
        # Create and set the channel up
        # The creator gets control; everyone may send/listen.
        lockstring = "send:all();listen:all();control:id(%s)" % caller.id
        new_chan = create.create_channel(channame,
                                         aliases,
                                         description,
                                         locks=lockstring)
        new_chan.connect(caller)
        self.msg("Created channel %s and connected to it." % new_chan.key)
class CmdClock(MuxPlayerCommand):
    """
    change channel locks of a channel you control
    Usage:
      @clock <channel> [= <lockstring>]
    Changes the lock access restrictions of a channel. If no
    lockstring was given, view the current lock definitions.
    """
    key = "@clock"
    locks = "cmd:not pperm(channel_banned)"
    # NOTE(review): this alias duplicates the command key and is redundant.
    aliases = ["@clock"]
    help_category = "Comms"

    def func(self):
        "run the function"
        if not self.args:
            string = "Usage: @clock channel [= lockstring]"
            self.msg(string)
            return
        channel = find_channel(self.caller, self.lhs)
        if not channel:
            return
        if not self.rhs:
            # no =, so just view the current locks
            string = "Current locks on %s:" % channel.key
            string = "%s\n %s" % (string, channel.locks)
            self.msg(string)
            return
        # we want to add/change a lock.
        if not channel.access(self.caller, "control"):
            string = "You don't control this channel."
            self.msg(string)
            return
        # Try to add the lock
        channel.locks.add(self.rhs)
        string = "Lock(s) applied. "
        string += "Current locks on %s:" % channel.key
        string = "%s\n %s" % (string, channel.locks)
        self.msg(string)
class CmdCdesc(MuxPlayerCommand):
    """
    describe a channel you control
    Usage:
      @cdesc <channel> = <description>
    Changes the description of the channel as shown in
    channel lists.
    """
    key = "@cdesc"
    locks = "cmd:not pperm(channel_banned)"
    help_category = "Comms"

    def func(self):
        "Implement command"
        caller = self.caller
        if not self.rhs:
            self.msg("Usage: @cdesc <channel> = <description>")
            return
        channel = find_channel(caller, self.lhs)
        if not channel:
            self.msg("Channel '%s' not found." % self.lhs)
            return
        #check permissions
        if not channel.access(caller, 'control'):
            self.msg("You cannot admin this channel.")
            return
        # set the description
        channel.db.desc = self.rhs
        channel.save()
        self.msg("Description of channel '%s' set to '%s'." % (channel.key,
                                                               self.rhs))
class CmdPage(MuxPlayerCommand):
    """
    send a private message to another player
    Usage:
      page[/switches] [<player>,<player>,... = <message>]
      tell        ''
      page <number>
    Switch:
      last - shows who you last messaged
      list - show your last <number> of tells/pages (default)
    Send a message to target user (if online). If no
    argument is given, you will get a list of your latest messages.
    """
    key = "page"
    aliases = ['tell']
    locks = "cmd:not pperm(page_banned)"
    help_category = "Comms"

    def func(self):
        "Implement function using the Msg methods"
        # NOTE(review): uses Python-2-only builtins (cmp, basestring)
        # further down -- this command will not run under Python 3.
        # this is a MuxPlayerCommand, which means caller will be a Player.
        caller = self.caller
        # get the messages we've sent (not to channels)
        pages_we_sent = Msg.objects.get_messages_by_sender(caller,
                                        exclude_channel_messages=True)
        # get last messages we've got
        pages_we_got = Msg.objects.get_messages_by_receiver(caller)
        if 'last' in self.switches:
            # only show who we last messaged (and what we said).
            if pages_we_sent:
                recv = ",".join(obj.key for obj in pages_we_sent[-1].receivers)
                self.msg("You last paged {c%s{n:%s" % (recv,
                                                    pages_we_sent[-1].message))
                return
            else:
                self.msg("You haven't paged anyone yet.")
                return
        if not self.args or not self.rhs:
            # No message to send: show the latest pages instead, sorted
            # chronologically across sent and received.
            pages = pages_we_sent + pages_we_got
            pages.sort(lambda x, y: cmp(x.date_sent, y.date_sent))
            number = 5
            if self.args:
                try:
                    number = int(self.args)
                except ValueError:
                    self.msg("Usage: tell [<player> = msg]")
                    return
            if len(pages) > number:
                lastpages = pages[-number:]
            else:
                lastpages = pages
            template = "{w%s{n {c%s{n to {c%s{n: %s"
            lastpages = "\n ".join(template %
                                   (utils.datetime_format(page.date_sent),
                                    ",".join(obj.key for obj in page.senders),
                                    "{n,{c ".join([obj.name for obj in page.receivers]),
                                    page.message) for page in lastpages)
            if lastpages:
                string = "Your latest pages:\n %s" % lastpages
            else:
                string = "You haven't paged anyone yet."
            self.msg(string)
            return
        # We are sending. Build a list of targets
        if not self.lhs:
            # If there are no targets, then set the targets
            # to the last person we paged.
            if pages_we_sent:
                receivers = pages_we_sent[-1].receivers
            else:
                self.msg("Who do you want to page?")
                return
        else:
            receivers = self.lhslist
        # Resolve each receiver to a player object; strings are searched,
        # player-like objects (with .character) are used as-is.
        recobjs = []
        for receiver in set(receivers):
            if isinstance(receiver, basestring):
                pobj = caller.search(receiver)
            elif hasattr(receiver, 'character'):
                pobj = receiver
            else:
                self.msg("Who do you want to page?")
                return
            if pobj:
                recobjs.append(pobj)
        if not recobjs:
            self.msg("Noone found to page.")
            return
        header = "{wPlayer{n {c%s{n {wpages:{n" % caller.key
        message = self.rhs
        # if message begins with a :, we assume it is a 'page-pose'
        if message.startswith(":"):
            message = "%s %s" % (caller.key, message.strip(':').strip())
        # create the persistent message object
        create.create_message(caller, message,
                              receivers=recobjs)
        # tell the players they got a message.
        received = []
        rstrings = []
        for pobj in recobjs:
            if not pobj.access(caller, 'msg'):
                rstrings.append("You are not allowed to page %s." % pobj)
                continue
            pobj.msg("%s %s" % (header, message))
            # Offline players still get the stored Msg; note it for sender.
            if hasattr(pobj, 'sessions') and not pobj.sessions:
                received.append("{C%s{n" % pobj.name)
                rstrings.append("%s is offline. They will see your message if they list their pages later." % received[-1])
            else:
                received.append("{c%s{n" % pobj.name)
        if rstrings:
            self.msg("\n".join(rstrings))
        self.msg("You paged %s with: '%s'." % (", ".join(received), message))
class CmdIRC2Chan(MuxCommand):
    """
    link an evennia channel to an external IRC channel

    Usage:
      @irc2chan[/switches] <evennia_channel> = <ircnetwork> <port> <#irchannel> <botname>
      @irc2chan/list
      @irc2chan/delete botname|#dbid

    Switches:
      /delete     - this will delete the bot and remove the irc connection
                    to the channel.
      /remove     - "
      /disconnect - "
      /list       - show all irc<->evennia mappings

    Example:
      @irc2chan myircchan = irc.dalnet.net 6667 myevennia-channel evennia-bot

    This creates an IRC bot that connects to a given IRC network and channel.
    It will relay everything said in the evennia channel to the IRC channel and
    vice versa. The bot will automatically connect at server start, so this
    command need only be given once. The /disconnect switch will permanently
    delete the bot. To only temporarily deactivate it, use the {w@services{n
    command instead.
    """
    key = "@irc2chan"
    locks = "cmd:serversetting(IRC_ENABLED) and pperm(Immortals)"
    help_category = "Comms"
    def func(self):
        "Setup the irc-channel mapping"
        # bail out early unless IRC support is switched on in settings
        if not settings.IRC_ENABLED:
            string = """IRC is not enabled. You need to activate it in game/settings.py."""
            self.msg(string)
            return
        if 'list' in self.switches:
            # show all connections -- bot players are flagged db_is_bot and
            # named "ircbot-<botname>"
            ircbots = [bot.typeclass for bot in PlayerDB.objects.filter(db_is_bot=True, username__startswith="ircbot-")]
            if ircbots:
                from src.utils.evtable import EvTable
                table = EvTable("{wdbid{n", "{wbotname{n", "{wev-channel{n", "{wirc-channel{n", maxwidth=78)
                for ircbot in ircbots:
                    ircinfo = "%s (%s:%s)" % (ircbot.db.irc_channel, ircbot.db.irc_network, ircbot.db.irc_port)
                    table.add_row(ircbot.id, ircbot.db.irc_botname, ircbot.db.ev_channel, ircinfo)
                self.caller.msg(table)
            else:
                self.msg("No irc bots found.")
            return
        if('disconnect' in self.switches or 'remove' in self.switches or
                                                    'delete' in self.switches):
            # delete a bot, matched first by name, then by #dbref
            botname = "ircbot-%s" % self.lhs
            matches = PlayerDB.objects.filter(db_is_bot=True, username=botname)
            dbref = utils.dbref(self.lhs)
            if not matches and dbref:
                # try dbref match
                matches = PlayerDB.objects.filter(db_is_bot=True, id=dbref)
            if matches:
                matches[0].delete()
                self.msg("IRC connection destroyed.")
            else:
                self.msg("IRC connection/bot could not be removed, does it exist?")
            return
        if not self.args or not self.rhs:
            string = "Usage: @irc2chan[/switches] <evennia_channel> = <ircnetwork> <port> <#irchannel> <botname>"
            self.msg(string)
            return
        # creating a new mapping: lhs is the evennia channel, rhs holds the
        # four space-separated IRC connection parts
        channel = self.lhs
        self.rhs = self.rhs.replace('#', ' ') # to avoid Python comment issues
        try:
            irc_network, irc_port, irc_channel, irc_botname = \
                       [part.strip() for part in self.rhs.split(None, 3)]
            irc_channel = "#%s" % irc_channel
        except Exception:
            string = "IRC bot definition '%s' is not valid." % self.rhs
            self.msg(string)
            return
        botname = "ircbot-%s" % irc_botname
        # create a new bot
        bot = PlayerDB.objects.filter(username__iexact=botname)
        if bot:
            # re-use an existing bot
            bot = bot[0].typeclass
            if not bot.is_bot:
                self.msg("Player '%s' already exists and is not a bot." % botname)
                return
        else:
            bot = create.create_player(botname, None, None, typeclass=bots.IRCBot)
        bot.start(ev_channel=channel, irc_botname=irc_botname, irc_channel=irc_channel,
                  irc_network=irc_network, irc_port=irc_port)
        self.msg("Connection created. Starting IRC bot.")
# RSS connection
class CmdRSS2Chan(MuxCommand):
    """
    link an evennia channel to an external RSS feed

    Usage:
      @rss2chan[/switches] <evennia_channel> = <rss_url>

    Switches:
      /disconnect - this will stop the feed and remove the connection to the
                    channel.
      /remove     - "
      /list       - show all rss->evennia mappings

    Example:
      @rss2chan rsschan = http://code.google.com/feeds/p/evennia/updates/basic

    This creates an RSS reader that connects to a given RSS feed url. Updates
    will be echoed as a title and news link to the given channel. The rate of
    updating is set with the RSS_UPDATE_INTERVAL variable in settings (default
    is every 10 minutes).

    When disconnecting you need to supply both the channel and url again so as
    to identify the connection uniquely.
    """
    key = "@rss2chan"
    locks = "cmd:serversetting(RSS_ENABLED) and pperm(Immortals)"
    help_category = "Comms"

    def func(self):
        "Setup the rss-channel mapping"
        # checking we have all we need
        if not settings.RSS_ENABLED:
            string = """RSS is not enabled. You need to activate it in game/settings.py."""
            self.msg(string)
            return
        try:
            import feedparser
            feedparser  # to avoid checker error of not being used
        except ImportError:
            string = ("RSS requires python-feedparser (https://pypi.python.org/pypi/feedparser). "
                      "Install before continuing.")
            self.msg(string)
            return
        if 'list' in self.switches:
            # show all connections -- bot players are flagged db_is_bot and
            # named "rssbot-<url>"
            rssbots = [bot.typeclass for bot in PlayerDB.objects.filter(db_is_bot=True, username__startswith="rssbot-")]
            if rssbots:
                from src.utils.evtable import EvTable
                table = EvTable("{wdbid{n", "{wupdate rate{n", "{wev-channel", "{wRSS feed URL{n", border="cells", maxwidth=78)
                for rssbot in rssbots:
                    table.add_row(rssbot.id, rssbot.db.rss_rate, rssbot.db.ev_channel, rssbot.db.rss_url)
                self.caller.msg(table)
            else:
                self.msg("No rss bots found.")
            return
        if('disconnect' in self.switches or 'remove' in self.switches or
                                                    'delete' in self.switches):
            botname = "rssbot-%s" % self.lhs
            # Look the bot up by its username -- the field actually set at
            # creation time (create_player) and the one the /list branch and
            # the sibling CmdIRC2Chan query. The previous db_key filter was
            # inconsistent with both.
            # NOTE(review): bots are created as "rssbot-<url>" but this key
            # uses the lhs (channel); the dbref fallback below is the
            # reliable removal path -- confirm intended usage.
            matches = PlayerDB.objects.filter(db_is_bot=True, username=botname)
            dbref = utils.dbref(self.lhs)
            if not matches and dbref:
                # try dbref match; utils.dbref() returns None for non-dbref
                # input instead of letting junk reach the id= query
                matches = PlayerDB.objects.filter(db_is_bot=True, id=dbref)
            if matches:
                matches[0].delete()
                self.msg("RSS connection destroyed.")
            else:
                self.msg("RSS connection/bot could not be removed, does it exist?")
            return
        if not self.args or not self.rhs:
            string = "Usage: @rss2chan[/switches] <evennia_channel> = <rss url>"
            self.msg(string)
            return
        # lhs is the evennia channel, rhs the feed url
        channel = self.lhs
        url = self.rhs
        botname = "rssbot-%s" % url
        # create a new bot
        bot = PlayerDB.objects.filter(username__iexact=botname)
        if bot:
            # re-use existing bot
            bot = bot[0].typeclass
            if not bot.is_bot:
                self.msg("Player '%s' already exists and is not a bot." % botname)
                return
        else:
            bot = create.create_player(botname, None, None, typeclass=bots.RSSBot)
        bot.start(ev_channel=channel, rss_url=url, rss_rate=10)
        self.msg("RSS reporter created. Fetching RSS.")
#class CmdIMC2Chan(MuxCommand):
# """
# link an evennia channel to an external IMC2 channel
#
# Usage:
# @imc2chan[/switches] <evennia_channel> = <imc2_channel>
#
# Switches:
# /disconnect - this clear the imc2 connection to the channel.
# /remove - "
# /list - show all imc2<->evennia mappings
#
# Example:
# @imc2chan myimcchan = ievennia
#
# Connect an existing evennia channel to a channel on an IMC2
# network. The network contact information is defined in settings and
# should already be accessed at this point. Use @imcchanlist to see
# available IMC channels.
#
# """
#
# key = "@imc2chan"
# locks = "cmd:serversetting(IMC2_ENABLED) and pperm(Immortals)"
# help_category = "Comms"
#
# def func(self):
# "Setup the imc-channel mapping"
#
# if not settings.IMC2_ENABLED:
# string = """IMC is not enabled. You need to activate it in game/settings.py."""
# self.msg(string)
# return
#
# if 'list' in self.switches:
# # show all connections
# connections = ExternalChannelConnection.objects.filter(db_external_key__startswith='imc2_')
# if connections:
# table = prettytable.PrettyTable(["Evennia channel", "IMC channel"])
# for conn in connections:
# table.add_row([conn.channel.key, conn.external_config])
# string = "{wIMC connections:{n\n%s" % table
# self.msg(string)
# else:
# self.msg("No connections found.")
# return
#
# if not self.args or not self.rhs:
# string = "Usage: @imc2chan[/switches] <evennia_channel> = <imc2_channel>"
# self.msg(string)
# return
#
# channel = self.lhs
# imc2_channel = self.rhs
#
# if('disconnect' in self.switches or 'remove' in self.switches or
# 'delete' in self.switches):
# # we don't search for channels before this since we want
# # to clear the link also if the channel no longer exists.
# ok = imc2.delete_connection(channel, imc2_channel)
# if not ok:
# self.msg("IMC2 connection could not be removed, does it exist?")
# else:
# self.msg("IMC2 connection destroyed.")
# return
#
# # actually get the channel object
# channel = find_channel(self.caller, channel)
# if not channel:
# return
#
# ok = imc2.create_connection(channel, imc2_channel)
# if not ok:
# self.msg("The connection %s <-> %s already exists." % (channel.key, imc2_channel))
# return
# self.msg("Created connection channel %s <-> IMC channel %s." % (channel.key, imc2_channel))
#
#
#class CmdIMCInfo(MuxCommand):
# """
# get various IMC2 information
#
# Usage:
# @imcinfo[/switches]
# @imcchanlist - list imc2 channels
# @imclist - list connected muds
# @imcwhois <playername> - whois info about a remote player
#
# Switches for @imcinfo:
# channels - as @imcchanlist (default)
# games or muds - as @imclist
# whois - as @imcwhois (requires an additional argument)
# update - force an update of all lists
#
# Shows lists of games or channels on the IMC2 network.
# """
#
# key = "@imcinfo"
# aliases = ["@imcchanlist", "@imclist", "@imcwhois"]
# locks = "cmd: serversetting(IMC2_ENABLED) and pperm(Wizards)"
# help_category = "Comms"
#
# def func(self):
# "Run the command"
#
# if not settings.IMC2_ENABLED:
# string = """IMC is not enabled. You need to activate it in game/settings.py."""
# self.msg(string)
# return
#
# if "update" in self.switches:
# # update the lists
# import time
# from src.comms.imc2lib import imc2_packets as pck
# from src.comms.imc2 import IMC2_MUDLIST, IMC2_CHANLIST, IMC2_CLIENT
# # update connected muds
# IMC2_CLIENT.send_packet(pck.IMC2PacketKeepAliveRequest())
# # prune inactive muds
# for name, mudinfo in IMC2_MUDLIST.mud_list.items():
# if time.time() - mudinfo.last_updated > 3599:
# del IMC2_MUDLIST.mud_list[name]
# # update channel list
# IMC2_CLIENT.send_packet(pck.IMC2PacketIceRefresh())
# self.msg("IMC2 lists were re-synced.")
#
# elif("games" in self.switches or "muds" in self.switches
# or self.cmdstring == "@imclist"):
# # list muds
# from src.comms.imc2 import IMC2_MUDLIST
#
# muds = IMC2_MUDLIST.get_mud_list()
# networks = set(mud.networkname for mud in muds)
# string = ""
# nmuds = 0
# for network in networks:
# table = prettytable.PrettyTable(["Name", "Url", "Host", "Port"])
# for mud in (mud for mud in muds if mud.networkname == network):
# nmuds += 1
# table.add_row([mud.name, mud.url, mud.host, mud.port])
# string += "\n{wMuds registered on %s:{n\n%s" % (network, table)
# string += "\n %i Muds found." % nmuds
# self.msg(string)
#
# elif "whois" in self.switches or self.cmdstring == "@imcwhois":
# # find out about a player
# if not self.args:
# self.msg("Usage: @imcwhois <playername>")
# return
# from src.comms.imc2 import IMC2_CLIENT
# self.msg("Sending IMC whois request. If you receive no response, no matches were found.")
# IMC2_CLIENT.msg_imc2(None,
# from_obj=self.caller,
# packet_type="imcwhois",
# target=self.args)
#
# elif(not self.switches or "channels" in self.switches or
# self.cmdstring == "@imcchanlist"):
# # show channels
# from src.comms.imc2 import IMC2_CHANLIST, IMC2_CLIENT
#
# channels = IMC2_CHANLIST.get_channel_list()
# string = ""
# nchans = 0
# table = prettytable.PrettyTable(["Full name", "Name", "Owner", "Perm", "Policy"])
# for chan in channels:
# nchans += 1
# table.add_row([chan.name, chan.localname, chan.owner,
# chan.level, chan.policy])
# string += "\n{wChannels on %s:{n\n%s" % (IMC2_CLIENT.factory.network, table)
# string += "\n%i Channels found." % nchans
# self.msg(string)
# else:
# # no valid inputs
# string = "Usage: imcinfo|imcchanlist|imclist"
# self.msg(string)
#
#
## unclear if this is working ...
#class CmdIMCTell(MuxCommand):
# """
# send a page to a remote IMC player
#
# Usage:
# imctell User@MUD = <msg>
# imcpage "
#
# Sends a page to a user on a remote MUD, connected
# over IMC2.
# """
#
# key = "imctell"
# aliases = ["imcpage", "imc2tell", "imc2page"]
# locks = "cmd: serversetting(IMC2_ENABLED)"
# help_category = "Comms"
#
# def func(self):
# "Send tell across IMC"
#
# if not settings.IMC2_ENABLED:
# string = """IMC is not enabled. You need to activate it in game/settings.py."""
# self.msg(string)
# return
#
# from src.comms.imc2 import IMC2_CLIENT
#
# if not self.args or not '@' in self.lhs or not self.rhs:
# string = "Usage: imctell User@Mud = <msg>"
# self.msg(string)
# return
# target, destination = self.lhs.split("@", 1)
# message = self.rhs.strip()
# data = {"target":target, "destination":destination}
#
# # send to imc2
# IMC2_CLIENT.msg_imc2(message, from_obj=self.caller, packet_type="imctell", **data)
#
# self.msg("You paged {c%s@%s{n (over IMC): '%s'." % (target, destination, message))
#
#
| GhostshipSoftware/avaloria | src/commands/default/comms.py | Python | bsd-3-clause | 41,902 |
#
# Copyright (C) 2009-2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Dave Lehman <dlehman@redhat.com>
#
"""This module provides functions related to OS installation."""
import shlex
import os
import stat
import time
import gi
gi.require_version("BlockDev", "1.0")
from gi.repository import BlockDev as blockdev
from . import util
from . import get_sysroot, get_target_physical_root, error_handler, ERROR_RAISE
from .storage_log import log_exception_info
from .devices import FileDevice, NFSDevice, NoDevice, OpticalDevice, NetworkStorageDevice, DirectoryDevice, MDRaidArrayDevice
from .errors import FSTabTypeMismatchError, UnrecognizedFSTabEntryError, StorageError, FSResizeError, FormatResizeError, UnknownSourceDeviceError
from .formats import get_device_format_class
from .formats import get_format
from .flags import flags
from .platform import platform as _platform
from .platform import EFI
from .i18n import _
import logging
log = logging.getLogger("blivet")
def release_from_redhat_release(fn):
"""
Attempt to identify the installation of a Linux distribution via
/etc/redhat-release. This file must already have been verified to exist
and be readable.
:param fn: an open filehandle on /etc/redhat-release
:type fn: filehandle
:returns: The distribution's name and version, or None for either or both
if they cannot be determined
:rtype: (string, string)
"""
rel_name = None
rel_ver = None
with open(fn) as f:
try:
relstr = f.readline().strip()
except (IOError, AttributeError):
relstr = ""
# get the release name and version
# assumes that form is something
# like "Red Hat Linux release 6.2 (Zoot)"
(product, sep, version) = relstr.partition(" release ")
if sep:
rel_name = product
rel_ver = version.split()[0]
return (rel_name, rel_ver)
def release_from_os_release(fn):
    """
    Attempt to identify the installation of a Linux distribution via
    /etc/os-release. This file must already have been verified to exist
    and be readable.

    :param fn: path to the os-release file
    :type fn: str
    :returns: The distribution's name and version, or None for either or both
              if they cannot be determined
    :rtype: (string, string)
    """
    name = None
    version = None
    with open(fn, "r") as handle:
        # os-release is shell-style KEY=VALUE; lex it and pick out the
        # NAME and VERSION_ID assignments
        lexer = shlex.shlex(handle)
        token = lexer.get_token()
        while token != lexer.eof:
            if token == "NAME":
                # Throw away the "=".
                lexer.get_token()
                name = lexer.get_token().strip("'\"")
            elif token == "VERSION_ID":
                # Throw away the "=".
                lexer.get_token()
                version = lexer.get_token().strip("'\"")
            token = lexer.get_token()
    return (name, version)
def get_release_string():
    """
    Attempt to identify the installation of a Linux distribution by checking
    a previously mounted filesystem for several files. The filesystem must
    be mounted under the target physical root.

    :returns: The machine's arch, distribution name, and distribution version
              or None for any parts that cannot be determined
    :rtype: (string, string, string)
    """
    name = None
    version = None
    try:
        arch = util.capture_output(["arch"], root=get_sysroot()).strip()
    except OSError:
        arch = None
    # prefer /etc/redhat-release; fall back to the generic /etc/os-release
    redhat_release = "%s/etc/redhat-release" % get_sysroot()
    os_release = "%s/etc/os-release" % get_sysroot()
    if os.access(redhat_release, os.R_OK):
        (name, version) = release_from_redhat_release(redhat_release)
    elif os.access(os_release, os.R_OK):
        (name, version) = release_from_os_release(os_release)
    return (arch, name, version)
def parse_fstab(devicetree, chroot=None):
    """ parse /etc/fstab and return a tuple of a mount dict and swap list """
    if not chroot or not os.path.isdir(chroot):
        chroot = get_sysroot()
    mounts = {}
    swaps = []
    fstab_path = "%s/etc/fstab" % chroot
    if not os.access(fstab_path, os.R_OK):
        # XXX should we raise an exception instead?
        log.info("cannot open %s for read", fstab_path)
        return (mounts, swaps)
    # blkid.tab and crypttab are optional helpers for resolving devspecs;
    # parse failures just disable them
    blkid_tab = BlkidTab(chroot=chroot)
    try:
        blkid_tab.parse()
        log.debug("blkid.tab devs: %s", list(blkid_tab.devices.keys()))
    except Exception:  # pylint: disable=broad-except
        log_exception_info(log.info, "error parsing blkid.tab")
        blkid_tab = None
    crypt_tab = CryptTab(devicetree, blkid_tab=blkid_tab, chroot=chroot)
    try:
        crypt_tab.parse(chroot=chroot)
        log.debug("crypttab maps: %s", list(crypt_tab.mappings.keys()))
    except Exception:  # pylint: disable=broad-except
        log_exception_info(log.info, "error parsing crypttab")
        crypt_tab = None
    with open(fstab_path) as fobj:
        log.debug("parsing %s", fstab_path)
        for raw_line in fobj.readlines():
            # strip trailing comments, then split into the five fstab fields
            (content, _pound, _comment) = raw_line.partition("#")
            fields = content.split(None, 4)
            if len(fields) < 5:
                continue
            (devspec, mountpoint, fstype, options, _rest) = fields
            # find device in the tree
            device = devicetree.resolve_device(devspec,
                                               crypt_tab=crypt_tab,
                                               blkid_tab=blkid_tab,
                                               options=options)
            if device is None:
                continue
            if fstype == "swap":
                swaps.append(device)
            else:
                mounts[mountpoint] = device
    return (mounts, swaps)
def find_existing_installations(devicetree, teardown_all=True):
    """Find existing GNU/Linux installations on devices from the devicetree.

    :param devicetree: devicetree to find existing installations in
    :type devicetree: :class:`~.devicetree.DeviceTree`
    :param bool teardown_all: whether to tear down all devices in the
                              devicetree in the end
    :return: roots of all found installations
    :rtype: list of :class:`Root`
    """
    found = []
    try:
        found = _find_existing_installations(devicetree)
    except Exception:  # pylint: disable=broad-except
        # best-effort detection: log and return an empty result
        log_exception_info(log.info, "failure detecting existing installations")
    finally:
        if teardown_all:
            devicetree.teardown_all()
    return found
def _find_existing_installations(devicetree):
    """Mount each candidate leaf device read-only and look for an fstab.

    For every controllable, mountable, linux-native leaf device: set it up,
    mount it read-only at the sysroot, check for /etc/fstab, derive a
    human-readable name from the release files, parse the fstab, then
    unmount again. Returns a list of Root objects, one per installation.
    """
    if not os.path.exists(get_target_physical_root()):
        util.makedirs(get_target_physical_root())
    roots = []
    for device in devicetree.leaves:
        # only consider devices that could plausibly hold a Linux root fs
        if not device.format.linux_native or not device.format.mountable or \
           not device.controllable:
            continue
        try:
            device.setup()
        except Exception:  # pylint: disable=broad-except
            log_exception_info(log.warning, "setup of %s failed", [device.name])
            continue
        options = device.format.options + ",ro"
        try:
            device.format.mount(options=options, mountpoint=get_sysroot())
        except Exception:  # pylint: disable=broad-except
            log_exception_info(log.warning, "mount of %s as %s failed", [device.name, device.format.type])
            util.umount(mountpoint=get_sysroot())
            continue
        # no fstab means this is not a (usable) installation root
        if not os.access(get_sysroot() + "/etc/fstab", os.R_OK):
            util.umount(mountpoint=get_sysroot())
            device.teardown(recursive=True)
            continue
        try:
            (architecture, product, version) = get_release_string()
        except ValueError:
            name = _("Linux on %s") % device.name
        else:
            # I'd like to make this finer grained, but it'd be very difficult
            # to translate.
            if not product or not version or not architecture:
                name = _("Unknown Linux")
            elif "linux" in product.lower():
                name = _("%(product)s %(version)s for %(arch)s") % \
                    {"product": product, "version": version, "arch": architecture}
            else:
                name = _("%(product)s Linux %(version)s for %(arch)s") % \
                    {"product": product, "version": version, "arch": architecture}
        (mounts, swaps) = parse_fstab(devicetree, chroot=get_sysroot())
        # always unmount before moving on to the next candidate
        util.umount(mountpoint=get_sysroot())
        if not mounts and not swaps:
            # empty /etc/fstab. weird, but I've seen it happen.
            continue
        roots.append(Root(mounts=mounts, swaps=swaps, name=name))
    return roots
class FSSet(object):
""" A class to represent a set of filesystems. """
    def __init__(self, devicetree):
        """
        :param devicetree: the tree of devices whose filesystems this set
                           represents
        :type devicetree: :class:`~.devicetree.DeviceTree`
        """
        self.devicetree = devicetree
        self.crypt_tab = None
        self.blkid_tab = None
        # original /etc/fstab contents, saved by parse_fstab()
        self.orig_fstab = None
        # True while the filesystems are mounted (see mount_filesystems)
        self.active = False
        # backing fields for the lazily-created pseudo/bind devices exposed
        # by the properties below
        self._dev = None
        self._devpts = None
        self._sysfs = None
        self._proc = None
        self._devshm = None
        self._usb = None
        self._selinux = None
        self._run = None
        self._efivars = None
        self._fstab_swaps = set()
        self.preserve_lines = []     # lines we just ignore and preserve
@property
def sysfs(self):
if not self._sysfs:
self._sysfs = NoDevice(fmt=get_format("sysfs", device="sysfs", mountpoint="/sys"))
return self._sysfs
@property
def dev(self):
if not self._dev:
self._dev = DirectoryDevice("/dev",
fmt=get_format("bind", device="/dev", mountpoint="/dev", exists=True),
exists=True)
return self._dev
@property
def devpts(self):
if not self._devpts:
self._devpts = NoDevice(fmt=get_format("devpts", device="devpts", mountpoint="/dev/pts"))
return self._devpts
@property
def proc(self):
if not self._proc:
self._proc = NoDevice(fmt=get_format("proc", device="proc", mountpoint="/proc"))
return self._proc
@property
def devshm(self):
if not self._devshm:
self._devshm = NoDevice(fmt=get_format("tmpfs", device="tmpfs", mountpoint="/dev/shm"))
return self._devshm
@property
def usb(self):
if not self._usb:
self._usb = NoDevice(fmt=get_format("usbfs", device="usbfs", mountpoint="/proc/bus/usb"))
return self._usb
@property
def selinux(self):
if not self._selinux:
self._selinux = NoDevice(fmt=get_format("selinuxfs", device="selinuxfs", mountpoint="/sys/fs/selinux"))
return self._selinux
@property
def efivars(self):
if not self._efivars:
self._efivars = NoDevice(fmt=get_format("efivarfs", device="efivarfs", mountpoint="/sys/firmware/efi/efivars"))
return self._efivars
@property
def run(self):
if not self._run:
self._run = DirectoryDevice("/run",
fmt=get_format("bind", device="/run", mountpoint="/run", exists=True),
exists=True)
return self._run
@property
def devices(self):
return sorted(self.devicetree.devices, key=lambda d: d.path)
@property
def mountpoints(self):
filesystems = {}
for device in self.devices:
if device.format.mountable and device.format.mountpoint:
filesystems[device.format.mountpoint] = device
return filesystems
    def _parse_one_line(self, devspec, mountpoint, fstype, options, _dump="0", _passno="0"):
        """Parse an fstab entry for a device, return the corresponding device.

        The parameters correspond to the items in a single entry in the
        order in which they occur in the entry.

        :param devspec: the device specifier (path, UUID=, LABEL=, ...)
        :param mountpoint: the mountpoint field
        :param fstype: the filesystem type field
        :param options: the comma-separated mount options field
        :param _dump: the dump field (ignored)
        :param _passno: the fsck pass field (ignored)
        :returns: the device corresponding to the entry
        :rtype: :class:`devices.Device`
        :raises UnrecognizedFSTabEntryError: for noauto/unresolvable entries
        :raises FSTabTypeMismatchError: when fstab's type contradicts probing
        """
        # no sense in doing any legwork for a noauto entry
        if "noauto" in options.split(","):
            log.info("ignoring noauto entry")
            raise UnrecognizedFSTabEntryError()
        # find device in the tree
        device = self.devicetree.resolve_device(devspec,
                                                crypt_tab=self.crypt_tab,
                                                blkid_tab=self.blkid_tab,
                                                options=options)
        if device:
            # fall through to the bottom of this block
            pass
        elif devspec.startswith("/dev/loop"):
            # FIXME: create devices.LoopDevice
            log.warning("completely ignoring your loop mount")
        elif ":" in devspec and fstype.startswith("nfs"):
            # NFS -- preserve but otherwise ignore
            device = NFSDevice(devspec,
                               fmt=get_format(fstype,
                                              exists=True,
                                              device=devspec))
        elif devspec.startswith("/") and fstype == "swap":
            # swap file
            device = FileDevice(devspec,
                                parents=get_containing_device(devspec, self.devicetree),
                                fmt=get_format(fstype,
                                               device=devspec,
                                               exists=True),
                                exists=True)
        elif fstype == "bind" or "bind" in options:
            # bind mount... set fstype so later comparison won't
            # turn up false positives
            fstype = "bind"
            # This is probably not going to do anything useful, so we'll
            # make sure to try again from FSSet.mount_filesystems. The bind
            # mount targets should be accessible by the time we try to do
            # the bind mount from there.
            parents = get_containing_device(devspec, self.devicetree)
            device = DirectoryDevice(devspec, parents=parents, exists=True)
            device.format = get_format("bind",
                                       device=device.path,
                                       exists=True)
        elif mountpoint in ("/proc", "/sys", "/dev/shm", "/dev/pts",
                            "/sys/fs/selinux", "/proc/bus/usb", "/sys/firmware/efi/efivars"):
            # drop these now -- we'll recreate later
            return None
        else:
            # nodev filesystem -- preserve or drop completely?
            fmt = get_format(fstype)
            fmt_class = get_device_format_class("nodev")
            if devspec == "none" or \
               (fmt_class and isinstance(fmt, fmt_class)):
                device = NoDevice(fmt=fmt)
        if device is None:
            log.error("failed to resolve %s (%s) from fstab", devspec,
                      fstype)
            raise UnrecognizedFSTabEntryError()
        device.setup()
        # probe a fresh format instance for comparison against the fstab type
        fmt = get_format(fstype, device=device.path, exists=True)
        if fstype != "auto" and None in (device.format.type, fmt.type):
            log.info("Unrecognized filesystem type for %s (%s)",
                     device.name, fstype)
            device.teardown()
            raise UnrecognizedFSTabEntryError()
        # make sure, if we're using a device from the tree, that
        # the device's format we found matches what's in the fstab
        ftype = getattr(fmt, "mount_type", fmt.type)
        dtype = getattr(device.format, "mount_type", device.format.type)
        if hasattr(fmt, "test_mount") and fstype != "auto" and ftype != dtype:
            log.info("fstab says %s at %s is %s", dtype, mountpoint, ftype)
            # trust fstab only if that type can actually be mounted here
            if fmt.test_mount():     # pylint: disable=no-member
                device.format = fmt
            else:
                device.teardown()
                raise FSTabTypeMismatchError("%s: detected as %s, fstab says %s"
                                             % (mountpoint, dtype, ftype))
        del ftype
        del dtype
        if hasattr(device.format, "mountpoint"):
            device.format.mountpoint = mountpoint
            device.format.options = options
        return device
    def parse_fstab(self, chroot=None):
        """ Parse /etc/fstab under *chroot* and populate the devicetree.

        :param chroot: root directory containing etc/fstab; defaults to the
                       sysroot when missing or not a directory

        preconditions:
            all storage devices have been scanned, including filesystems

        postconditions:
            unparseable/duplicate lines are saved in self.preserve_lines and
            self.orig_fstab holds the original file contents

        FIXME: control which exceptions we raise
        XXX do we care about bind mounts?
            how about nodev mounts?
            loop mounts?
        """
        if not chroot or not os.path.isdir(chroot):
            chroot = get_sysroot()
        path = "%s/etc/fstab" % chroot
        if not os.access(path, os.R_OK):
            # XXX should we raise an exception instead?
            log.info("cannot open %s for read", path)
            return
        # helper tables for devspec resolution; failures just disable them
        blkid_tab = BlkidTab(chroot=chroot)
        try:
            blkid_tab.parse()
            log.debug("blkid.tab devs: %s", list(blkid_tab.devices.keys()))
        except Exception:  # pylint: disable=broad-except
            log_exception_info(log.info, "error parsing blkid.tab")
            blkid_tab = None
        crypt_tab = CryptTab(self.devicetree, blkid_tab=blkid_tab, chroot=chroot)
        try:
            crypt_tab.parse(chroot=chroot)
            log.debug("crypttab maps: %s", list(crypt_tab.mappings.keys()))
        except Exception:  # pylint: disable=broad-except
            log_exception_info(log.info, "error parsing crypttab")
            crypt_tab = None
        self.blkid_tab = blkid_tab
        self.crypt_tab = crypt_tab
        with open(path) as f:
            log.debug("parsing %s", path)
            lines = f.readlines()
            # save the original file
            self.orig_fstab = ''.join(lines)
            for line in lines:
                # strip comments, then split into 4-6 fstab fields
                (line, _pound, _comment) = line.partition("#")
                fields = line.split()
                if not 4 <= len(fields) <= 6:
                    continue
                try:
                    device = self._parse_one_line(*fields)
                except UnrecognizedFSTabEntryError:
                    # just write the line back out as-is after upgrade
                    self.preserve_lines.append(line)
                    continue
                if not device:
                    continue
                if device not in self.devicetree.devices:
                    try:
                        self.devicetree._add_device(device)
                    except ValueError:
                        # just write duplicates back out post-install
                        self.preserve_lines.append(line)
    def turn_on_swap(self, root_path=""):
        """ Activate the system's swap space.

        :param str root_path: prefix under which swap-file paths live

        Swap files get their containing device resolved first (they are only
        reachable once the filesystems are mounted). Activation failures are
        routed through the error handler, which decides whether to retry or
        re-raise.
        """
        if not flags.installer_mode:
            return
        for device in self.swap_devices:
            if isinstance(device, FileDevice):
                # set up FileDevices' parents now that they are accessible
                target_dir = "%s/%s" % (root_path, device.path)
                parent = get_containing_device(target_dir, self.devicetree)
                if not parent:
                    log.error("cannot determine which device contains "
                              "directory %s", device.path)
                    device.parents = []
                    self.devicetree._remove_device(device)
                    continue
                else:
                    device.parents = [parent]
            # retry loop: the error handler may ask us to try again
            while True:
                try:
                    device.setup()
                    device.format.setup()
                except (StorageError, blockdev.BlockDevError) as e:
                    if error_handler.cb(e) == ERROR_RAISE:
                        raise
                else:
                    break
    def mount_filesystems(self, root_path="", read_only=None, skip_root=False):
        """ Mount the system's filesystems.

        :param str root_path: the root directory for this filesystem
        :param read_only: read only option str for this filesystem
        :type read_only: str or None
        :param bool skip_root: whether to skip mounting the root filesystem
        """
        if not flags.installer_mode:
            return
        devices = list(self.mountpoints.values()) + self.swap_devices
        # include the pseudo filesystems (proc, sysfs, ...) and bind mounts
        devices.extend([self.dev, self.devshm, self.devpts, self.sysfs,
                        self.proc, self.selinux, self.usb, self.run])
        if isinstance(_platform, EFI):
            devices.append(self.efivars)
        # sort by mountpoint so parents are mounted before nested mounts
        devices.sort(key=lambda d: getattr(d.format, "mountpoint", ""))
        for device in devices:
            if not device.format.mountable or not device.format.mountpoint:
                continue
            if skip_root and device.format.mountpoint == "/":
                continue
            options = device.format.options
            if "noauto" in options.split(","):
                continue
            if device.format.type == "bind" and device not in [self.dev, self.run]:
                # set up the DirectoryDevice's parents now that they are
                # accessible
                #
                # -- bind formats' device and mountpoint are always both
                #    under the chroot. no exceptions. none, damn it.
                target_dir = "%s/%s" % (root_path, device.path)
                parent = get_containing_device(target_dir, self.devicetree)
                if not parent:
                    log.error("cannot determine which device contains "
                              "directory %s", device.path)
                    device.parents = []
                    self.devicetree._remove_device(device)
                    continue
                else:
                    device.parents = [parent]
            try:
                device.setup()
            except Exception as e:  # pylint: disable=broad-except
                log_exception_info(fmt_str="unable to set up device %s", fmt_args=[device])
                if error_handler.cb(e) == ERROR_RAISE:
                    raise
                else:
                    continue
            if read_only:
                options = "%s,%s" % (options, read_only)
            try:
                device.format.setup(options=options,
                                    chroot=root_path)
            except Exception as e:  # pylint: disable=broad-except
                log_exception_info(log.error, "error mounting %s on %s", [device.path, device.format.mountpoint])
                if error_handler.cb(e) == ERROR_RAISE:
                    raise
        self.active = True
def umount_filesystems(self, swapoff=True):
""" unmount filesystems, except swap if swapoff == False """
devices = list(self.mountpoints.values()) + self.swap_devices
devices.extend([self.dev, self.devshm, self.devpts, self.sysfs,
self.proc, self.usb, self.selinux, self.run])
if isinstance(_platform, EFI):
devices.append(self.efivars)
devices.sort(key=lambda d: getattr(d.format, "mountpoint", ""))
devices.reverse()
for device in devices:
if (not device.format.mountable) or \
(device.format.type == "swap" and not swapoff):
continue
# Unmount the devices
device.format.teardown()
self.active = False
def create_swap_file(self, device, size):
""" Create and activate a swap file under storage root. """
filename = "/SWAP"
count = 0
basedir = os.path.normpath("%s/%s" % (get_target_physical_root(),
device.format.mountpoint))
while os.path.exists("%s/%s" % (basedir, filename)) or \
self.devicetree.get_device_by_name(filename):
count += 1
filename = "/SWAP-%d" % count
dev = FileDevice(filename,
size=size,
parents=[device],
fmt=get_format("swap", device=filename))
dev.create()
dev.setup()
dev.format.create()
dev.format.setup()
# nasty, nasty
self.devicetree._add_device(dev)
def mk_dev_root(self):
root = self.root_device
dev = "%s/%s" % (get_sysroot(), root.path)
if not os.path.exists("%s/dev/root" % (get_sysroot(),)) and os.path.exists(dev):
rdev = os.stat(dev).st_rdev
os.mknod("%s/dev/root" % (get_sysroot(),), stat.S_IFBLK | 0o600, rdev)
@property
def swap_devices(self):
    """All devices in the tree formatted as swap."""
    return [device for device in self.devices
            if device.format.type == "swap"]
@property
def root_device(self):
    """The device mounted at "/" (falling back to the physical root), or None."""
    for candidate_path in ["/", get_target_physical_root()]:
        for device in self.devices:
            # Formats without a mountpoint attribute simply never match.
            if getattr(device.format, "mountpoint", None) == candidate_path:
                return device
def write(self):
    """ write out all config files based on the set of filesystems

    Writes /etc/fstab, /etc/crypttab, /etc/mdadm.conf and (if multipath
    devices are in use) the multipath configuration into the sysroot.
    """
    # /etc/fstab
    fstab_path = os.path.normpath("%s/etc/fstab" % get_sysroot())
    # Use context managers so the files are flushed and closed even if a
    # later step raises (the originals leaked open file objects).
    with open(fstab_path, "w") as f:
        f.write(self.fstab())

    # /etc/crypttab -- may reference key files, so create it with a
    # restrictive umask (0o077 -> mode 0600).
    crypttab_path = os.path.normpath("%s/etc/crypttab" % get_sysroot())
    crypttab = self.crypttab()
    origmask = os.umask(0o077)
    try:
        with open(crypttab_path, "w") as f:
            f.write(crypttab)
    finally:
        # Always restore the caller's umask, even if the write fails.
        os.umask(origmask)

    # /etc/mdadm.conf -- only written when there are MD arrays to list.
    mdadm_path = os.path.normpath("%s/etc/mdadm.conf" % get_sysroot())
    mdadm_conf = self.mdadm_conf()
    if mdadm_conf:
        with open(mdadm_path, "w") as f:
            f.write(mdadm_conf)

    # /etc/multipath.conf -- copy host config into the target only when a
    # device actually uses device-mapper multipath.
    if any(d for d in self.devices if d.type == "dm-multipath"):
        util.copy_to_system("/etc/multipath.conf")
        util.copy_to_system("/etc/multipath/wwids")
        util.copy_to_system("/etc/multipath/bindings")
    else:
        log.info("not writing out mpath configuration")
def crypttab(self):
    """Return the contents of /etc/crypttab, pruned to required mappings."""
    # if we are upgrading, do we want to update crypttab?
    # gut reaction says no, but plymouth needs the names to be very
    # specific for passphrase prompting
    if not self.crypt_tab:
        self.crypt_tab = CryptTab(self.devicetree)
        self.crypt_tab.populate()

    devices = list(self.mountpoints.values()) + self.swap_devices

    # prune crypttab -- only mappings required by one or more entries
    for name in list(self.crypt_tab.mappings.keys()):
        keep = False
        map_info = self.crypt_tab[name]
        crypto_dev = map_info['device']
        for device in devices:
            if device == crypto_dev or device.depends_on(crypto_dev):
                keep = True
                break

        if not keep:
            del self.crypt_tab.mappings[name]

    return self.crypt_tab.crypttab()
def mdadm_conf(self):
    """ Return the contents of mdadm.conf. """
    arrays = [d for d in self.devices if isinstance(d, MDRaidArrayDevice)]
    # Sort it, this not only looks nicer, but this will also put
    # containers (which get md0, md1, etc.) before their members
    # (which get md127, md126, etc.). and lame as it is mdadm will not
    # assemble the whole stack in one go unless listed in the proper order
    # in mdadm.conf
    arrays.sort(key=lambda d: d.path)
    if not arrays:
        return ""

    lines = ["# mdadm.conf written out by anaconda\n",
             "MAILADDR root\n",
             "AUTO +imsm +1.x -all\n"]
    in_use = list(self.mountpoints.values()) + self.swap_devices
    for array in arrays:
        # Emit an ARRAY line only for arrays some in-use device needs.
        if any(device == array or device.depends_on(array)
               for device in in_use):
            lines.append(array.mdadm_conf_entry)
    return "".join(lines)
def fstab(self):
    """Generate the contents of /etc/fstab for the configured filesystems."""
    # devspec, mountpoint, fstype, options, dump, passno
    fmt_str = "%-23s %-23s %-7s %-15s %d %d\n"
    fstab = """
#
# /etc/fstab
# Created by anaconda on %s
#
# Accessible filesystems, by reference, are maintained under '/dev/disk'
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info
#
""" % time.asctime()

    devices = sorted(self.mountpoints.values(),
                     key=lambda d: d.format.mountpoint)
    # filter swaps only in installer mode
    if flags.installer_mode:
        devices += [dev for dev in self.swap_devices
                    if dev in self._fstab_swaps]
    else:
        devices += self.swap_devices

    netdevs = [d for d in self.devices if isinstance(d, NetworkStorageDevice)]

    rootdev = devices[0]
    root_on_netdev = any(rootdev.depends_on(netdev) for netdev in netdevs)

    for device in devices:
        # why the hell do we put swap in the fstab, anyway?
        if not device.format.mountable and device.format.type != "swap":
            continue

        # Don't write out lines for optical devices, either.
        if isinstance(device, OpticalDevice):
            continue

        fstype = getattr(device.format, "mount_type", device.format.type)
        if fstype == "swap":
            mountpoint = "swap"
            options = device.format.options
        else:
            mountpoint = device.format.mountpoint
            options = device.format.options
            if not mountpoint:
                log.warning("%s filesystem on %s has no mountpoint",
                            fstype,
                            device.path)
                continue

        options = options or "defaults"
        for netdev in netdevs:
            if device.depends_on(netdev):
                # When root itself is on the network, /var must be mounted
                # from the initramfs so logging etc. is available early.
                if root_on_netdev and mountpoint == "/var":
                    options = options + ",x-initrd.mount"
                break

        if device.encrypted:
            # Don't let systemd time out waiting for encrypted devices.
            options += ",x-systemd.device-timeout=0"

        devspec = device.fstab_spec
        dump = device.format.dump
        # fsck ordering: root is checked first, other checkable
        # filesystems second, everything else never.
        if device.format.check and mountpoint == "/":
            passno = 1
        elif device.format.check:
            passno = 2
        else:
            passno = 0

        fstab = fstab + device.fstab_comment
        fstab = fstab + fmt_str % (devspec, mountpoint, fstype,
                                   options, dump, passno)

    # now, write out any lines we were unable to process because of
    # unrecognized filesystems or unresolveable device specifications
    for line in self.preserve_lines:
        fstab += line

    return fstab
def add_fstab_swap(self, device):
    """
    Add swap device to the list of swaps that should appear in the fstab.

    :param device: swap device that should be added to the list
    :type device: blivet.devices.StorageDevice instance holding a swap format
    """
    # Set membership makes repeated additions harmless.
    self._fstab_swaps.add(device)
def remove_fstab_swap(self, device):
    """
    Remove swap device from the list of swaps that should appear in the fstab.

    :param device: swap device that should be removed from the list
    :type device: blivet.devices.StorageDevice instance holding a swap format
    """
    # set.discard is a no-op for missing members, matching the original
    # try/remove/except-KeyError behavior exactly.
    self._fstab_swaps.discard(device)
def set_fstab_swaps(self, devices):
    """
    Set swap devices that should appear in the fstab.

    :param devices: iterable providing devices that should appear in the fstab
    :type devices: iterable providing blivet.devices.StorageDevice instances holding
                   a swap format
    """
    # Replaces any previous selection wholesale.
    self._fstab_swaps = set(devices)
class Root(object):
""" A Root represents an existing OS installation. """
def __init__(self, mounts=None, swaps=None, name=None):
"""
:keyword mounts: mountpoint dict
:type mounts: dict (mountpoint keys and :class:`~.devices.StorageDevice` values)
:keyword swaps: swap device list
:type swaps: list of :class:`~.devices.StorageDevice`
:keyword name: name for this installed OS
:type name: str
"""
# mountpoint key, StorageDevice value
if not mounts:
self.mounts = {}
else:
self.mounts = mounts
# StorageDevice
if not swaps:
self.swaps = []
else:
self.swaps = swaps
self.name = name # eg: "Fedora Linux 16 for x86_64", "Linux on sda2"
if not self.name and "/" in self.mounts:
self.name = self.mounts["/"].format.uuid
@property
def device(self):
return self.mounts.get("/")
class BlkidTab(object):
""" Dictionary-like interface to blkid.tab with device path keys """
def __init__(self, chroot=""):
self.chroot = chroot
self.devices = {}
def parse(self):
path = "%s/etc/blkid/blkid.tab" % self.chroot
log.debug("parsing %s", path)
with open(path) as f:
for line in f.readlines():
# this is pretty ugly, but an XML parser is more work than
# is justifiable for this purpose
if not line.startswith("<device "):
continue
line = line[len("<device "):-len("</device>\n")]
(data, _sep, device) = line.partition(">")
if not device:
continue
self.devices[device] = {}
for pair in data.split():
try:
(key, value) = pair.split("=")
except ValueError:
continue
self.devices[device][key] = value[1:-1] # strip off quotes
def __getitem__(self, key):
return self.devices[key]
def get(self, key, default=None):
return self.devices.get(key, default)
class CryptTab(object):
""" Dictionary-like interface to crypttab entries with map name keys """
def __init__(self, devicetree, blkid_tab=None, chroot=""):
self.devicetree = devicetree
self.blkid_tab = blkid_tab
self.chroot = chroot
self.mappings = {}
def parse(self, chroot=""):
""" Parse /etc/crypttab from an existing installation. """
if not chroot or not os.path.isdir(chroot):
chroot = ""
path = "%s/etc/crypttab" % chroot
log.debug("parsing %s", path)
with open(path) as f:
if not self.blkid_tab:
try:
self.blkid_tab = BlkidTab(chroot=chroot)
self.blkid_tab.parse()
except Exception: # pylint: disable=broad-except
log_exception_info(fmt_str="failed to parse blkid.tab")
self.blkid_tab = None
for line in f.readlines():
(line, _pound, _comment) = line.partition("#")
fields = line.split()
if not 2 <= len(fields) <= 4:
continue
elif len(fields) == 2:
fields.extend(['none', ''])
elif len(fields) == 3:
fields.append('')
(name, devspec, keyfile, options) = fields
# resolve devspec to a device in the tree
device = self.devicetree.resolve_device(devspec,
blkid_tab=self.blkid_tab)
if device:
self.mappings[name] = {"device": device,
"keyfile": keyfile,
"options": options}
def populate(self):
""" Populate the instance based on the device tree's contents. """
for device in self.devicetree.devices:
# XXX should we put them all in there or just the ones that
# are part of a device containing swap or a filesystem?
#
# Put them all in here -- we can filter from FSSet
if device.format.type != "luks":
continue
key_file = device.format.key_file
if not key_file:
key_file = "none"
options = device.format.options or ""
self.mappings[device.format.map_name] = {"device": device,
"keyfile": key_file,
"options": options}
def crypttab(self):
""" Write out /etc/crypttab """
crypttab = ""
for name in self.mappings:
entry = self[name]
crypttab += "%s UUID=%s %s %s\n" % (name,
entry['device'].format.uuid,
entry['keyfile'],
entry['options'])
return crypttab
def __getitem__(self, key):
return self.mappings[key]
def get(self, key, default=None):
return self.mappings.get(key, default)
def get_containing_device(path, devicetree):
""" Return the device that a path resides on. """
if not os.path.exists(path):
return None
st = os.stat(path)
major = os.major(st.st_dev)
minor = os.minor(st.st_dev)
link = "/sys/dev/block/%s:%s" % (major, minor)
if not os.path.exists(link):
return None
try:
device_name = os.path.basename(os.readlink(link))
except Exception: # pylint: disable=broad-except
log_exception_info(fmt_str="failed to find device name for path %s", fmt_args=[path])
return None
if device_name.startswith("dm-"):
# have I told you lately that I love you, device-mapper?
device_name = blockdev.dm.name_from_node(device_name)
return devicetree.get_device_by_name(device_name)
def turn_on_filesystems(storage, mount_only=False, callbacks=None):
"""
Perform installer-specific activation of storage configuration.
:param callbacks: callbacks to be invoked when actions are executed
:type callbacks: return value of the :func:`~.callbacks.create_new_callbacks_register`
"""
if not flags.installer_mode:
return
if not mount_only:
if (flags.live_install and not flags.image_install and not storage.fsset.active):
# turn off any swaps that we didn't turn on
# needed for live installs
util.run_program(["swapoff", "-a"])
storage.devicetree.teardown_all()
try:
storage.do_it(callbacks)
except (FSResizeError, FormatResizeError) as e:
if error_handler.cb(e) == ERROR_RAISE:
raise
except Exception as e:
raise
storage.turn_on_swap()
# FIXME: For livecd, skip_root needs to be True.
storage.mount_filesystems()
if not mount_only:
write_escrow_packets(storage)
def write_escrow_packets(storage):
escrow_devices = [d for d in storage.devices if d.format.type == 'luks' and
d.format.escrow_cert]
if not escrow_devices:
return
log.debug("escrow: write_escrow_packets start")
backup_passphrase = blockdev.crypto.generate_backup_passphrase()
try:
escrow_dir = get_sysroot() + "/root"
log.debug("escrow: writing escrow packets to %s", escrow_dir)
util.makedirs(escrow_dir)
for device in escrow_devices:
log.debug("escrow: device %s: %s",
repr(device.path), repr(device.format.type))
device.format.escrow(escrow_dir,
backup_passphrase)
except (IOError, RuntimeError) as e:
# TODO: real error handling
log.error("failed to store encryption key: %s", e)
log.debug("escrow: write_escrow_packets done")
def storage_initialize(storage, ksdata, protected):
""" Perform installer-specific storage initialization. """
from pyanaconda.flags import flags as anaconda_flags
flags.update_from_anaconda_flags(anaconda_flags)
# Platform class setup depends on flags, re-initialize it.
_platform.update_from_flags()
storage.shutdown()
# Before we set up the storage system, we need to know which disks to
# ignore, etc. Luckily that's all in the kickstart data.
storage.config.update(ksdata)
# Set up the protected partitions list now.
if protected:
storage.config.protected_dev_specs.extend(protected)
while True:
try:
storage.reset()
except StorageError as e:
if error_handler.cb(e) == ERROR_RAISE:
raise
else:
continue
else:
break
if protected and not flags.live_install and \
not any(d.protected for d in storage.devices):
raise UnknownSourceDeviceError(protected)
# kickstart uses all the disks
if flags.automated_install:
if not ksdata.ignoredisk.onlyuse:
ksdata.ignoredisk.onlyuse = [d.name for d in storage.disks
if d.name not in ksdata.ignoredisk.ignoredisk]
log.debug("onlyuse is now: %s", ",".join(ksdata.ignoredisk.onlyuse))
def mount_existing_system(fsset, root_device, read_only=None):
""" Mount filesystems specified in root_device's /etc/fstab file. """
root_path = get_sysroot()
read_only = "ro" if read_only else ""
if root_device.protected and os.path.ismount("/mnt/install/isodir"):
util.mount("/mnt/install/isodir",
root_path,
fstype=root_device.format.type,
options="bind")
else:
root_device.setup()
root_device.format.mount(chroot=root_path,
mountpoint="/",
options=read_only)
fsset.parse_fstab()
fsset.mount_filesystems(root_path=root_path, read_only=read_only, skip_root=True)
| atodorov/blivet | blivet/osinstall.py | Python | gpl-2.0 | 43,581 |
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from six import StringIO
import ramrod
import ramrod.stix
import ramrod.stix.stix_1_1
import ramrod.utils as utils
from ramrod.test import (_BaseVocab, _BaseTrans)
UPDATER_MOD = ramrod.stix.stix_1_1
UPDATER = UPDATER_MOD.STIX_1_1_Updater
PACKAGE_TEMPLATE = \
"""
<stix:STIX_Package
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:stix="http://stix.mitre.org/stix-1"
xmlns:stixCommon="http://stix.mitre.org/common-1"
xmlns:campaign="http://stix.mitre.org/Campaign-1"
xmlns:indicator="http://stix.mitre.org/Indicator-2"
xmlns:et="http://stix.mitre.org/ExploitTarget-1"
xmlns:ttp="http://stix.mitre.org/TTP-1"
xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1"
xmlns:example="http://example.com/"
xmlns:ramrod="http://ramrod.test/"
version="1.1">
%s
</stix:STIX_Package>
"""
class STIX_1_1_Test(unittest.TestCase):
    """Version detection and version-update tests for the STIX 1.1 updater."""
    XML_VERSIONS = PACKAGE_TEMPLATE % ""

    @classmethod
    def setUpClass(cls):
        cls._versions = StringIO(cls.XML_VERSIONS)

    def test_get_version(self):
        # The updater must report its own version for a plain 1.1 document.
        root = utils.get_etree_root(self._versions)
        version = UPDATER.get_version(root)
        self.assertEqual(version, UPDATER.VERSION)

    def test_update_version(self):
        # Updating to every STIX version newer than 1.1 must yield a
        # document that reports that target version.
        valid_versions = ramrod.stix.STIX_VERSIONS
        idx = valid_versions.index
        version_to = valid_versions[idx(UPDATER.VERSION)+1:]
        for version in version_to:
            updated = ramrod.update(self._versions, to_=version)
            updated_root = updated.document.as_element()
            updated_version = UPDATER.get_version(updated_root)
            self.assertEqual(version, updated_version)
class IndicatorTypeVocab(_BaseVocab):
    """Exercises the AvailabilityLossTypeVocab-1.0 controlled-vocabulary update.

    NOTE(review): the 'Degredation' spelling below presumably matches the
    vocabulary term being translated -- confirm against the vocab definition.
    """
    UPDATER = UPDATER_MOD.STIX_1_1_Updater
    VOCAB_KLASS = UPDATER_MOD.AvailabilityLossVocab
    VOCAB_COUNT = 1
    VOCAB_XML = \
    """
    <stix:Indicators>
        <stix:Indicator xsi:type="indicator:IndicatorType">
            <indicator:Type xsi:type="stixVocabs:AvailabilityLossTypeVocab-1.0">Degredation</indicator:Type>
        </stix:Indicator>
    </stix:Indicators>
    """
    XML = PACKAGE_TEMPLATE % (VOCAB_XML)
class TransCommonSource(_BaseTrans):
    """Checks the stixCommon Source translation: plain Source text becomes
    a nested Identity/Name element (TRANS_COUNT occurrences expected)."""
    UPDATER = UPDATER_MOD.STIX_1_1_Updater
    TRANS_KLASS = UPDATER_MOD.TransCommonSource
    TRANS_XPATH = "//stixCommon:Source/stixCommon:Identity/stixCommon:Name"
    TRANS_VALUE = _BaseTrans.TRANS_VALUE
    TRANS_COUNT = 2
    TRANS_XML = \
    """
    <stixCommon:Confidence>
        <stixCommon:Source>{0}</stixCommon:Source>
    </stixCommon:Confidence>
    <stixCommon:Confidence>
        <stixCommon:Source>{0}</stixCommon:Source>
    </stixCommon:Confidence>
    """.format(TRANS_VALUE)
    XML = PACKAGE_TEMPLATE % (TRANS_XML)
class TransSightingSource(_BaseTrans):
    """Checks the indicator Sighting Source translation into
    Identity/Name elements (TRANS_COUNT occurrences expected)."""
    UPDATER = UPDATER_MOD.STIX_1_1_Updater
    TRANS_KLASS = UPDATER_MOD.TransSightingsSource
    TRANS_XPATH = "//indicator:Sighting/indicator:Source/stixCommon:Identity/stixCommon:Name"
    TRANS_VALUE = _BaseTrans.TRANS_VALUE
    TRANS_COUNT = 2
    TRANS_XML = \
    """
    <indicator:Sighting>
        <indicator:Source>{0}</indicator:Source>
    </indicator:Sighting>
    <indicator:Sighting>
        <indicator:Source>{0}</indicator:Source>
    </indicator:Sighting>
    """.format(TRANS_VALUE)
    XML = PACKAGE_TEMPLATE % (TRANS_XML)
class TransIndicatorRelatedCampaign(_BaseTrans):
    """Checks the Related_Campaign name translation into
    stixCommon:Campaign/Names/Name elements (TRANS_COUNT occurrences)."""
    UPDATER = UPDATER_MOD.STIX_1_1_Updater
    TRANS_KLASS = UPDATER_MOD.TransIndicatorRelatedCampaign
    TRANS_XPATH = "//indicator:Related_Campaigns//indicator:Related_Campaign/stixCommon:Campaign/stixCommon:Names/stixCommon:Name"
    TRANS_VALUE = _BaseTrans.TRANS_VALUE
    TRANS_COUNT = 2
    TRANS_XML = \
    """
    <indicator:Related_Campaigns>
        <indicator:Related_Campaign>
            <stixCommon:Names>
                <stixCommon:Name>{0}</stixCommon:Name>
            </stixCommon:Names>
        </indicator:Related_Campaign>
        <indicator:Related_Campaign>
            <stixCommon:Names>
                <stixCommon:Name>{0}</stixCommon:Name>
            </stixCommon:Names>
        </indicator:Related_Campaign>
    </indicator:Related_Campaigns>
    """.format(TRANS_VALUE)
    XML = PACKAGE_TEMPLATE % (TRANS_XML)
if __name__ == "__main__":
unittest.main() | STIXProject/stix-ramrod | ramrod/test/stix/stix_1_1_test.py | Python | bsd-3-clause | 4,360 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import os, sys
sys.path.insert(0, os.path.abspath('../'))
from libs import utils
class TestBasePage(unittest.TestCase):
    """Tests for libs.utils.test_in_group (group-membership check)."""

    def test_user_in_group(self):
        # With the default group the check should succeed for the
        # current user.
        self.assertTrue(utils.test_in_group())
        # An unknown group must make the helper exit with status code 1.
        with self.assertRaises(SystemExit) as sys_exit:
            utils.test_in_group('fake_docker_group')
        self.assertEqual(sys_exit.exception.code, 1)
if __name__ == '__main__':
unittest.main()
| maidstone-hackspace/dockyard | tests/test_user_in_group.py | Python | gpl-3.0 | 486 |
from celery.task import task
from django.core.cache import cache
from soil import DownloadBase
from corehq.apps.locations.bulk import import_locations
from corehq.apps.commtrack.bulk import import_stock_reports
from soil.util import expose_download
from dimagi.utils.excel import WorkbookJSONReader
@task
def import_locations_async(domain, file_ref_id):
    """Celery task: bulk-import locations for *domain* from a previously
    uploaded Excel workbook, reporting progress (0-100) on this task.

    :param domain: the CommCare domain to import into
    :param file_ref_id: soil download reference id of the uploaded workbook
    :returns: dict with a 'messages' list describing the import results
    """
    task = import_locations_async
    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(file_ref_id)
    workbook = WorkbookJSONReader(download_ref.get_filename())
    worksheets = workbook.worksheets
    # import_locations yields progress messages; materialize them all.
    results = list(import_locations(domain, worksheets, task))
    DownloadBase.set_progress(task, 100, 100)
    return {
        'messages': results
    }
@task
def import_stock_reports_async(download_id, domain, file_ref_id):
    """
    Same idea but for stock reports

    :param download_id: cache key under which the result download is exposed
    :param domain: the CommCare domain to import into
    :param file_ref_id: soil download reference id of the uploaded file
    """
    download_ref = DownloadBase.get(file_ref_id)
    with open(download_ref.get_filename(), 'rb') as f:
        try:
            results = import_stock_reports(domain, f)
        # "except Exception as e" replaces the legacy Python-2-only
        # "except Exception, e" spelling; behavior is identical.
        except Exception as e:
            # Best effort: surface the error text as the download result
            # rather than failing the task.
            results = "ERROR: %s" % e
    ref = expose_download(results, 60*60*3, mimetype='text/csv')
    cache.set(download_id, ref)
| gmimano/commcaretest | corehq/apps/commtrack/tasks.py | Python | bsd-3-clause | 1,204 |
class Solution(object):
    def canFinish(self, numCourses, prerequisites):
        """
        Decide whether all courses can be finished, i.e. whether the
        prerequisite graph is acyclic (Kahn's topological sort).

        :type numCourses: int
        :type prerequisites: List[List[int]]  # [course, prerequisite] pairs
        :rtype: bool
        """
        from collections import deque

        # adjacency: prerequisite -> courses it unlocks; indegree counts
        # how many prerequisites each course still has.
        graph = [[] for _ in range(numCourses)]
        indegree = [0] * numCourses
        for course, prereq in prerequisites:
            graph[prereq].append(course)
            indegree[course] += 1

        # Start with every course that has no prerequisites.
        queue = deque(c for c in range(numCourses) if indegree[c] == 0)
        taken = 0
        while queue:
            course = queue.popleft()
            taken += 1
            for unlocked in graph[course]:
                indegree[unlocked] -= 1
                if indegree[unlocked] == 0:
                    queue.append(unlocked)

        # All courses were ordered iff the graph has no cycle.
        return taken == numCourses
| rx2130/Leetcode | python/207 Course Schedule.py | Python | apache-2.0 | 196 |
import calendar
import datetime
from django.conf import settings
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from corehq.apps.app_manager.dbaccessors import get_brief_apps_in_domain
from corehq.apps.commtrack.const import USER_LOCATION_OWNER_MAP_TYPE
from corehq.apps.export.models.incremental import IncrementalExport
from corehq.apps.groups.models import Group
from corehq.apps.reports.analytics.esaccessors import (
get_case_types_for_domain,
)
from corehq.apps.reports.filters.base import (
BaseMultipleOptionFilter,
BaseSingleOptionFilter,
)
from corehq.motech.repeaters.const import (
RECORD_CANCELLED_STATE,
RECORD_FAILURE_STATE,
RECORD_PENDING_STATE,
RECORD_SUCCESS_STATE,
)
from corehq.motech.repeaters.dbaccessors import get_repeaters_by_domain
class GroupFilterMixin(object):
    """Shared config for report filters that select among reporting groups."""
    slug = "group"
    label = ugettext_lazy("Group")
    default_text = ugettext_lazy("Everybody")

    @property
    def options(self):
        # (value, display) pairs for every reporting group in the domain.
        return [(group.get_id, group.name) for group in Group.get_reporting_groups(self.domain)]
class GroupFilter(GroupFilterMixin, BaseSingleOptionFilter):
    """Single-select filter over the domain's reporting groups."""
    placeholder = ugettext_lazy('Click to select a group')
class MultiGroupFilter(GroupFilterMixin, BaseMultipleOptionFilter):
    """Multi-select filter over the domain's reporting groups."""
    placeholder = ugettext_lazy('Click to select groups')
class YearFilter(BaseSingleOptionFilter):
    """Single-select filter listing years from START_YEAR to now, newest first."""
    slug = "year"
    label = ugettext_lazy("Year")
    default_text = None

    @property
    def options(self):
        first_year = getattr(settings, 'START_YEAR', 2008)
        current_year = datetime.datetime.utcnow().year
        # Descending: the current year appears first.
        return [(str(year), year)
                for year in range(current_year, first_year - 1, -1)]
class MonthFilter(BaseSingleOptionFilter):
    """Single-select filter over calendar months ("01".."12")."""
    slug = "month"
    label = ugettext_lazy("Month")
    default_text = None

    @property
    def options(self):
        # Zero-padded month number as the value, English month name as label.
        months = []
        for month in range(1, 13):
            months.append(("%02d" % month, calendar.month_name[month]))
        return months
class CaseTypeMixin(object):
    """Shared config for report filters that select among case types."""
    slug = "case_type"
    label = ugettext_lazy("Case Type")
    default_text = ugettext_lazy("All Case Types")

    @property
    def options(self):
        case_types = sorted(get_case_types_for_domain(self.domain))
        # The internal user-location owner-map type is hidden from users.
        return [(case, "%s" % case) for case in case_types
                if case != USER_LOCATION_OWNER_MAP_TYPE]
class CaseTypeFilter(CaseTypeMixin, BaseSingleOptionFilter):
    """Single-select filter over the domain's case types."""
    placeholder = ugettext_lazy('Click to select a case type')
class MultiCaseTypeFilter(CaseTypeMixin, BaseMultipleOptionFilter):
    """Multi-select filter over the domain's case types."""
    placeholder = ugettext_lazy('Click to select case types')
class SelectOpenCloseFilter(BaseSingleOptionFilter):
    """Single-select filter for case open/closed status."""
    slug = "is_open"
    label = ugettext_lazy("Open / Closed")
    default_text = ugettext_lazy("Show All")

    @property
    def options(self):
        return [
            ('open', _("Only Open")),
            ('closed', _("Only Closed")),
        ]
class SelectApplicationFilter(BaseSingleOptionFilter):
    """Single-select filter over the domain's applications.

    On ICDS environments the build version is omitted from both the
    default text and the option labels.
    """
    slug = "app"
    label = ugettext_lazy("Application")

    # Evaluated at class-definition (import) time, not per request.
    if settings.SERVER_ENVIRONMENT in settings.ICDS_ENVS:
        default_text = ugettext_lazy("Select Application")
    else:
        default_text = ugettext_lazy("Select Application [Latest Build Version]")

    @property
    def options(self):
        apps_for_domain = get_brief_apps_in_domain(self.domain)
        if settings.SERVER_ENVIRONMENT in settings.ICDS_ENVS:
            return [(app.get_id, _("{name}".format(
                name=app.name))) for app in apps_for_domain
            ]
        else:
            return [(app.get_id, _("{name} [up to build {version}]".format(
                name=app.name,
                version=app.version))) for app in apps_for_domain
            ]
class RepeaterFilter(BaseSingleOptionFilter):
    """Single-select filter over the domain's configured repeaters."""
    slug = 'repeater'
    label = ugettext_lazy('Repeater')
    default_text = ugettext_lazy("All Repeaters")
    placeholder = ugettext_lazy('Click to select repeaters')

    @property
    def options(self):
        # (doc id, display string) for each repeater in the domain.
        return [(r.get_id, str(r)) for r in self._get_repeaters()]

    def _get_repeaters(self):
        return get_repeaters_by_domain(self.domain)
class RepeatRecordStateFilter(BaseSingleOptionFilter):
    """Single-select filter for repeat-record delivery status."""
    slug = "record_state"
    label = ugettext_lazy("Record Status")
    default_text = ugettext_lazy("Show All")

    @property
    def options(self):
        return [
            (RECORD_SUCCESS_STATE, _("Successful")),
            (RECORD_PENDING_STATE, _("Pending")),
            (RECORD_CANCELLED_STATE, _("Cancelled")),
            (RECORD_FAILURE_STATE, _("Failed")),
        ]
class IncrementalExportFilter(BaseSingleOptionFilter):
    """Single-select filter over the domain's incremental exports."""
    slug = 'incremental_export_id'
    label = ugettext_lazy('Incremental Export')
    default_text = ugettext_lazy("All Incremental Exports")

    @property
    def options(self):
        # (stringified pk, name) pairs fetched straight from the DB.
        return [(str(i[0]), i[1]) for i in IncrementalExport.objects.filter(
            domain=self.domain
        ).values_list('id', 'name').all()]
| dimagi/commcare-hq | corehq/apps/reports/filters/select.py | Python | bsd-3-clause | 4,942 |
class ReadableException(Exception):
    """Base exception carrying a human-readable detail message and a short code."""

    default_detail = 'A server error occurred.'
    default_code = 'error'

    def __init__(self, detail=None, code=None):
        # Only None triggers the fallback -- empty strings are respected.
        self.detail = self.default_detail if detail is None else detail
        self.code = self.default_code if code is None else code
| makeev/django-boilerplate | back/main/exceptions.py | Python | mit | 345 |
# Copyright (c) 2016, Louis Opter <louis@opter.org>
#
# This file is part of lightsd.
#
# lightsd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# lightsd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with lightsd. If not, see <http://www.gnu.org/licenses/>.
from .ui import start # noqa
| lopter/lightsd | apps/monolight/monolight/ui/__init__.py | Python | gpl-3.0 | 738 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for random-number generation ops in the XLA JIT compiler."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import special_math
from tensorflow.python.platform import googletest
class RandomOpsTest(xla_test.XLATestCase):
  """Test cases for random-number generating operators."""

  def _random_types(self):
    # dtypes exercised by these tests: all numeric types minus complex
    # and the 64-bit / 8-bit integer types.
    return set(self.numeric_types) - set(
        self.complex_types) - {np.uint64, np.int64, np.uint8, np.int8}

  def _testRngIsNotConstant(self, rng, dtype):
    # Tests that 'rng' does not always return the same value.
    with self.session() as sess:
      with self.test_scope():
        x = rng(dtype)

      # The random-number generator, if working correctly, should produce the
      # same output multiple times with low probability.
      y = self.evaluate(x)
      z = self.evaluate(x)
      w = self.evaluate(x)

      # We use exact equality here. If the random-number generator is producing
      # deterministic output, all three outputs will be bitwise identical.
      self.assertTrue((not np.array_equal(y, z)) or
                      (not np.array_equal(z, w)) or (not np.array_equal(y, w)))

  def testRandomUniformIsNotConstant(self):

    def rng(dtype):
      dtype = dtypes.as_dtype(dtype)
      return random_ops.random_uniform(shape=[2], dtype=dtype, maxval=dtype.max)

    for dtype in self._random_types():
      self._testRngIsNotConstant(rng, dtype)

  def testRandomNormalIsNotConstant(self):

    def rng(dtype):
      return random_ops.random_normal(shape=[2], dtype=dtype)

    # random_normal is only defined for floating-point dtypes.
    for dtype in self._random_types() & self.float_types:
      self._testRngIsNotConstant(rng, dtype)

  def testRandomNormalMean(self):
    for dtype in self._random_types() & self.float_types:
      with self.session():
        with self.test_scope():
          normal = random_ops.random_normal([1024],
                                            dtype=dtype,
                                            mean=1.4,
                                            stddev=1.2)
          mean = math_ops.reduce_mean(normal)
          x = self.evaluate(mean)
          # Loose tolerances: the sample mean of 1024 draws is stochastic.
          self.assertAllClose(x, 1.4, rtol=1e-1, atol=1e-1)

  def testRandomNormalVariance(self):
    for dtype in self._random_types() & self.float_types:
      with self.session():
        with self.test_scope():
          normal = random_ops.random_normal([1024],
                                            dtype=dtype,
                                            mean=2.3,
                                            stddev=2.0)
          variance = math_ops.reduce_variance(normal)
          x = self.evaluate(variance)
          # stddev=2.0 -> variance 4.0.
          self.assertAllClose(x, 4.0, rtol=1e-1, atol=1e-1)

  def testRandomUniformIsInRange(self):
    for dtype in self._random_types():
      # TODO (b/112272078): enable bfloat16 for CPU and GPU when the bug is
      # fixed.
      if (self.device in ["XLA_GPU", "XLA_CPU"
                         ]) and (dtype in [dtypes.bfloat16, dtypes.half]):
        continue
      with self.session() as sess:
        with self.test_scope():
          x = random_ops.random_uniform(
              shape=[1000], dtype=dtype, minval=-2, maxval=33)
        y = self.evaluate(x)
        # Samples must fall in the half-open interval [-2, 33).
        self.assertTrue((y >= -2).sum() == 1000)
        self.assertTrue((y < 33).sum() == 1000)

  def testTruncatedNormalIsNotConstant(self):

    def rng(dtype):
      return random_ops.truncated_normal(shape=[2], dtype=dtype)

    self._testRngIsNotConstant(rng, dtypes.float32)

  def testTruncatedNormalIsInRange(self):
    count = 10000000
    # TODO(b/34339814): make this test work with 16 bit float types.
    for dtype in self._random_types() & {dtypes.float32, dtypes.float64}:
      with self.session() as sess:
        with self.test_scope():
          x = random_ops.truncated_normal(shape=[count], dtype=dtype)
        y = self.evaluate(x)

        def normal_cdf(x):
          return .5 * math.erfc(-x / math.sqrt(2))

        def normal_pdf(x):
          return math.exp(-(x**2) / 2.) / math.sqrt(2 * math.pi)

        def probit(x, sess=sess):
          return self.evaluate(special_math.ndtri(x))

        # Truncation bounds [a, b] of a standard normal (mu=0, sigma=1).
        a = -2.
        b = 2.
        mu = 0.
        sigma = 1.

        alpha = (a - mu) / sigma
        beta = (b - mu) / sigma
        z = normal_cdf(beta) - normal_cdf(alpha)

        self.assertEqual((y >= a).sum(), count)
        self.assertEqual((y <= b).sum(), count)

        # For more information on these calculations, see:
        # Burkardt, John. "The Truncated Normal Distribution".
        # Department of Scientific Computing website. Florida State University.
        expected_mean = mu + (normal_pdf(alpha) - normal_pdf(beta)) / z * sigma
        actual_mean = np.mean(y)
        self.assertAllClose(actual_mean, expected_mean, atol=2e-3)

        expected_median = mu + probit(
            (normal_cdf(alpha) + normal_cdf(beta)) / 2.) * sigma
        actual_median = np.median(y)
        self.assertAllClose(actual_median, expected_median, atol=1e-2)

        expected_variance = sigma**2 * (1 + (
            (alpha * normal_pdf(alpha) - beta * normal_pdf(beta)) / z) - (
                (normal_pdf(alpha) - normal_pdf(beta)) / z)**2)
        actual_variance = np.var(y)
        self.assertAllClose(actual_variance, expected_variance, rtol=2*1e-3)

  def testShuffle1d(self):
    with self.session() as sess:
      with self.test_scope():
        x = math_ops.range(1 << 16)
        shuffle = random_ops.random_shuffle(x)
      result = self.evaluate(shuffle)
      expected = range(1 << 16)
      # Compare sets to avoid randomness behavior changes but make sure still
      # have all the values.
      self.assertAllEqual(set(result), set(expected))

  def testShuffle2d(self):
    with self.session() as sess:
      with self.test_scope():
        x = array_ops.diag(math_ops.range(20))
        shuffle = random_ops.random_shuffle(x)
      result = self.evaluate(shuffle)
      expected = np.diag(range(20)).flatten()
      # Compare sets to avoid randomness behavior changes but make sure still
      # have all the values.
      self.assertAllEqual(len(result.flatten()), len(expected))
      self.assertAllEqual(set(result.flatten()), set(expected))
if __name__ == '__main__':
googletest.main()
| alsrgv/tensorflow | tensorflow/compiler/tests/random_ops_test.py | Python | apache-2.0 | 7,283 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def HostScsiTopologyInterface(vim, *args, **kwargs):
    '''This data object type describes the SCSI interface that is associated with a
    list of targets.'''

    obj = vim.client.factory.create('ns0:HostScsiTopologyInterface')

    # do some validation checking...
    if (len(args) + len(kwargs)) < 2:
        # Report the actual requirement (2 args: adapter, key) and the
        # actual count supplied -- the original message claimed "at least
        # 3" and ignored kwargs, contradicting the check above.
        raise IndexError('Expected at least 2 arguments got: %d' %
                         (len(args) + len(kwargs)))

    required = [ 'adapter', 'key' ]
    optional = [ 'target', 'dynamicProperty', 'dynamicType' ]

    # Positional args fill attributes in declaration order.
    for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)

    # Keyword args must match a known attribute name.
    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s.  Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
| xuru/pyvisdk | pyvisdk/do/host_scsi_topology_interface.py | Python | mit | 1,076 |
#!/usr/bin/env python3
import argparse
import sys
import os
from fluxghost.pipe_route import get_match_pipe_service, ROUTES
from fluxghost.launcher import setup_env, show_version
def main():
    """Entry point: parse CLI options, prepare the environment and hand
    control to the pipe service matching the requested task.

    Returns 0 on normal service shutdown; exits early for --version/--test.
    """
    parser = argparse.ArgumentParser(description='FLUX Shadow')

    def add_flag(*names, **kw):
        # Shorthand for the repeated boolean store_const pattern (default False).
        parser.add_argument(*names, dest=kw['dest'], action='store_const',
                            const=True, default=False, help=kw['help'])

    # Declaration order is preserved so --help output stays identical.
    parser.add_argument("--log", dest='logfile', type=str, default=None,
                        help="Output log to specific")
    add_flag('-d', '--debug', dest='debug', help='Enable debug')
    add_flag('-s', '--simulate', dest='simulate', help='Simulate data')
    parser.add_argument("--slic3r", dest='slic3r', type=str,
                        default='../Slic3r/slic3r.pl',
                        help="Set slic3r location")
    parser.add_argument("--cura", dest='cura', type=str, default='',
                        help="Set cura location")
    parser.add_argument("--sentry", dest='sentry', type=str, default=None,
                        help="Use sentry logger")
    add_flag('--test', dest='test', help='Run test')
    add_flag('--version', dest='version', help='Show version')
    parser.add_argument(dest='task', choices=ROUTES.keys(), help='Task')
    parser.add_argument(dest='arguments', type=str, nargs='*',
                        help='Task args')

    opts = parser.parse_args()

    if opts.version:
        show_version(opts.debug)
        sys.exit(0)

    setup_env(opts)

    if opts.test:
        from tests.main import main
        main()
        sys.exit(0)

    # Expose tool locations to child processes through the environment.
    if opts.slic3r:
        os.environ["slic3r"] = opts.slic3r
    if opts.cura:
        os.environ["cura"] = opts.cura

    service_cls = get_match_pipe_service(opts.task)
    service = service_cls(sys.stdin.buffer, sys.stdout.buffer, opts,
                          *opts.arguments)
    service.serve_forever()
    return 0
if __name__ == '__main__':
sys.exit(main())
| flux3dp/fluxghost | shadow.py | Python | agpl-3.0 | 2,230 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2017, René Moser <mail@renemoser.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_network_acl
short_description: Manages network access control lists (ACL) on Apache CloudStack based clouds.
description:
- Create and remove network ACLs.
version_added: "2.4"
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the network ACL.
required: true
description:
description:
- Description of the network ACL.
- If not set, identical to C(name).
vpc:
description:
- VPC the network ACL is related to.
required: true
state:
description:
- State of the network ACL.
default: 'present'
choices: [ 'present', 'absent' ]
domain:
description:
- Domain the network ACL rule is related to.
account:
description:
- Account the network ACL rule is related to.
project:
description:
- Name of the project the network ACL is related to.
zone:
description:
- Name of the zone the VPC is related to.
- If not set, default zone is used.
poll_async:
description:
- Poll async jobs until job has finished.
type: bool
default: 'yes'
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# create a network ACL
- local_action:
module: cs_network_acl
name: Webserver ACL
description: a more detailed description of the ACL
vpc: customers
# remove a network ACL
- local_action:
module: cs_network_acl
name: Webserver ACL
vpc: customers
state: absent
'''
RETURN = '''
---
name:
description: Name of the network ACL.
returned: success
type: str
sample: customer acl
description:
description: Description of the network ACL.
returned: success
type: str
sample: Example description of a network ACL
vpc:
description: VPC of the network ACL.
returned: success
type: str
sample: customer vpc
zone:
description: Zone the VPC is related to.
returned: success
type: str
sample: ch-gva-2
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together
)
class AnsibleCloudStackNetworkAcl(AnsibleCloudStack):
    """CloudStack network ACL list handling: lookup, ensure-present and
    ensure-absent operations, with check-mode and async-job support."""

    def __init__(self, module):
        super(AnsibleCloudStackNetworkAcl, self).__init__(module)

    def get_network_acl(self):
        """Return the existing ACL list matching name + VPC, or None."""
        query = {
            'name': self.module.params.get('name'),
            'vpcid': self.get_vpc(key='id'),
        }
        found = self.query_api('listNetworkACLLists', **query)
        if not found:
            return None
        return found['networkacllist'][0]

    def present_network_acl(self):
        """Ensure the ACL exists; create it when missing (honours check mode)."""
        network_acl = self.get_network_acl()
        if network_acl:
            # Already present; nothing to change.
            return network_acl

        self.result['changed'] = True
        args = {
            'name': self.module.params.get('name'),
            'description': self.get_or_fallback('description', 'name'),
            'vpcid': self.get_vpc(key='id'),
        }
        if not self.module.check_mode:
            res = self.query_api('createNetworkACLList', **args)
            if self.module.params.get('poll_async'):
                network_acl = self.poll_job(res, 'networkacllist')
        return network_acl

    def absent_network_acl(self):
        """Ensure the ACL is removed; delete it when present (honours check mode)."""
        network_acl = self.get_network_acl()
        if not network_acl:
            # Already absent; nothing to change.
            return network_acl

        self.result['changed'] = True
        if not self.module.check_mode:
            res = self.query_api('deleteNetworkACLList', id=network_acl['id'])
            if self.module.params.get('poll_async'):
                self.poll_job(res, 'networkacllist')
        return network_acl
def main():
    """Module entry point: declare arguments, dispatch on state, exit with
    the resulting facts."""
    spec = cs_argument_spec()
    spec.update(
        name=dict(required=True),
        description=dict(),
        vpc=dict(required=True),
        state=dict(choices=['present', 'absent'], default='present'),
        zone=dict(),
        domain=dict(),
        account=dict(),
        project=dict(),
        poll_async=dict(type='bool', default=True),
    )

    module = AnsibleModule(
        argument_spec=spec,
        required_together=cs_required_together(),
        supports_check_mode=True,
    )

    handler = AnsibleCloudStackNetworkAcl(module)
    if module.params.get('state') == 'absent':
        network_acl = handler.absent_network_acl()
    else:
        network_acl = handler.present_network_acl()

    module.exit_json(**handler.get_result(network_acl))
if __name__ == '__main__':
main()
| valentin-krasontovitsch/ansible | lib/ansible/modules/cloud/cloudstack/cs_network_acl.py | Python | gpl-3.0 | 5,606 |
###
# Copyright (c) 2004, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import time
TIME = time # For later use.
from dateutil import parser
import supybot.conf as conf
import supybot.utils as utils
from supybot.commands import *
import supybot.callbacks as callbacks
def parse(s):
    """Turn a free-form time description into integer seconds since epoch.

    Substitutes a few colloquial words ('noon', 'midnight', 'tomorrow',
    'next week') before handing the rest to dateutil's fuzzy parser.
    """
    # Seconds to add after dateutil has parsed the remaining text.
    offset = 0
    s = s.replace('noon', '12:00').replace('midnight', '00:00')
    if 'tomorrow' in s:
        offset += 86400
        s = s.replace('tomorrow', '')
    if 'next week' in s:
        offset += 86400 * 7
        s = s.replace('next week', '')
    parsed = int(time.mktime(parser.parse(s, fuzzy=True).timetuple()))
    return parsed + offset
class Time(callbacks.Plugin):
    # Supybot plugin exposing time commands: seconds, at, until, ctime,
    # time and elapsed.
    # NOTE: each command's docstring is user-facing help text that Supybot
    # sends to IRC at runtime, so the docstrings below are behavior, not
    # mere documentation.
    def seconds(self, irc, msg, args):
        """[<years>y] [<weeks>w] [<days>d] [<hours>h] [<minutes>m] [<seconds>s]

        Returns the number of seconds in the number of <years>, <weeks>,
        <days>, <hours>, <minutes>, and <seconds> given. An example usage is
        "seconds 2h 30m", which would return 9000, which is '3600*2 + 30*60'.
        Useful for scheduling events at a given number of seconds in the
        future.
        """
        if not args:
            raise callbacks.ArgumentError
        seconds = 0
        for arg in args:
            # Each token must be an integer followed by one unit letter.
            if not arg or arg[-1] not in 'ywdhms':
                raise callbacks.ArgumentError
            (s, kind) = arg[:-1], arg[-1]
            try:
                i = int(s)
            except ValueError:
                irc.errorInvalid('argument', arg, Raise=True)
            # Multipliers: y = 365 days, w = 7 days, d = 24 h, h = 60 m, m = 60 s.
            if kind == 'y':
                seconds += i*31536000
            elif kind == 'w':
                seconds += i*604800
            elif kind == 'd':
                seconds += i*86400
            elif kind == 'h':
                seconds += i*3600
            elif kind == 'm':
                seconds += i*60
            elif kind == 's':
                seconds += i
        irc.reply(str(seconds))

    def at(self, irc, msg, args, s):
        """<time string>

        Returns the number of seconds since epoch <time string> is.
        <time string> can be any number of natural formats; just try something
        and see if it will work.
        """
        now = int(time.time())
        new = parse(s)
        if new != now:
            irc.reply(str(new))
        else:
            irc.error('That\'s right now!')
    # wrap() converts the raw method into a Supybot command taking one text arg.
    at = wrap(at, ['text'])

    def until(self, irc, msg, args, s):
        """<time string>

        Returns the number of seconds until <time string>.
        """
        now = int(time.time())
        new = parse(s)
        if new != now:
            if new - now < 0:
                # Parsed moment already passed today; assume the same time tomorrow.
                new += 86400
            irc.reply(str(new-now))
        else:
            irc.error('That\'s right now!')
    until = wrap(until, ['text'])

    def ctime(self, irc, msg, args, seconds):
        """[<seconds since epoch>]

        Returns the ctime for <seconds since epoch>, or the current ctime if
        no <seconds since epoch> is given.
        """
        irc.reply(time.ctime(seconds))
    # TIME.time is used as the default factory because the local name 'time'
    # will be rebound to the command below.
    ctime = wrap(ctime, [additional(('int', 'number of seconds since epoch'),
                                    TIME.time)])

    def time(self, irc, msg, args, channel, format, seconds):
        """[<format>] [<seconds since epoch>]

        Returns the current time in <format> format, or, if <format> is not
        given, uses the configurable format for the current channel. If no
        <seconds since epoch> time is given, the current time is used.
        """
        if not format:
            # Fall back to the per-channel (or global) configured format.
            if channel:
                format = self.registryValue('format', channel)
            else:
                format = self.registryValue('format')
        irc.reply(time.strftime(format, time.localtime(seconds)))
    time = wrap(time, [optional('channel'), optional('nonInt'),
                       additional('float', TIME.time)])

    def elapsed(self, irc, msg, args, seconds):
        """<seconds>

        Returns a pretty string that is the amount of time represented by
        <seconds>.
        """
        irc.reply(utils.timeElapsed(seconds))
    elapsed = wrap(elapsed, ['int'])
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| tecan/xchat-rt | plugins/scripts/encryption/supybot-code-6361b1e856ebbc8e14d399019e2c53a35f4e0063/plugins/Time/plugin.py | Python | gpl-2.0 | 5,772 |
#!/usr/bin/env python
'''
Copyright 2009, The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
# script to highlight adb logcat output for console
# written by jeff sharkey, http://jsharkey.org/
# piping detection and popen() added by other android team members
import os, sys, re, StringIO
import fcntl, termios, struct
# unpack the current terminal width/height
data = fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ, '1234')
HEIGHT, WIDTH = struct.unpack('hh',data)
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
def format(fg=None, bg=None, bright=False, bold=False, dim=False, reset=False):
    """Build an ANSI SGR escape sequence for the given attributes.

    With reset=True every other flag is ignored and the reset sequence is
    returned. Foregrounds use codes 30-37; backgrounds 40-47, or 100-107
    when bright. Exactly one of bold/dim is emitted, otherwise code 22
    (normal intensity) is appended.
    """
    # manually derived from http://en.wikipedia.org/wiki/ANSI_escape_code#Codes
    if reset:
        return "\033[0m"
    codes = []
    if fg is not None:
        codes.append("3%d" % fg)
    if bg is not None:
        codes.append(("10%d" if bright else "4%d") % bg)
    if bold:
        codes.append("1")
    elif dim:
        codes.append("2")
    else:
        codes.append("22")
    return "\033[%sm" % ";".join(codes)
def indent_wrap(message, indent=0, width=80):
    """Hard-wrap *message* to *width* columns, indenting continuation lines.

    The first line starts at column 0; each wrapped continuation line is
    prefixed with *indent* spaces. Returns the wrapped string.

    Fixed: the original accumulated output in the Python-2-only StringIO
    module; a plain list + join is byte-for-byte equivalent and works on
    both Python 2 and 3.

    NOTE(review): assumes indent < width, otherwise the loop never advances
    (same behavior as the original) — confirm callers guarantee this.
    """
    wrap_area = width - indent
    pieces = []
    current = 0
    while current < len(message):
        next = min(current + wrap_area, len(message))
        pieces.append(message[current:next])
        if next < len(message):
            # Continuation: newline plus the indent prefix.
            pieces.append("\n%s" % (" " * indent))
        current = next
    return "".join(pieces)
LAST_USED = [RED,GREEN,YELLOW,BLUE,MAGENTA,CYAN,WHITE]
KNOWN_TAGS = {
"dalvikvm": BLUE,
"Process": BLUE,
"ActivityManager": CYAN,
"ActivityThread": CYAN,
}
def allocate_color(tag):
    """Return a stable color for *tag*, stealing the least-recently-used slot
    for tags seen for the first time.

    KNOWN_TAGS maps tag -> color; LAST_USED keeps colors in LRU order with
    the least recently used color at index 0.
    """
    if tag not in KNOWN_TAGS:
        # New tag: take over the color that has gone unused the longest.
        KNOWN_TAGS[tag] = LAST_USED[0]
    color = KNOWN_TAGS[tag]
    # Refresh the LRU order: move this color to the most-recent end.
    LAST_USED.remove(color)
    LAST_USED.append(color)
    return color
RULES = {
#re.compile(r"([\w\.@]+)=([\w\.@]+)"): r"%s\1%s=%s\2%s" % (format(fg=BLUE), format(fg=GREEN), format(fg=BLUE), format(reset=True)),
}
TAGTYPE_WIDTH = 3
TAG_WIDTH = 20
PROCESS_WIDTH = 8 # 8 or -1
HEADER_SIZE = TAGTYPE_WIDTH + 1 + TAG_WIDTH + 1 + PROCESS_WIDTH + 1
TAGTYPES = {
"V": "%s%s%s " % (format(fg=WHITE, bg=BLACK), "V".center(TAGTYPE_WIDTH), format(reset=True)),
"D": "%s%s%s " % (format(fg=BLACK, bg=BLUE), "D".center(TAGTYPE_WIDTH), format(reset=True)),
"I": "%s%s%s " % (format(fg=BLACK, bg=GREEN), "I".center(TAGTYPE_WIDTH), format(reset=True)),
"W": "%s%s%s " % (format(fg=BLACK, bg=YELLOW), "W".center(TAGTYPE_WIDTH), format(reset=True)),
"E": "%s%s%s " % (format(fg=BLACK, bg=RED), "E".center(TAGTYPE_WIDTH), format(reset=True)),
}
retag = re.compile("^([A-Z])/([^\(]+)\(([^\)]+)\): (.*)$")
# to pick up -d or -e
adb_args = ' '.join(sys.argv[1:])
# if someone is piping in to us, use stdin as input. if not, invoke adb logcat
if os.isatty(sys.stdin.fileno()):
input = os.popen("adb %s logcat" % adb_args)
else:
input = sys.stdin
# Main loop: read raw logcat lines, colorize the ones matching the
# "T/tag(pid): message" pattern and echo everything else unchanged.
# (Python 2 script: uses the StringIO module and the print statement.)
while True:
    try:
        line = input.readline()
    except KeyboardInterrupt:
        break

    match = retag.match(line)
    if not match is None:
        tagtype, tag, owner, message = match.groups()
        linebuf = StringIO.StringIO()

        # center process info
        if PROCESS_WIDTH > 0:
            owner = owner.strip().center(PROCESS_WIDTH)
            linebuf.write("%s%s%s " % (format(fg=BLACK, bg=BLACK, bright=True), owner, format(reset=True)))

        # right-align tag title and allocate color if needed
        tag = tag.strip()
        color = allocate_color(tag)
        tag = tag[-TAG_WIDTH:].rjust(TAG_WIDTH)
        linebuf.write("%s%s %s" % (format(fg=color, dim=False), tag, format(reset=True)))

        # write out tagtype colored edge
        # NOTE(review): an unrecognized tag type aborts the entire loop
        # rather than skipping the line — verify this is intentional.
        if not tagtype in TAGTYPES: break
        linebuf.write(TAGTYPES[tagtype])

        # insert line wrapping as needed
        message = indent_wrap(message, HEADER_SIZE, WIDTH)

        # format tag message using rules
        for matcher in RULES:
            replace = RULES[matcher]
            message = matcher.sub(replace, message)

        linebuf.write(message)
        line = linebuf.getvalue()

    print line
    # EOF: readline() returns an empty string once the source is exhausted.
    if len(line) == 0: break
| houseofadams/dotfiles | bin/coloredlogcat.py | Python | apache-2.0 | 4,876 |
import lldb
import re
import testutils as test
# bpmd -clearall
def runScenario(assembly, debugger, target):
    """Exercise 'bpmd -clearall': set a pending managed breakpoint, clear all
    pending breakpoints, then verify the debuggee runs to a clean exit
    instead of stopping at Test.UnlikelyInlined.

    :param assembly: path to the managed test assembly.
    :param debugger: lldb.SBDebugger driving the session.
    :param target: lldb.SBTarget for the launched process.
    """
    process = target.GetProcess()
    res = lldb.SBCommandReturnObject()
    ci = debugger.GetCommandInterpreter()

    # Run debugger, wait until libcoreclr is loaded,
    # set breakpoint at Test.Main and stop there
    test.stop_in_main(debugger, assembly)

    # Set breakpoint
    ci.HandleCommand("bpmd " + assembly + " Test.UnlikelyInlined", res)
    out_msg = res.GetOutput()
    err_msg = res.GetError()
    print(out_msg)
    print(err_msg)

    # Interpreter must have this command and able to run it
    test.assertTrue(res.Succeeded())

    # Output is not empty
    # Should be at least 'Adding pending breakpoints...'
    test.assertTrue(len(out_msg) > 0)

    # Error message is empty
    test.assertTrue(len(err_msg) == 0)

    # Delete all breakpoints
    ci.HandleCommand("bpmd -clearall", res)
    out_msg = res.GetOutput()
    err_msg = res.GetError()
    print(out_msg)
    print(err_msg)

    # Interpreter must have this command and able to run it
    test.assertTrue(res.Succeeded())

    match = re.search('All pending breakpoints cleared.', out_msg)
    # Check for specific output
    test.assertTrue(match)

    # Error message is empty
    test.assertEqual(err_msg, '')

    process.Continue()

    # Process must be exited
    test.assertEqual(process.GetState(), lldb.eStateExited)

    # The reason of this stop must be a breakpoint
    # NOTE(review): the inherited comment above contradicts the assertion
    # below, which expects eStopReasonNone after a clean exit; the assertion
    # matches the scenario — the comment looks copy-pasted from another test.
    test.assertEqual(process.GetSelectedThread().GetStopReason(),
                     lldb.eStopReasonNone)

    #
    # Delete all breakpoints, continue current process and checks its exit code
    test.exit_lldb(debugger, assembly)
| ragmani/coreclr | src/ToolBox/SOS/tests/t_cmd_bpmd_clearall.py | Python | mit | 1,713 |
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
#pylint: disable=invalid-name
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QFrame) # noqa
from qtpy.QtGui import (QDoubleValidator) # noqa
import reduction_gui.widgets.util as util
from reduction_gui.reduction.sans.hfir_background_script import Background
from reduction_gui.widgets.base_widget import BaseWidget
from reduction_gui.widgets.sans.hfir_sample_data import BeamSpreader, DirectBeam
try:
from mantidqt.utils.qt import load_ui
except ImportError:
from mantid.kernel import Logger
Logger("BckDirectBeam").information('Using legacy ui importer')
from mantidplot import load_ui
class BckDirectBeam(DirectBeam):
    """Direct-beam transmission widget bound to the background reduction state."""

    def __init__(self, parent=None, state=None, settings=None, data_type=None, data_proxy=None):
        super(BckDirectBeam, self).__init__(parent, state, settings, data_type, data_proxy=data_proxy)
        if state is None:
            # No state supplied: start from a pristine background direct-beam state.
            self.set_state(Background.DirectBeam())

    def get_state(self):
        """Wrap the base widget's state into a Background.DirectBeam object."""
        return Background.DirectBeam(super(BckDirectBeam, self).get_state())

    def set_state(self, state):
        """Delegate state population to the base DirectBeam widget."""
        super(BckDirectBeam, self).set_state(state)
class BckBeamSpreader(BeamSpreader):
    """Beam-spreader transmission widget bound to the background reduction state."""

    def __init__(self, parent=None, state=None, settings=None, data_type=None, data_proxy=None):
        super(BckBeamSpreader, self).__init__(parent, state, settings, data_type, data_proxy=data_proxy)
        if state is None:
            # No state supplied: start from a pristine background beam-spreader state.
            self.set_state(Background.BeamSpreader())

    def get_state(self):
        """Wrap the base widget's state into a Background.BeamSpreader object."""
        return Background.BeamSpreader(super(BckBeamSpreader, self).get_state())

    def set_state(self, state):
        """Delegate state population to the base BeamSpreader widget."""
        super(BckBeamSpreader, self).set_state(state)
class BackgroundWidget(BaseWidget):
    """
    Widget that presents the transmission options to the user
    """
    # Currently embedded transmission-method widget (direct beam or spreader).
    _method_box = None

    ## Widget name
    name = "Background"

    def __init__(self, parent=None, state=None, settings=None, show_transmission=True, data_type=None, data_proxy=None):
        super(BackgroundWidget, self).__init__(parent, state, settings, data_type, data_proxy=data_proxy)

        # Thin QFrame wrapper that loads the designer .ui file onto itself.
        class BckFrame(QFrame):
            def __init__(self, parent=None):
                QFrame.__init__(self, parent)
                self.ui = load_ui(__file__, '../../../ui/sans/hfir_background.ui', baseinstance=self)

        self._content = BckFrame(self)
        self._layout.addWidget(self._content)
        # Flag to show transmission options or not
        self.show_transmission = show_transmission
        self.initialize_content()

        if state is not None:
            self.set_state(state)
        else:
            m = Background()
            self.set_state(m)

        # Remembered states so switching between methods keeps prior inputs.
        self._last_direct_state = None
        self._last_spreader_state = None

    def initialize_content(self):
        """
        Declare the validators and event connections for the
        widgets loaded through the .ui file.
        """
        # Validators
        self._content.transmission_edit.setValidator(QDoubleValidator(self._content.transmission_edit))
        self._content.dtransmission_edit.setValidator(QDoubleValidator(self._content.dtransmission_edit))
        #self._content.thickness_edit.setValidator(QDoubleValidator(self._content.thickness_edit))

        # Connections
        self._content.calculate_trans_chk.clicked.connect(self._calculate_clicked)
        self._content.trans_direct_chk.clicked.connect(self._direct_beam)
        self._content.trans_spreader_chk.clicked.connect(self._beam_spreader)
        self._content.background_chk.clicked.connect(self._background_clicked)
        self._content.background_browse.clicked.connect(self._background_browse)
        self._content.trans_dark_current_button.clicked.connect(self._trans_dark_current_browse)
        self._content.background_plot_button.clicked.connect(self._background_plot_clicked)
        self._content.trans_dark_current_plot_button.clicked.connect(self._trans_dark_current_plot_clicked)

        # Process transmission option
        if not self.show_transmission:
            # Hide every transmission-related control when the option is off.
            self._content.calculate_trans_chk.hide()
            self._content.bck_trans_label.hide()
            self._content.bck_trans_err_label.hide()
            self._content.transmission_edit.hide()
            self._content.dtransmission_edit.hide()
            self._content.calculate_trans_chk.hide()
            self._content.theta_dep_chk.hide()
            self._content.trans_direct_chk.hide()
            self._content.trans_spreader_chk.hide()
            self._content.trans_dark_current_label.hide()
            self._content.trans_dark_current_edit.hide()
            self._content.trans_dark_current_button.hide()

        if not self._has_instrument_view:
            # No instrument view available in this environment: hide plot buttons.
            self._content.background_plot_button.hide()
            self._content.trans_dark_current_plot_button.hide()

    def _background_plot_clicked(self):
        # NOTE(review): passes the bound .text method rather than its value;
        # get_data_info() below calls .text() instead — confirm that
        # show_instrument accepts a callable before changing this.
        self.show_instrument(file_name=self._content.background_edit.text)

    def _trans_dark_current_plot_clicked(self):
        # NOTE(review): same bound-method pattern as _background_plot_clicked.
        self.show_instrument(file_name=self._content.trans_dark_current_edit.text)

    def set_state(self, state):
        """
        Populate the UI elements with the data from the given state.
        @param state: Transmission object
        """
        bck_file = str(self._content.background_edit.text()).strip()
        self._content.background_chk.setChecked(state.background_corr)
        self._content.background_edit.setText(state.background_file)
        if state.background_file.strip() != bck_file:
            # File changed: refresh metadata (distance, wavelength) from it.
            self.get_data_info()
        self._background_clicked(state.background_corr)

        if self.show_transmission:
            self._content.transmission_edit.setText(str("%6.4f" % state.bck_transmission))
            self._content.dtransmission_edit.setText(str("%6.4f" % state.bck_transmission_spread))
            #self._content.thickness_edit.setText("%6.4f" % state.sample_thickness)

            # Select the transmission-method sub-widget matching the state.
            if isinstance(state.trans_calculation_method, state.DirectBeam):
                self._content.trans_direct_chk.setChecked(True)
                self._direct_beam(state=state.trans_calculation_method)
            else:
                self._content.trans_spreader_chk.setChecked(True)
                self._beam_spreader(state=state.trans_calculation_method)

            self._content.calculate_trans_chk.setChecked(state.calculate_transmission)
            self._content.theta_dep_chk.setChecked(state.theta_dependent)
            self._content.trans_dark_current_edit.setText(str(state.trans_dark_current))
            self._calculate_clicked(state.calculate_transmission)

    def get_state(self):
        """
        Returns an object with the state of the interface
        """
        m = Background()
        m.background_corr = self._content.background_chk.isChecked()
        m.background_file = str(self._content.background_edit.text())
        m.bck_transmission_enabled = self.show_transmission
        if self.show_transmission:
            #m.sample_thickness = util._check_and_get_float_line_edit(self._content.thickness_edit)
            m.bck_transmission = util._check_and_get_float_line_edit(self._content.transmission_edit)
            m.bck_transmission_spread = util._check_and_get_float_line_edit(self._content.dtransmission_edit)
            m.calculate_transmission = self._content.calculate_trans_chk.isChecked()
            m.theta_dependent = self._content.theta_dep_chk.isChecked()
            m.trans_dark_current = self._content.trans_dark_current_edit.text()

            if self._method_box is not None:
                m.trans_calculation_method=self._method_box.get_state()
        return m

    def _trans_dark_current_browse(self):
        # Let the user pick a dark-current data file for transmission.
        fname = self.data_browse_dialog()
        if fname:
            self._content.trans_dark_current_edit.setText(fname)

    def _direct_beam(self, state=None):
        # Switch the method widget to direct-beam, restoring any saved state.
        if state is None:
            state = self._last_direct_state
        if isinstance(self._method_box, BckBeamSpreader):
            # Remember the spreader's settings before replacing it.
            self._last_spreader_state = self._method_box.get_state()
        if self.show_transmission:
            self._replace_method(BckDirectBeam(self, state=state, settings=self._settings,
                                               data_type=self._data_type, data_proxy=self._data_proxy))

    def _beam_spreader(self, state=None):
        # Switch the method widget to beam-spreader, restoring any saved state.
        if state is None:
            state = self._last_spreader_state
        if isinstance(self._method_box, BckDirectBeam):
            # Remember the direct-beam's settings before replacing it.
            self._last_direct_state = self._method_box.get_state()
        if self.show_transmission:
            self._replace_method(BckBeamSpreader(self, state=state, settings=self._settings,
                                                 data_type=self._data_type, data_proxy=self._data_proxy))

    def _replace_method(self, widget):
        # Remove the widget currently in the placeholder layout, then insert
        # the new transmission-method widget.
        if self._method_box is not None:
            for i in range(0, self._content.widget_placeholder.count()):
                item = self._content.widget_placeholder.itemAt(i)
                self._content.widget_placeholder.removeItem(self._content.widget_placeholder.itemAt(i))
                item.widget().deleteLater()
        self._method_box = widget
        self._content.widget_placeholder.addWidget(self._method_box)

    def _background_clicked(self, is_checked):
        # Enable/disable every control that only makes sense with a
        # background file selected.
        self._content.background_edit.setEnabled(is_checked)
        #self._content.thickness_edit.setEnabled(is_checked)
        #self._content.thickness_label.setEnabled(is_checked)
        self._content.geometry_options_groupbox.setEnabled(is_checked)
        self._content.background_browse.setEnabled(is_checked)
        self._content.background_plot_button.setEnabled(is_checked)
        self._content.calculate_trans_chk.setEnabled(is_checked)
        self._content.theta_dep_chk.setEnabled(is_checked)
        self._content.bck_trans_label.setEnabled(is_checked)
        self._content.bck_trans_err_label.setEnabled(is_checked)
        self._content.transmission_grpbox.setEnabled(is_checked)
        self._calculate_clicked(is_checked and self._content.calculate_trans_chk.isChecked())

    def _background_browse(self):
        fname = self.data_browse_dialog()
        if fname:
            bck_file = str(self._content.background_edit.text()).strip()
            self._content.background_edit.setText(fname)
            if str(fname).strip() != bck_file:
                # Only re-read metadata when the selection actually changed.
                self.get_data_info()

    def _calculate_clicked(self, is_checked):
        # Toggle between "calculate transmission" widgets and the manual
        # transmission entry fields (mutually exclusive).
        self._content.trans_direct_chk.setEnabled(is_checked)
        self._content.trans_spreader_chk.setEnabled(is_checked)
        if self._method_box is not None:
            self._method_box.setEnabled(is_checked)
        self._content.transmission_edit.setEnabled(not is_checked and self._content.background_chk.isChecked())
        self._content.dtransmission_edit.setEnabled(not is_checked and self._content.background_chk.isChecked())
        self._content.trans_dark_current_label.setEnabled(is_checked)
        self._content.trans_dark_current_edit.setEnabled(is_checked)
        self._content.trans_dark_current_button.setEnabled(is_checked)
        self._content.trans_dark_current_plot_button.setEnabled(is_checked)

    def get_data_info(self):
        """
        Retrieve information from the data file and update the display
        """
        if self._data_proxy is None:
            return

        fname = str(self._content.background_edit.text())
        if len(str(fname).strip())>0:
            dataproxy = self._data_proxy(fname, "__background_raw")
            if len(dataproxy.errors)>0:
                # Proxy reported problems reading the file; leave the UI unchanged.
                return

            self._settings.last_data_ws = dataproxy.data_ws
            if dataproxy.sample_detector_distance is not None:
                self._content.sample_dist_edit.setText(str(dataproxy.sample_detector_distance))
            util._check_and_get_float_line_edit(self._content.sample_dist_edit, min=0.0)
            if dataproxy.wavelength is not None:
                self._content.wavelength_edit.setText(str(dataproxy.wavelength))
            util._check_and_get_float_line_edit(self._content.wavelength_edit, min=0.0)
            if dataproxy.wavelength_spread is not None:
                self._content.wavelength_spread_edit.setText(str(dataproxy.wavelength_spread))
| mganeva/mantid | scripts/Interface/reduction_gui/widgets/sans/hfir_background.py | Python | gpl-3.0 | 12,663 |
# Django
from django.conf.urls import url
from django.views.generic import TemplateView
urlpatterns = [
url(r'^$',
TemplateView.as_view(template_name='home/home.html'),
name='index'),
]
| systers/vms | vms/home/urls.py | Python | gpl-2.0 | 208 |
"""Test PsychoPy sound.py using pyo backend
"""
from __future__ import division
from builtins import object
from past.utils import old_div
import pytest
from scipy.io import wavfile
import shutil, os
from tempfile import mkdtemp
import numpy as np
from psychopy import prefs, core
from psychopy import sound, microphone
from psychopy.tests.utils import TESTS_DATA_PATH
from psychopy.constants import PY3
if PY3:
from importlib import reload
origSoundPref = prefs.hardware['audioLib']
# py.test --cov-report term-missing --cov sound.py tests/test_sound/test_sound_pyo.py
@pytest.mark.needs_sound
class TestPyo(object):
@classmethod
def setup_class(self):
prefs.hardware['audioLib'] = ['pyo']
reload(sound) # to force our new preference to be used
self.contextName='pyo'
try:
assert sound.Sound == sound.SoundPyo
except Exception:
pytest.xfail('need to be using pyo')
self.tmp = mkdtemp(prefix='psychopy-tests-sound')
# ensure some good test data:
testFile = 'green_48000.flac.dist'
new_wav = os.path.join(self.tmp, testFile.replace('.dist', ''))
shutil.copyfile(os.path.join(TESTS_DATA_PATH, testFile), new_wav)
w = microphone.flac2wav(new_wav)
r, d = wavfile.read(w)
assert r == 48000
assert len(d) == 92160
self.testFile = os.path.join(self.tmp, 'green_48000.wav')
@classmethod
def teardown_class(self):
prefs.hardware['audioLib'] = origSoundPref
if hasattr(self, 'tmp'):
shutil.rmtree(self.tmp, ignore_errors=True)
def test_init(self):
for note in ['A', 440, '440', [1,2,3,4], np.array([1,2,3,4])]:
sound.Sound(note, secs=.1)
with pytest.raises(ValueError):
sound.Sound('this is not a file name')
with pytest.raises(ValueError):
sound.Sound(-1) #negative frequency makes no sense
with pytest.raises(DeprecationWarning):
sound.setaudioLib('foo')
points = 100
snd = old_div(np.ones(points), 20)
s = sound.Sound(self.testFile)
def test_play(self):
s = sound.Sound(secs=0.1)
s.play()
core.wait(s.getDuration()+.1) # allows coverage of _onEOS
s.play(loops=1)
core.wait(s.getDuration()*2+.1)
s.play(loops=-1)
s.stop()
def test_start_stop(self):
"""only relevant for sound from files"""
s1 = sound.Sound(self.testFile, start=0.5, stop=1.5)
assert s1.getDuration() == 1
s2 = sound.Sound(self.testFile, start=0.5)
s3 = sound.Sound(self.testFile)
assert s3.getDuration() > s2.getDuration() > s1.getDuration()
s4 = sound.Sound(self.testFile, start=-1, stop=10000)
assert s4.getDuration() == s3.getDuration()
def test_methods(self):
s = sound.Sound(secs=0.1)
v = s.getVolume()
assert v == 1
s.setVolume(0.5)
assert s.getVolume() == 0.5
s.setLoops(2)
assert s.getLoops() == 2
def test_reinit_pyo(self):
pytest.skip()
# was stalling on some machines; revisit if decide to stick with pyo
sound.initPyo()
| hoechenberger/psychopy | psychopy/tests/test_sound/test_sound_pyo.py | Python | gpl-3.0 | 3,229 |
from cbagent.collectors.collector import Collector
class NSServer(Collector):
    # Collects the most recent per-bucket stats samples exposed by ns_server.

    COLLECTOR = "ns_server"

    def _get_stats_uri(self):
        # Yield (stats URI, bucket name) for every bucket in the cluster.
        for bucket, stats in self.get_buckets(with_stats=True):
            uri = stats["uri"]
            yield uri, bucket  # cluster wide

    def _get_stats(self, uri):
        # Fetch the stats document and keep only the newest sample per metric.
        # Returns None when the node exposes no samples.
        samples = self.get_http(path=uri)  # get last minute samples
        stats = {}
        if samples["op"]["lastTStamp"] == 0:
            # Index and N1QL nodes don't have stats in ns_server
            return None
        for metric, values in samples['op']['samples'].items():
            # '/' is replaced because downstream metric names use it as a separator.
            metric = metric.replace('/', '_')
            stats[metric] = values[-1]  # only the most recent sample
        return stats

    def sample(self):
        # Append one datapoint per bucket, skipping buckets without stats.
        for uri, bucket in self._get_stats_uri():
            stats = self._get_stats(uri)
            if not stats:
                continue
            self.update_metric_metadata(stats.keys(), bucket)
            self.store.append(stats, cluster=self.cluster, bucket=bucket,
                              collector=self.COLLECTOR)

    def update_metadata(self):
        # Register the cluster and each of its buckets with the metadata client.
        self.mc.add_cluster()

        for bucket in self.get_buckets():
            self.mc.add_bucket(bucket)
class NSServerOverview(NSServer):
    """Cluster-wide overview stats (currently just 'ops')."""

    METRICS = 'ops',

    def _get_overview_stats(self):
        payload = self.get_http(path='/pools/default/overviewStats')
        # Only the most recent sample of each series is of interest.
        return {metric: series[-1] for metric, series in payload.items()}

    def sample(self):
        stats = self._get_overview_stats()
        if not stats:
            return
        self.store.append(stats, cluster=self.cluster,
                          collector=self.COLLECTOR)

    def update_metadata(self):
        self.update_metric_metadata(self.METRICS)
class NSServerSystem(NSServer):
    """Per-node system metrics (CPU utilization) from /pools/default."""

    COLLECTOR = "ns_server_system"

    METRICS = 'cpu_utilization',

    def _get_system_stats(self):
        cluster_info = self.get_http(path='/pools/default')
        per_server = {}
        for node in cluster_info["nodes"]:
            host = node["hostname"].split(":")[0]  # strip the port
            per_server[host] = {
                "cpu_utilization": node["systemStats"]["cpu_utilization_rate"],
            }
        return per_server

    def sample(self):
        per_server = self._get_system_stats()
        if not per_server:
            return
        for server, stats in per_server.items():
            self.store.append(stats,
                              cluster=self.cluster,
                              server=server,
                              collector=self.COLLECTOR)

    def update_metadata(self):
        self.mc.add_cluster()
        for node in self.get_nodes():
            self.mc.add_server(node)
            self.update_metric_metadata(self.METRICS, server=node)
class XdcrStats(Collector):
    """Collects per-bucket XDCR replication stats from the UI stats endpoint."""

    COLLECTOR = "xdcr_stats"

    def _get_stats_uri(self):
        for bucket in self.get_buckets():
            yield bucket, '/_uistats?bucket={}&zoom=minute'.format(bucket)

    def _get_stats(self, bucket, uri):
        samples = self.get_http(path=uri)
        stats = dict()
        xdcr_samples = samples['stats']['@xdcr-{}'.format(bucket)]
        for metric, values in xdcr_samples.items():
            if 'replications' in metric:
                # Drop the 'replications/<id>/' prefix, keep the metric name.
                metric = metric.split('/')[-1]
            stats[metric] = values[-1]  # only the most recent sample
        return stats

    def sample(self):
        for bucket, uri in self._get_stats_uri():
            stats = self._get_stats(bucket, uri)
            if stats:
                self.update_metric_metadata(stats.keys(), bucket)
                self.store.append(stats,
                                  cluster=self.cluster,
                                  bucket=bucket,
                                  collector=self.COLLECTOR)

    def update_metadata(self):
        self.mc.add_cluster()
        for bucket in self.get_buckets():
            self.mc.add_bucket(bucket)
| couchbase/perfrunner | cbagent/collectors/ns_server.py | Python | apache-2.0 | 3,972 |
#!/usr/bin/env python
import sys,os,random,errno
import numpy as np
import argparse
from argparse import RawTextHelpFormatter
# ---------------- Arguments
parser = argparse.ArgumentParser(description='''Take as input time serial data (one locus). The output can be either mles or likelihood surface over a grid or the mles'
An example of a run: ./ancientselection.py -i TestData_sel0_Jeff.py --dirout Out --codedir ../ancientselection/ --run run1 --exhaust''',formatter_class=RawTextHelpFormatter)
#./ancientselection.py -i TestData_sel0_Jeff.py --dirout Out --codedir ../ancientselection/ --run run1 --exhaust cube
#parser = argparse.ArgumentParser(description='''Take as input time serial data. The output can be either a likelihood values for a grid or the mles'
#An example of a run: ./main_optimize_nelder.py TestData_Jeff.py 400 Oct4run1''',formatter_class=argparse.ArgumentDefaultsHelpFormatter)
#parser.add_argument('--mikefile', help='mikeinfile',type=argparse.FileType('r'),required=True)
#parser.add_argument('--toselect', help='list of indivs to select from mikefile, one per line',type=str,required=True)
#parser.add_argument('--toorder', help='list of pops to reorder indivs from mikefile, one per line',type=str,required=True)
#parser.add_argument('--mikenew', help='mike formatted output file',type=argparse.FileType('wa'),required=True)
helpdatafile = '''datafile is a python script that contains
the data in the following format
For the data itself, here is an example:
M_ = [10,10,10] #for 10 chrom at 3 time points
I_ = [3,3,3] #for 3 derived alleles at each time point
T_ = [-100,-50,0] #the time points in generations
dominance_ = 0.5 #if codominance
where;
M_: python list with the total number of chromosomes
I_: python list wiyth the number of derived alleles
T_: python list with the sampling times generations
dominance_: float with the dominance coefficient for the data (usually 0, 0.5 or 1)
For the parameters:
e.g.
Upper_bounds_ = [0,10,1000] #(t0_up,gamma_up,Ne_up)
Lower_bounds_ = [-150,-10,500] #(t0_low,gamma_low,Ne_low)
fixed_params_ = [None,None,1000] #for t0 and gamma to be free while the pop size is set to 1000.
where:
Upper_bounds_ = python list (t0_up,gamma_up,Ne_up) upper bounds for
t0, gamma and Ne (in this order!!)
Lower_bounds_ = python list (t0_low,gamma_low,Ne_low) lower bounds for
t0, gamma and Ne (in this order!!)
fixed_params = python list indicating which parameters should be fixed
(same order: t0,gamma,Ne). The value
is set to None if the parameters is not to be fixed or to the value it
should be fixed at. The fixed values should be compatible with the bounds.
'''
parser.add_argument('--version','-v', action='version', version='%(prog)s 0.0')
parser.add_argument('--datafile','-i', help=helpdatafile,type=str,required=True)
parser.add_argument('--run','-r', help='''added string to the project name,
only used to label output files (by default it is the datafile name minus '.py' extension)''',type=str,required=False,default='')
parser.add_argument('--dirout', help='directory out (if does not exist, will be created) --default Out',type=str,required=False, default='Out')
parser.add_argument('--codedir', help='directory where the code lives (to be added to your path)',type=str,required=True,default="../bin")
parser.add_argument('--gridsize', help='size of the grid (H) -- default 400',type=int,required=False,default=400)
parser.add_argument('--gridtype', help='type of grid, either of (default,symmetric,uniform,expo) --default default',type=str,required=False,default='default')
parser.add_argument('--expmethod1', help='''exponential method 1, used always if gamma small enough,\neither of (alglin,pade,prec)
alglin: in detail in the paper
pade: implemented in scipy
prec: arbitrary precision, the grid has to be the default grid: !!not ready yet!!!
--default alglin''',type=str,required=False,default='alglin')
parser.add_argument('--expmethod2', help='''exponential method 2 (see above)
used for large abs(gamma),\neither of (pade,prec)
prec: not implemented yet
-- default pade''',type=str,required=False,default='pade')
parser.add_argument('--exhaust', help='''computes the likelihood on a grid:
either cube or predefinite running time
(usage --exhaust cube or --exhaust time)
Note: if --exhaust not specified will try
to find the maximum likelihood using a
nelder-mead algorithm -- default cube)''',required=False,default=False)
parser.add_argument('--T0dim', help='number of evaluations for the age (default 5), only in use if --exhaust cube',type=int,required=False,default=5)
parser.add_argument('--Gammadim', help='number of evaluations for gamma (default 5), only in use if --exhaust cube',type=int,required=False,default=5)
parser.add_argument('--NEdim', help='number of evaluations for Ne (default 5), only in use if --exhaust cube',type=int,required=False,default=5)
parser.add_argument('--runningtime', help='''only in use if --cube time,
you can specify how long you want it to run.
The number of points per paramaters will be the same (if not fixed) -- default 300''',type=int,required=False,default=5*60)
parser.add_argument('--nonconditional', help='''likelihood either conditional (default) on allele segragating at the last sampling time
-- default is to condition, i.e. without --nonconditional flag''',action='store_true',required=False,default=False)
parser.add_argument('--verbose', help='increase standard out',action='store_true',required=False,default=False)
parser.add_argument('--debug', help='debug, lots of standard out',action='store_true',required=False,default=False)
args = parser.parse_args()
# parse all arguments
verbose = args.verbose
debug = args.debug
datafile = args.datafile
run = args.run
project = datafile.split('.py')[0]+'_'+run
dirout = args.dirout
codedir = args.codedir #(to add to the path)
H = args.gridsize
gridtype = args.gridtype
expmethod1 = args.expmethod1
expmethod2 = args.expmethod2
exhaust = args.exhaust
runningtime = args.runningtime
T0dim = args.T0dim
Gammadim = args.Gammadim
NEdim = args.NEdim
nonconditional = args.nonconditional
#parse datafile
execfile(datafile)
#append path etc.
sys.path.append(codedir)
import inference ## ll function
import optimize ## nelder mead and exhaustive
import funcs ##for the domain definition
#create output directory
def mkdir_p(path):
    """Create *path* and any missing parents; succeed silently if it
    already exists (the behaviour of ``mkdir -p``)."""
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        # Only swallow "already exists" for an actual directory; anything
        # else (permissions, a plain file in the way, ...) is re-raised.
        if not (exc.errno == errno.EEXIST and os.path.isdir(path)):
            raise
if not os.path.isdir(dirout):
if verbose:
print "your outputdir does not exist, created now"
mkdir_p(dirout)
#"check" the datafile
try:
data=(I_,M_,T_)
dominance_
except:
print "Something is missing in your datafile!"
print "Check that you have all variables\nI_,M_,T_,dominance_\nUpper_bounds_,Lower_bounds_,fixed_params_ defined in your datafile"
sys.exit()
#check the mode
if exhaust and exhaust not in ['cube','time']:
print "--exhaust can only be followed by cube or time"
sys.exit()
# print output
if verbose:
print "Datafile:\t",datafile
print "project name:\t",project
print "Data:"
print "M:\t",M_
print "I:\t",I_
print "T:\t",T_
print "dominance:\t",dominance_
print "Parameters:"
print "Upper Bounds:\t",Upper_bounds_
print "Lower Bounds:\t",Lower_bounds_
print "Fixed params:\t",fixed_params_
print "Grid size (H):\t",H
print "Grid type:\t",gridtype
print "expmethod1: ",expmethod1
print "expmethod2: ",expmethod2
if nonconditional == True:
print "Will use the unconditional process (Q matrix)..."
else:
print "Will use the conditional matrix (q matrix)..."
if exhaust:
print "exhaustive mode"
else:
print "optimization mode (nelder-mead)"
if debug: print "Debugging ..."
#further checks: same size of M, I, T
if len(set([len(M_),len(T_),len(I_)]))!=1:
print "M_,I_,T_: all have to be the same length!!"
print "ckeck: "
print M_,I_,T_
sys.exit()
if len(set([len(Upper_bounds_),len(Lower_bounds_),len(fixed_params_)]))!=1:
print "Upper_bounds_,Lower_bounds_,fixed_params_: all have to be the same length!!"
print "ckeck: "
print Upper_bounds_,Lower_bounds_,fixed_params_
sys.exit()
t0_low,gamma_low,Ne_low=Lower_bounds_
t0_up,gamma_up,Ne_up=Upper_bounds_
if nonconditional:
ll_func = inference.ll
else:
ll_func = inference.ll_q
dico_ll=dict(data=data,H=H,verbose=debug, expmethod=expmethod1,
expmethodsecond=expmethod2,loga=1,grid='default',
positive=1,dominance=dominance_,threshold=1e-4,
numberprecision=128,thresholdfrerot=1e-5)
### compute the likelihood on one set of values (make sure dictionaries have all they need)
if debug:
p0gamma=0.01
p0Ne=random.uniform(Ne_low,Ne_up)
p0t=random.uniform(t0_low,T_[0])
pinput = [p0t,p0gamma,p0Ne]
print "Testing that the ll function is well defined..."
print "pinput: ",pinput
trial_ll=ll_func(pinput,**dico_ll)
#print "trial_ll ",trial_ll
if debug:
print "debug ",debug
####
if not exhaust:
Popt=[]
P0=[]
Likopt=[]
Warnflags=[]
Domains=funcs.domains(data = data,smallert = t0_low,fixed_time = fixed_params_[0])
dico_nelder=dict(xtol=0.0001, ftol=0.0001,
maxiter=1000, maxfun=2000, full_output=1,
disp=1, retall=1, callback=None)
if verbose:
print "Trying to find mles for each domain"
for count,domt in enumerate(Domains):
count+=1
#raw_input()
p0t=random.uniform(domt[0],domt[1])
p0gamma=random.uniform(gamma_low,gamma_up)
p0Ne=random.uniform(Ne_low,Ne_up)
p0=[p0t,p0gamma,p0Ne]
# print "initial values: ",p0
P0.append(p0)
lower_bound=domt[0],gamma_low,Ne_low
upper_bound=domt[1],gamma_up,Ne_up
if verbose:
print "Domain number: ",count,", domt: ",domt
print "Starting values: ",p0
print "current lower bound ",lower_bound
print "current upper bound ",upper_bound
print "fixed_params ",fixed_params_
func_ex=ll_func
#currently default maxiter=100, maxfun=20
xopt,fopt,iteration,funcalls,warnflag,allvecs = optimize.neldermead(
model_func=func_ex,p0=p0,fixed_params=fixed_params_,
flush_delay=0.5,verbose_obj=debug,
lower_bound_obj=lower_bound, upper_bound_obj=upper_bound,
args_nelder=dico_nelder,
args_ll=dico_ll)
if verbose: print 'popt: ',xopt
Popt.append(xopt)
Likopt.append(fopt)
Warnflags.append(warnflag)
#print results to the screen
print "Run %s finished!!"%run
print "Starting parameters:\n",P0
print "MLEs:\n",Popt
print "Maximum likelihood:\n",Likopt
print "Warnflags (warnflag!=0, neldermead did not converge):\n",Warnflags
Likopt=np.array(Likopt)
np.save("%s/Pstart_%s"%(dirout,project),P0)
np.save("%s/Popt_%s"%(dirout,project),Popt)
np.save("%s/Likopt_%s"%(dirout,project),Likopt)
np.save("%s/Warnflags_%s"%(dirout,project),Warnflags)
elif exhaust:
if verbose:
print "Starting exhaustive computation ..."
if exhaust=='cube':
#Define the cube
Grid = T0dim,Gammadim,NEdim
print "Computation on a grid, where each T0 Gamma and NE have dims: ",Grid
Popt,maxlik,Lik_array,T0,Gamma,NE,Tinput=optimize.exhaustive_search(ll_func,data=data,H=H,Grid=Grid,running_time=None,Bounds=[(t0_low,t0_up),(gamma_low,gamma_up),(Ne_low,Ne_up)],fixed_params=fixed_params_,verbose=debug,dominance=dominance_,verboselik=debug)
elif exhaust=='time':
Popt,maxlik,Lik_array,T0,Gamma,NE,Tinput=optimize.exhaustive_search(ll_func,data=data,H=H,Grid=None,running_time=runningtime,Bounds=[(t0_low,t0_up),(gamma_low,gamma_up),(Ne_low,Ne_up)],fixed_params=fixed_params_,verbose=debug,dominance=dominance_,verboselik=debug)
optimize.saveoutput(Lik_array=Lik_array,T0=T0,Gamma=Gamma,NE=NE,Tinput=Tinput,project=project,directory=dirout,verbose=1)
print "-------------------------------"
print "Run %s finished!!"%run
print "MLEs (over exhaustive search):\n",Popt
print "Maximum likelihood (exhaustive search):\n",maxlik
np.save("%s/Popt_%s"%(dirout,project),Popt)
np.save("%s/Likopt_%s"%(dirout,project),maxlik)
if verbose:
print "Grid dimensions: ",Lik_array.shape
print "Popt: ",Popt
print "maxlik: ",maxlik
| sapfo/ancientselection | ancientselection/ancientselection.py | Python | gpl-3.0 | 12,851 |
# -*- coding: utf-8 -*-
from datetime import datetime
from django.test import TestCase
from django.conf import settings
from django.utils import timezone
from .factories import UserFactory, LocationFactory
from .models import Media, User, Tag, Location
from .api import InstagramError
USER_ID = 237074561 # tnt_online
USER_PRIVATE_ID = 176980649
USER_ID_2 = 775667951 # about 200 media
USER_ID_3 = 1741896487 # about 400 followers
MEDIA_ID = '934625295371059186_205828054'
MEDIA_ID_2 = '806703315661297054_190931988' # media without caption
LOCATION_ID = 1
TAG_NAME = "snowyday"
TAG_SEARCH_NAME = "snowy"
LOCATION_SEARCH_NAME = "Dog Patch Labs"
TOKEN = '1687258424.0fdde74.9badafabca4e49df90da02798db6bf02'
INSTAGRAM_USERNAME = 'atsepk'
INSTAGRAM_PASSWORD = 'Jh6#dFwEHc'
class InstagramApiTestCase(TestCase):
    """Base class: injects the test token into SOCIAL_API_CALL_CONTEXT for
    the duration of each test and restores the saved value afterwards."""
    # Snapshot of the original SOCIAL_API_CALL_CONTEXT, taken in setUp().
    _settings = None
    def setUp(self):
        context = getattr(settings, 'SOCIAL_API_CALL_CONTEXT', {})
        # Copy before mutating, so tearDown can restore the pristine value.
        self._settings = dict(context)
        # NOTE(review): this mutates the existing dict in place; if the
        # setting was absent, the update lands on a throwaway default dict
        # and is never attached to settings — confirm that is intended.
        context.update({'instagram': {'token': TOKEN}})
    def tearDown(self):
        setattr(settings, 'SOCIAL_API_CALL_CONTEXT', self._settings)
class UserTest(InstagramApiTestCase):
def setUp(self):
super(UserTest, self).setUp()
self.time = timezone.now()
def test_fetch_user_by_name(self):
u = User.remote.get_by_slug('tnt_online')
self.assertEqual(int(u.id), USER_ID)
self.assertEqual(u.username, 'tnt_online')
self.assertEqual(u.full_name, u'Телеканал ТНТ')
self.assertGreater(len(u.profile_picture), 0)
self.assertGreater(len(u.website), 0)
def test_search_users(self):
users = User.remote.search('tnt_online')
self.assertGreater(len(users), 0)
for user in users:
self.assertIsInstance(user, User)
def test_fetch_user(self):
u = User.remote.fetch(USER_ID)
self.assertEqual(int(u.id), USER_ID)
self.assertEqual(u.username, 'tnt_online')
self.assertEqual(u.full_name, u'Телеканал ТНТ')
self.assertGreater(len(u.profile_picture), 0)
self.assertGreater(len(u.website), 0)
self.assertGreater(u.followers_count, 0)
self.assertGreater(u.follows_count, 0)
self.assertGreater(u.media_count, 0)
self.assertGreater(u.fetched, self.time)
u.followers_count = None
u.save()
self.assertIsNone(u.followers_count)
u.refresh()
self.assertGreater(u.followers_count, 0)
u = User.objects.get(id=u.id)
self.assertGreater(u.followers_count, 0)
def test_fetch_user_follows_graphql(self):
u = User.remote.fetch(USER_ID_3)
self.assertEqual(u.is_private, False)
settings_temp = dict(OAUTH_TOKENS_INSTAGRAM_USERNAME=INSTAGRAM_USERNAME,
OAUTH_TOKENS_INSTAGRAM_PASSWORD=INSTAGRAM_PASSWORD)
with self.settings(**settings_temp):
users = u.fetch_follows(source='graphql')
self.assertGreaterEqual(u.follows_count, 996)
self.assertEqual(u.follows_count, users.count())
self.assertEqual(u.follows_count, User.objects.count() - 1)
def test_fetch_user_followers_graphql(self):
u = User.remote.fetch(USER_ID_3)
self.assertEqual(u.is_private, False)
settings_temp = dict(OAUTH_TOKENS_INSTAGRAM_USERNAME=INSTAGRAM_USERNAME,
OAUTH_TOKENS_INSTAGRAM_PASSWORD=INSTAGRAM_PASSWORD)
with self.settings(**settings_temp):
users = u.fetch_followers(source='graphql')
self.assertGreaterEqual(u.follows_count, 754)
self.assertEqual(u.followers_count, users.count())
self.assertEqual(u.followers_count, User.objects.count() - 1)
def test_fetch_user_follows(self):
u = User.remote.fetch(USER_ID_3)
self.assertEqual(u.is_private, False)
users = u.fetch_follows()
self.assertGreaterEqual(u.follows_count, 970)
self.assertEqual(u.follows_count, users.count())
def test_fetch_user_followers(self):
u = User.remote.fetch(USER_ID_3)
self.assertEqual(u.is_private, False)
users = u.fetch_followers()
self.assertGreaterEqual(u.followers_count, 750)
self.assertEqual(u.followers_count, users.count())
# check counts for any first public follower
for f in users:
self.assertIsNone(f.followers_count)
self.assertIsNone(f.follows_count)
self.assertIsNone(f.media_count)
f = User.remote.fetch(f.id)
if f.is_private is False:
self.assertIsNotNone(f.followers_count)
self.assertIsNotNone(f.follows_count)
self.assertIsNotNone(f.media_count)
break
# fetch followers once again and check counts
u.fetch_followers()
f = User.objects.get(id=f.id)
self.assertIsNotNone(f.followers_count)
self.assertIsNotNone(f.follows_count)
self.assertIsNotNone(f.media_count)
def test_fetch_users_with_full_name_bad_overlength(self):
user = User.remote.get_by_slug('stasplot')
self.assertEqual(user.full_name, u'Stas from Ishim Ишим Тюмень Tymen region Тюмень') # noqa
user = User.remote.fetch(47274770)
self.assertEqual(user.full_name, u'Stas from Ishim Ишим Тюмень Ty')
user = User.remote.get_by_slug('keratin_krasnodar1')
self.assertEqual(user.full_name, u'Кератин, Ботокс в Краснодаре \ud83c\udf80') # noqa
user = User.remote.fetch(2057367004)
self.assertEqual(user.full_name, u'Кератин, Ботокс в Краснодаре ')
user = User.remote.get_by_slug('beautypageantsfans')
self.assertEqual(user.full_name, u'I Am A Girl \xbfAnd What?\ud83d\udc81\ud83c\udffb\u2728\ud83d\udc51\ud83d\udc8b') # noqa
user = User.remote.fetch(1164190771)
self.assertEqual(user.full_name, u'I Am A Girl \xbfAnd What?\ud83d\udc81\ud83c\udffb\u2728\ud83d\udc51')
def test_fetch_duplicate_user(self):
u = UserFactory(id=0, username='tnt_online')
self.assertEqual(User.objects.count(), 1)
self.assertNotEqual(int(u.id), USER_ID)
self.assertEqual(u.username, 'tnt_online')
u = User.remote.fetch(USER_ID)
self.assertEqual(User.objects.count(), 1)
self.assertEqual(int(u.id), USER_ID)
self.assertEqual(u.username, 'tnt_online')
def test_fetch_duble_duplicate_user(self):
u1 = UserFactory(username='tnt_online', id=8910216)
u2 = UserFactory(username='bmwru', id=237074561)
self.assertEqual(User.objects.count(), 2)
self.assertEqual(int(u1.id), 8910216)
self.assertEqual(int(u2.id), 237074561)
self.assertEqual(u1.username, 'tnt_online')
self.assertEqual(u2.username, 'bmwru')
u1 = User.remote.fetch(8910216)
u2 = User.remote.fetch(237074561)
self.assertEqual(User.objects.count(), 2)
self.assertEqual(int(u1.id), 8910216)
self.assertEqual(int(u2.id), 237074561)
self.assertEqual(u1.username, 'bmwru')
self.assertEqual(u2.username, 'tnt_online')
def test_fetch_real_duplicates_user(self):
UserFactory(id=2116301016)
User.remote.fetch(2116301016)
with self.assertRaises(InstagramError):
User.remote.get(1206219929)
try:
User.remote.get(1206219929)
except InstagramError as e:
self.assertEqual(e.code, 400)
def test_fetch_private_user(self):
with self.assertRaises(InstagramError):
User.remote.fetch(USER_PRIVATE_ID)
try:
User.remote.fetch(USER_PRIVATE_ID)
except InstagramError as e:
self.assertEqual(e.code, 400)
userf = UserFactory(id=USER_PRIVATE_ID)
user = User.remote.fetch(USER_PRIVATE_ID)
self.assertEqual(userf, user)
self.assertFalse(userf.is_private)
self.assertTrue(user.is_private)
userf.refresh()
self.assertTrue(userf.is_private)
def test_unexisted_user(self):
with self.assertRaises(InstagramError):
User.remote.get(0)
try:
User.remote.get(0)
except InstagramError as e:
self.assertEqual(e.code, 400)
class MediaTest(InstagramApiTestCase):
def setUp(self):
super(MediaTest, self).setUp()
self.time = timezone.now()
def test_fetch_media(self):
m = Media.remote.fetch(MEDIA_ID)
self.assertEqual(m.remote_id, MEDIA_ID)
self.assertGreater(len(m.caption), 0)
self.assertGreater(len(m.link), 0)
self.assertGreater(m.comments_count, 0)
self.assertGreater(m.likes_count, 0)
self.assertGreater(m.fetched, self.time)
self.assertIsInstance(m.created_time, datetime)
# specifying timezone and then making it naive again
self.assertEqual(m.created_time, timezone.make_aware(m.created_time, timezone.get_current_timezone()).replace(tzinfo=None))
self.assertEqual(m.type, 'video')
self.assertEqual(m.filter, 'Normal')
self.assertGreater(len(m.image_low_resolution), 0)
self.assertGreater(len(m.image_standard_resolution), 0)
self.assertGreater(len(m.image_thumbnail), 0)
self.assertGreater(len(m.video_low_bandwidth), 0)
self.assertGreater(len(m.video_low_resolution), 0)
self.assertGreater(len(m.video_standard_resolution), 0)
self.assertGreater(m.comments.count(), 0)
self.assertGreater(m.tags.count(), 0)
# self.assertGreater(m.likes_users.count(), 0)
# media without caption test
m = Media.remote.fetch(MEDIA_ID_2)
self.assertEqual(len(m.caption), 0)
self.assertEqual(m.type, 'image')
self.assertGreater(len(m.image_low_resolution), 0)
self.assertGreater(len(m.image_standard_resolution), 0)
self.assertGreater(len(m.image_thumbnail), 0)
self.assertGreater(m.comments.count(), 0)
# self.assertGreater(m.likes_users.count(), 0)
def test_fetch_user_media_count(self):
u = UserFactory(id=USER_ID)
media = u.fetch_media(count=100)
m = media[0]
self.assertEqual(media.count(), 100)
self.assertEqual(m.user, u)
self.assertGreater(len(m.caption), 0)
self.assertGreater(len(m.link), 0)
self.assertGreater(m.comments_count, 0)
self.assertGreater(m.likes_count, 0)
self.assertGreater(m.fetched, self.time)
self.assertIsInstance(m.created_time, datetime)
def test_fetch_user_media(self):
u = User.remote.fetch(USER_ID_2)
media = u.fetch_media()
self.assertGreater(media.count(), 210)
self.assertEqual(media.count(), u.media_count)
self.assertEqual(media.count(), u.media_feed.count())
after = media.order_by('-created_time')[50].created_time
Media.objects.all().delete()
self.assertEqual(u.media_feed.count(), 0)
media = u.fetch_media(after=after)
self.assertEqual(media.count(), 53) # not 50 for some strange reason
self.assertEqual(media.count(), u.media_feed.count())
def test_fetch_media_with_location(self):
media = Media.remote.fetch('1105137931436928268_1692711770')
self.assertIsInstance(media.location, Location)
self.assertEqual(media.location.name, 'Prague, Czech Republic')
def test_fetch_comments(self):
m = Media.remote.fetch(MEDIA_ID)
comments = m.fetch_comments()
self.assertGreater(m.comments_count, 0)
# TODO: 84 != 80 strange bug of API, may be limit of comments to fetch
# self.assertEqual(m.comments_count, len(comments))
c = comments[0]
self.assertEqual(c.media, m)
self.assertGreater(len(c.text), 0)
self.assertGreater(c.fetched, self.time)
self.assertIsInstance(c.created_time, datetime)
def test_fetch_likes(self):
m = Media.remote.fetch(MEDIA_ID)
likes = m.fetch_likes()
self.assertGreater(m.likes_count, 0)
# TODO: 2515 != 117 how to get all likes?
# self.assertEqual(m.likes_count, likes.count())
self.assertIsInstance(likes[0], User)
class TagTest(InstagramApiTestCase):
    """Remote fetch/search of tags and tag media feeds."""
    def test_fetch_tag(self):
        t = Tag.remote.fetch(TAG_NAME)
        self.assertEqual(t.name, TAG_NAME)
        self.assertGreater(t.media_count, 0)
    def test_search_tags(self):
        # partial-name search should return only Tag instances
        tags = Tag.remote.search(TAG_SEARCH_NAME)
        self.assertGreater(len(tags), 0)
        for tag in tags:
            self.assertIsInstance(tag, Tag)
    def test_fetch_tag_media(self):
        t = Tag.remote.fetch("merrittislandnwr")
        media = t.fetch_media()
        self.assertGreater(media.count(), 0)
        # everything fetched must be reachable through the tag's media feed
        self.assertEqual(media.count(), t.media_feed.count())
class LocationTest(InstagramApiTestCase):
    """Remote fetch/search of locations and location media feeds."""
    def test_fetch_location(self):
        location = Location.remote.fetch(LOCATION_ID)
        self.assertEqual(location.id, LOCATION_ID)
        self.assertEqual(location.name, "Dog Patch Labs")
        self.assertEqual(location.latitude, 37.782492553)
        self.assertEqual(location.longitude, -122.387785235)
        # media_count is not populated by a plain fetch
        self.assertEqual(location.media_count, None)
    def test_search_locations(self):
        # geo search near Dog Patch Labs should return Location instances
        locations = Location.remote.search(lat=37.782492553, lng=-122.387785235)
        self.assertGreater(len(locations), 0)
        for location in locations:
            self.assertIsInstance(location, Location)
    def test_fetch_location_media(self):
        location = LocationFactory(id=LOCATION_ID)
        media = location.fetch_media()
        self.assertGreater(media.count(), 0)
        self.assertEqual(media.count(), location.media_feed.count())
        # fetching media also fills in the counter
        self.assertEqual(media.count(), location.media_count)
# class InstagramApiTest(UserTest, MediaTest):
# def call(api, *a, **kw):
# raise InstagramAPIError(503, "Rate limited", "Your client is making too many request per second")
#
# @mock.patch('instagram.client.InstagramAPI.user', side_effect=call)
# @mock.patch('instagram_api.api.InstagramApi.repeat_call',
# side_effect=lambda *a, **kw: models.User.object_from_dictionary({'id': '205828054'}))
# def test_client_rate_limit(self, call, repeat_call):
# self.assertGreaterEqual(len(CLIENT_IDS), 2)
# User.remote.fetch(USER_ID_2)
# self.assertEqual(call.called, True)
# self.assertEqual(repeat_call.called, True)
| ramusus/django-instagram-api | instagram_api/tests.py | Python | bsd-3-clause | 14,673 |
'''
Created on Jun 1, 2015
@author: joep
'''
from unittest import TestCase
import pygame
from util.rect import are_touching
class RectTest(TestCase):
    """Unit tests for util.rect.are_touching."""
    def test_are_touching(self):
        """Edge-adjacent rects touch; corner-only contact, gaps, and
        overlap do not count as touching."""
        rect_1 = pygame.Rect(3, 3, 1, 1)
        # unit rects sharing a full edge (left, top, right, bottom)
        self.assertTrue(are_touching(rect_1, pygame.Rect(2, 3, 1, 1)))
        self.assertTrue(are_touching(rect_1, pygame.Rect(3, 2, 1, 1)))
        self.assertTrue(are_touching(rect_1, pygame.Rect(4, 3, 1, 1)))
        self.assertTrue(are_touching(rect_1, pygame.Rect(3, 4, 1, 1)))
        # larger rects whose edge meets one of rect_1's edges
        self.assertTrue(are_touching(rect_1, pygame.Rect(1, 3, 2, 1)))
        self.assertTrue(are_touching(rect_1, pygame.Rect(3, 1, 1, 2)))
        self.assertTrue(are_touching(rect_1, pygame.Rect(4, 3, 2, 1)))
        self.assertTrue(are_touching(rect_1, pygame.Rect(3, 4, 1, 2)))
        self.assertTrue(are_touching(rect_1, pygame.Rect(1, 2, 3, 1)))
        self.assertTrue(are_touching(rect_1, pygame.Rect(2, 1, 1, 3)))
        self.assertTrue(are_touching(rect_1, pygame.Rect(4, 2, 1, 3)))
        self.assertTrue(are_touching(rect_1, pygame.Rect(1, 4, 3, 1)))
        # corner-only (diagonal) contact is not touching
        self.assertFalse(are_touching(rect_1, pygame.Rect(2, 2, 1, 1)))
        self.assertFalse(are_touching(rect_1, pygame.Rect(4, 2, 1, 1)))
        self.assertFalse(are_touching(rect_1, pygame.Rect(4, 4, 1, 1)))
        self.assertFalse(are_touching(rect_1, pygame.Rect(2, 4, 1, 1)))
        # rects separated by a one-cell gap — or overlapping rect_1, as
        # (3, 1, 1, 4) does — are not touching either
        self.assertFalse(are_touching(rect_1, pygame.Rect(1, 1, 4, 1)))
        self.assertFalse(are_touching(rect_1, pygame.Rect(1, 5, 4, 1)))
        self.assertFalse(are_touching(rect_1, pygame.Rect(1, 1, 1, 4)))
        self.assertFalse(are_touching(rect_1, pygame.Rect(5, 1, 1, 4)))
        self.assertFalse(are_touching(rect_1, pygame.Rect(3, 1, 4, 1)))
        self.assertFalse(are_touching(rect_1, pygame.Rect(3, 5, 4, 1)))
        self.assertFalse(are_touching(rect_1, pygame.Rect(3, 1, 1, 4)))
self.assertFalse(are_touching(rect_1, pygame.Rect(5, 3, 1, 4))) | JoepDriesen/Township | Township/util/test/test_rect.py | Python | gpl-3.0 | 2,000 |
# -*- coding: utf-8 -*-
'''
The EC2 Cloud Module
====================
The EC2 cloud module is used to interact with the Amazon Elastic Cloud
Computing. This driver is highly experimental! Use at your own risk!
To use the EC2 cloud module, set up the cloud configuration at
``/etc/salt/cloud.providers`` or ``/etc/salt/cloud.providers.d/ec2.conf``:
.. code-block:: yaml
my-ec2-config:
# The EC2 API authentication id
id: GKTADJGHEIQSXMKKRBJ08H
# The EC2 API authentication key
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
# The ssh keyname to use
keyname: default
# The amazon security group
securitygroup: ssh_open
# The location of the private key which corresponds to the keyname
private_key: /root/default.pem
    # By default, service_url is set to amazonaws.com. If you are using this
# driver for something other than Amazon EC2, change it here:
service_url: amazonaws.com
# The endpoint that is ultimately used is usually formed using the region
# and the service_url. If you would like to override that entirely, you
# can explicitly define the endpoint:
endpoint: myendpoint.example.com:1138/services/Cloud
provider: ec2
'''
# pylint: disable=E0102
# Import python libs
import os
import copy
import sys
import stat
import time
import uuid
import pprint
import logging
import yaml
# Import libs for talking to the EC2 API
import hmac
import hashlib
import binascii
import datetime
import urllib
import urllib2
# Import salt libs
from salt._compat import ElementTree as ET
# Import salt.cloud libs
import salt.utils.cloud
import salt.config as config
from salt.cloud.libcloudfuncs import * # pylint: disable=W0614,W0401
from salt.cloud.exceptions import (
SaltCloudException,
SaltCloudSystemExit,
SaltCloudConfigError,
SaltCloudExecutionTimeout,
SaltCloudExecutionFailure
)
# Get logging started
log = logging.getLogger(__name__)
SIZE_MAP = {
'Micro Instance': 't1.micro',
'Small Instance': 'm1.small',
'Medium Instance': 'm1.medium',
'Large Instance': 'm1.large',
'Extra Large Instance': 'm1.xlarge',
'High-CPU Medium Instance': 'c1.medium',
'High-CPU Extra Large Instance': 'c1.xlarge',
'High-Memory Extra Large Instance': 'm2.xlarge',
'High-Memory Double Extra Large Instance': 'm2.2xlarge',
'High-Memory Quadruple Extra Large Instance': 'm2.4xlarge',
'Cluster GPU Quadruple Extra Large Instance': 'cg1.4xlarge',
'Cluster Compute Quadruple Extra Large Instance': 'cc1.4xlarge',
'Cluster Compute Eight Extra Large Instance': 'cc2.8xlarge',
}
EC2_LOCATIONS = {
'ap-northeast-1': Provider.EC2_AP_NORTHEAST,
'ap-southeast-1': Provider.EC2_AP_SOUTHEAST,
'eu-west-1': Provider.EC2_EU_WEST,
'sa-east-1': Provider.EC2_SA_EAST,
'us-east-1': Provider.EC2_US_EAST,
'us-west-1': Provider.EC2_US_WEST,
'us-west-2': Provider.EC2_US_WEST_OREGON
}
DEFAULT_LOCATION = 'us-east-1'
DEFAULT_EC2_API_VERSION = '2013-10-01'
if hasattr(Provider, 'EC2_AP_SOUTHEAST2'):
EC2_LOCATIONS['ap-southeast-2'] = Provider.EC2_AP_SOUTHEAST2
# Only load in this module if the EC2 configurations are in place
def __virtual__():
    '''
    Set up the libcloud functions and check for EC2 configurations.

    Returns False (module not loaded) when no EC2 provider is configured;
    raises SaltCloudException when a configured provider's key file is
    missing or world/group readable; returns True otherwise.
    '''
    if get_configured_provider() is False:
        log.debug(
            'There is no EC2 cloud provider configuration available. Not '
            'loading module'
        )
        return False
    # Validate the private key of every configured ec2 provider up front,
    # so misconfiguration fails loudly at load time rather than mid-run.
    for provider, details in __opts__['providers'].iteritems():
        if 'provider' not in details or details['provider'] != 'ec2':
            continue
        if not os.path.exists(details['private_key']):
            raise SaltCloudException(
                'The EC2 key file {0!r} used in the {1!r} provider '
                'configuration does not exist\n'.format(
                    details['private_key'],
                    provider
                )
            )
        # ssh refuses keys readable by anyone but the owner, so enforce
        # 0400/0600 here (compare the octal-string form of the mode bits).
        keymode = str(
            oct(stat.S_IMODE(os.stat(details['private_key']).st_mode))
        )
        if keymode not in ('0400', '0600'):
            raise SaltCloudException(
                'The EC2 key file {0!r} used in the {1!r} provider '
                'configuration needs to be set to mode 0400 or 0600\n'.format(
                    details['private_key'],
                    provider
                )
            )
    log.debug('Loading EC2 cloud compute module')
    return True
def get_configured_provider():
    '''
    Return the first configured EC2 provider instance, or ``False`` when
    none of the configured providers carries the required credentials.
    '''
    required_keys = ('id', 'key', 'keyname', 'private_key')
    active = __active_provider_name__ or 'ec2'
    return config.is_provider_configured(__opts__, active, required_keys)
def _xml_to_dict(xmltree):
'''
Convert an XML tree into a dict
'''
if sys.version_info < (2, 7):
children_len = len(xmltree.getchildren())
else:
children_len = len(xmltree)
if children_len < 1:
name = xmltree.tag
if '}' in name:
comps = name.split('}')
name = comps[1]
return {name: xmltree.text}
xmldict = {}
for item in xmltree:
name = item.tag
if '}' in name:
comps = name.split('}')
name = comps[1]
if not name in xmldict.keys():
if sys.version_info < (2, 7):
children_len = len(item.getchildren())
else:
children_len = len(item)
if children_len > 0:
xmldict[name] = _xml_to_dict(item)
else:
xmldict[name] = item.text
else:
if type(xmldict[name]) is not list:
tempvar = xmldict[name]
xmldict[name] = []
xmldict[name].append(tempvar)
xmldict[name].append(_xml_to_dict(item))
return xmldict
def query(params=None, setname=None, requesturl=None, location=None,
          return_url=False, return_root=False):
    '''
    Issue a Signature-Version-2-signed GET request against the EC2 API
    and return the parsed response.

    :param params: dict of EC2 API parameters; mutated in place with the
                   signing fields (AWSAccessKeyId, Signature, ...).
    :param setname: when given, select the child element of the response
                    root whose (namespace-stripped) tag matches it.
    :param requesturl: a fully-built URL from a prior call; when given,
                       signing is skipped and the URL is fetched as-is.
    :param location: EC2 region; defaults to get_location().
    :param return_url: also return the request URL as a second value.
    :param return_root: iterate the response root itself rather than its
                        second child.
    :returns: list of dicts (one per response item), or
              ``{'error': ...}`` on an HTTP error.
    '''
    provider = get_configured_provider()
    service_url = provider.get('service_url', 'amazonaws.com')

    # SigV2 requires an ISO-8601 UTC timestamp in every request.
    timestamp = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')

    if not location:
        location = get_location()

    if not requesturl:
        method = 'GET'

        endpoint = provider.get(
            'endpoint',
            'ec2.{0}.{1}'.format(location, service_url)
        )

        ec2_api_version = provider.get(
            'ec2_api_version',
            DEFAULT_EC2_API_VERSION
        )

        params['AWSAccessKeyId'] = provider['id']
        params['SignatureVersion'] = '2'
        params['SignatureMethod'] = 'HmacSHA256'
        params['Timestamp'] = '{0}'.format(timestamp)
        params['Version'] = ec2_api_version

        # SigV2 canonicalization: parameters must be sorted by key
        # before building the string to sign.
        keys = sorted(params.keys())
        values = map(params.get, keys)
        querystring = urllib.urlencode(list(zip(keys, values)))

        # String to sign: "METHOD\nHOST\nPATH\nCANONICAL_QUERYSTRING".
        # NOTE(review): .encode()/hmac.new on str is Python-2 specific;
        # on Python 3 hmac.new would need bytes keys/messages.
        uri = '{0}\n{1}\n/\n{2}'.format(method.encode('utf-8'),
                                        endpoint.encode('utf-8'),
                                        querystring.encode('utf-8'))

        hashed = hmac.new(provider['key'], uri, hashlib.sha256)
        sig = binascii.b2a_base64(hashed.digest())
        params['Signature'] = sig.strip()

        querystring = urllib.urlencode(params)
        requesturl = 'https://{0}/?{1}'.format(endpoint, querystring)

    log.debug('EC2 Request: {0}'.format(requesturl))
    try:
        result = urllib2.urlopen(requesturl)
        log.debug(
            'EC2 Response Status Code: {0}'.format(
                result.getcode()
            )
        )
    except urllib2.URLError as exc:
        # EC2 returns an XML error document in the HTTP error body;
        # parse it and hand it back under an 'error' key.
        log.error(
            'EC2 Response Status Code: {0} {1}'.format(
                exc.code, exc.msg
            )
        )
        root = ET.fromstring(exc.read())
        data = _xml_to_dict(root)
        if return_url is True:
            return {'error': data}, requesturl
        return {'error': data}

    response = result.read()
    result.close()

    root = ET.fromstring(response)
    # By convention the second child of the response root holds the
    # result set (the first is the requestId).
    items = root[1]
    if return_root is True:
        items = root

    if setname:
        if sys.version_info < (2, 7):
            children_len = len(root.getchildren())
        else:
            children_len = len(root)

        # Locate the child whose namespace-stripped tag equals setname.
        for item in range(0, children_len):
            comps = root[item].tag.split('}')
            if comps[1] == setname:
                items = root[item]

    ret = []
    for item in items:
        ret.append(_xml_to_dict(item))

    if return_url is True:
        return ret, requesturl

    return ret
def _wait_for_spot_instance(update_callback,
                            update_args=None,
                            update_kwargs=None,
                            timeout=5 * 60,
                            interval=5,
                            max_failures=10):
    '''
    Poll ``update_callback`` until a spot instance request becomes
    active, the failure budget is exhausted, or the timeout expires.

    :param update_callback: callable that queries the cloud provider for
        the spot instance request; returns None while the data is not
        available yet, False on a query failure, anything else on success.
    :param update_args: positional arguments for ``update_callback``.
    :param update_kwargs: keyword arguments for ``update_callback``.
    :param timeout: maximum time in seconds to keep polling.
    :param interval: seconds to sleep between polls.
    :param max_failures: number of False results tolerated before
        raising SaltCloudExecutionFailure.
    :returns: whatever ``update_callback`` returned on success.
    :raises: SaltCloudExecutionTimeout, SaltCloudExecutionFailure
    '''
    if update_args is None:
        update_args = ()
    if update_kwargs is None:
        update_kwargs = {}

    # Remember the original timeout for the error message; `timeout`
    # itself is counted down below.
    duration = timeout
    while True:
        log.debug(
            'Waiting for spot instance reservation. Giving up in '
            '00:{0:02d}:{1:02d}'.format(int(timeout // 60), int(timeout % 60))
        )

        data = update_callback(*update_args, **update_kwargs)

        if data is False:
            # Query failure: burn one unit of the failure budget.
            log.debug(
                'update_callback has returned False which is considered a '
                'failure. Remaining Failures: {0}'.format(max_failures)
            )
            max_failures -= 1
            if max_failures <= 0:
                raise SaltCloudExecutionFailure(
                    'Too many failures occurred while waiting for '
                    'the spot instance reservation to become active.'
                )
        elif data is not None:
            # Success: hand the data back to the caller.
            return data

        if timeout < 0:
            raise SaltCloudExecutionTimeout(
                'Unable to get an active spot instance request for '
                '00:{0:02d}:{1:02d}'.format(
                    int(duration // 60), int(duration % 60)
                )
            )

        time.sleep(interval)
        timeout -= interval
def avail_sizes(call=None):
    '''
    Return a dict of all available VM sizes on the cloud provider with
    relevant data. Latest version can be found at:

    http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-types.html

    The data below is a static snapshot grouped by instance family;
    it is not fetched from the EC2 API.
    '''
    # Called only as a --function / --list-sizes operation.
    if call == 'action':
        raise SaltCloudSystemExit(
            'The avail_sizes function must be called with '
            '-f or --function, or with the --list-sizes option'
        )

    sizes = {
        'Cluster Compute': {
            'cc2.8xlarge': {
                'id': 'cc2.8xlarge',
                'cores': '16 (2 x Intel Xeon E5-2670, eight-core with '
                         'hyperthread)',
                'disk': '3360 GiB (4 x 840 GiB)',
                'ram': '60.5 GiB'
            },
            'cc1.4xlarge': {
                'id': 'cc1.4xlarge',
                'cores': '8 (2 x Intel Xeon X5570, quad-core with '
                         'hyperthread)',
                'disk': '1690 GiB (2 x 840 GiB)',
                'ram': '22.5 GiB'
            },
        },
        'Cluster CPU': {
            'cg1.4xlarge': {
                'id': 'cg1.4xlarge',
                'cores': '8 (2 x Intel Xeon X5570, quad-core with '
                         'hyperthread), plus 2 NVIDIA Tesla M2050 GPUs',
                'disk': '1680 GiB (2 x 840 GiB)',
                'ram': '22.5 GiB'
            },
        },
        'High CPU': {
            'c1.xlarge': {
                'id': 'c1.xlarge',
                'cores': '8 (with 2.5 ECUs each)',
                'disk': '1680 GiB (4 x 420 GiB)',
                'ram': '8 GiB'
            },
            'c1.medium': {
                'id': 'c1.medium',
                'cores': '2 (with 2.5 ECUs each)',
                'disk': '340 GiB (1 x 340 GiB)',
                'ram': '1.7 GiB'
            },
        },
        'High I/O': {
            'hi1.4xlarge': {
                'id': 'hi1.4xlarge',
                'cores': '8 (with 4.37 ECUs each)',
                'disk': '2 TiB',
                'ram': '60.5 GiB'
            },
        },
        'High Memory': {
            'm2.2xlarge': {
                'id': 'm2.2xlarge',
                'cores': '4 (with 3.25 ECUs each)',
                'disk': '840 GiB (1 x 840 GiB)',
                'ram': '34.2 GiB'
            },
            'm2.xlarge': {
                'id': 'm2.xlarge',
                'cores': '2 (with 3.25 ECUs each)',
                'disk': '410 GiB (1 x 410 GiB)',
                'ram': '17.1 GiB'
            },
            'm2.4xlarge': {
                'id': 'm2.4xlarge',
                'cores': '8 (with 3.25 ECUs each)',
                'disk': '1680 GiB (2 x 840 GiB)',
                'ram': '68.4 GiB'
            },
        },
        'High-Memory Cluster': {
            'cr1.8xlarge': {
                'id': 'cr1.8xlarge',
                'cores': '16 (2 x Intel Xeon E5-2670, eight-core)',
                'disk': '240 GiB (2 x 120 GiB SSD)',
                'ram': '244 GiB'
            },
        },
        'High Storage': {
            'hs1.8xlarge': {
                'id': 'hs1.8xlarge',
                'cores': '16 (8 cores + 8 hyperthreads)',
                'disk': '48 TiB (24 x 2 TiB hard disk drives)',
                'ram': '117 GiB'
            },
        },
        'Micro': {
            't1.micro': {
                'id': 't1.micro',
                'cores': '1',
                'disk': 'EBS',
                'ram': '615 MiB'
            },
        },
        'Standard': {
            'm1.xlarge': {
                'id': 'm1.xlarge',
                'cores': '4 (with 2 ECUs each)',
                'disk': '1680 GB (4 x 420 GiB)',
                'ram': '15 GiB'
            },
            'm1.large': {
                'id': 'm1.large',
                'cores': '2 (with 2 ECUs each)',
                'disk': '840 GiB (2 x 420 GiB)',
                'ram': '7.5 GiB'
            },
            'm1.medium': {
                'id': 'm1.medium',
                'cores': '1',
                'disk': '400 GiB',
                'ram': '3.75 GiB'
            },
            'm1.small': {
                'id': 'm1.small',
                'cores': '1',
                'disk': '150 GiB',
                'ram': '1.7 GiB'
            },
            'm3.2xlarge': {
                'id': 'm3.2xlarge',
                'cores': '8 (with 3.25 ECUs each)',
                'disk': 'EBS',
                'ram': '30 GiB'
            },
            'm3.xlarge': {
                'id': 'm3.xlarge',
                'cores': '4 (with 3.25 ECUs each)',
                'disk': 'EBS',
                'ram': '15 GiB'
            },
        }
    }
    return sizes
def avail_images(kwargs=None, call=None):
    '''
    Return a dict of all available VM images on the cloud provider,
    keyed by image ID.

    ``kwargs`` may contain an ``owner`` entry to filter images by
    owner; otherwise the provider's configured ``owner`` is used
    (default ``'amazon'``).
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The avail_images function must be called with '
            '-f or --function, or with the --list-images option'
        )

    # isinstance, not a type() comparison; this also normalizes the
    # default None into an empty dict.
    if not isinstance(kwargs, dict):
        kwargs = {}

    if 'owner' in kwargs:
        owner = kwargs['owner']
    else:
        provider = get_configured_provider()
        owner = config.get_cloud_config_value(
            'owner', provider, __opts__, default='amazon'
        )

    ret = {}
    params = {'Action': 'DescribeImages',
              'Owner': owner}
    images = query(params)
    for image in images:
        ret[image['imageId']] = image
    return ret
def script(vm_):
    '''
    Build the OS-appropriate deployment script object for the given VM,
    embedding its rendered minion configuration.
    '''
    script_name = config.get_cloud_config_value('script', vm_, __opts__)
    minion_yaml = salt.utils.cloud.salt_config_to_yaml(
        salt.utils.cloud.minion_config(__opts__, vm_)
    )
    return salt.utils.cloud.os_script(script_name, vm_, __opts__, minion_yaml)
def keyname(vm_):
    '''
    Return the EC2 SSH key pair name configured for this VM.
    '''
    value = config.get_cloud_config_value(
        'keyname', vm_, __opts__, search_global=False
    )
    return value
def securitygroup(vm_):
    '''
    Return the security group name(s) configured for this VM.
    '''
    group = config.get_cloud_config_value(
        'securitygroup', vm_, __opts__, search_global=False
    )
    return group
def iam_profile(vm_):
    '''
    Return the IAM instance profile to associate with the instance.

    May be given either as the full Amazon Resource Name (ARN) of the
    instance profile or as the bare role name.

    Type: String
    Default: None
    Required: No
    Example: arn:aws:iam::111111111111:instance-profile/s3access
    Example: s3access
    '''
    profile = config.get_cloud_config_value(
        'iam_profile', vm_, __opts__, search_global=False
    )
    return profile
def ssh_username(vm_):
    '''
    Return the list of usernames to try when logging in over SSH.

    Combines the user-configured ``ssh_username`` value(s) with a
    built-in list of common cloud-image accounts, and repeats the
    user-provided names at the end of the list.
    '''
    usernames = config.get_cloud_config_value(
        'ssh_username', vm_, __opts__
    )

    if not isinstance(usernames, list):
        usernames = [usernames]

    # Get rid of None's or empty names. A list comprehension keeps this
    # a real list on both Python 2 and 3 (filter() returns an iterator
    # on Python 3, which would break the slice-copy below).
    usernames = [username for username in usernames if username]

    # Keep a copy of the usernames the user might have provided
    initial = usernames[:]

    # Add common usernames to the list to be tested
    for name in ('ec2-user', 'ubuntu', 'admin', 'bitnami', 'root'):
        if name not in usernames:
            usernames.append(name)

    # Add the user-provided usernames again at the end of the list, since
    # enough time might pass before the remote service accepts logins
    # that the proper username has already had its iteration.
    # This has been observed with a CentOS 5.7 EC2 image.
    usernames.extend(initial)
    return usernames
def ssh_interface(vm_):
    '''
    Return which address set to use for SSH: 'public_ips' (the default)
    or 'private_ips'.
    '''
    iface = config.get_cloud_config_value(
        'ssh_interface', vm_, __opts__, default='public_ips',
        search_global=False
    )
    return iface
def get_location(vm_=None):
    '''
    Return the EC2 region to use, in this order:
        - CLI parameter
        - VM parameter
        - Cloud profile setting
    '''
    # Resolve the profile/provider-level value first; the CLI option in
    # __opts__ takes precedence over it when present.
    fallback = config.get_cloud_config_value(
        'location',
        vm_ or get_configured_provider(),
        __opts__,
        default=DEFAULT_LOCATION,
        search_global=False
    )
    return __opts__.get('location', fallback)
def avail_locations(call=None):
    '''
    List all available locations, mapping each region name to its name
    and endpoint.
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The avail_locations function must be called with '
            '-f or --function, or with the --list-locations option'
        )

    result = query({'Action': 'DescribeRegions'})
    return dict(
        (region['regionName'],
         {'name': region['regionName'],
          'endpoint': region['regionEndpoint']})
        for region in result
    )
def get_availability_zone(vm_):
    '''
    Return the availability zone to use for this VM, or None when no
    ``availability_zone`` is configured.

    Raises SaltCloudException when the configured zone does not exist in
    the current region or is not currently available.
    '''
    avz = config.get_cloud_config_value(
        'availability_zone', vm_, __opts__, search_global=False
    )

    if avz is None:
        return None

    zones = list_availability_zones()

    # Validate user-specified AZ ('in' on a dict tests keys directly).
    if avz not in zones:
        raise SaltCloudException(
            'The specified availability zone isn\'t valid in this region: '
            '{0}\n'.format(
                avz
            )
        )

    # Check that the specified AZ is currently available.
    if zones[avz] != 'available':
        raise SaltCloudException(
            'The specified availability zone isn\'t currently available: '
            '{0}\n'.format(
                avz
            )
        )

    return avz
def get_subnetid(vm_):
    '''
    Return the SubnetId configured for this VM, or None when unset.
    '''
    # get_cloud_config_value already yields None when the key is absent,
    # so the original's explicit "if None: return None" was redundant.
    return config.get_cloud_config_value(
        'subnetid', vm_, __opts__, search_global=False
    )
def securitygroupid(vm_):
    '''
    Return the VPC SecurityGroupId(s) configured for this VM.
    '''
    sg_id = config.get_cloud_config_value(
        'securitygroupid', vm_, __opts__, search_global=False
    )
    return sg_id
def get_spot_config(vm_):
    '''
    Return the spot instance configuration for the provided VM, or None
    when the VM is not configured as a spot instance.
    '''
    spot = config.get_cloud_config_value(
        'spot_config', vm_, __opts__, search_global=False
    )
    return spot
def list_availability_zones():
    '''
    List all availability zones in the current region, mapping each zone
    name to its state (e.g. 'available').
    '''
    params = {'Action': 'DescribeAvailabilityZones',
              'Filter.0.Name': 'region-name',
              'Filter.0.Value.0': get_location()}
    result = query(params)
    return dict((zone['zoneName'], zone['zoneState']) for zone in result)
def block_device_mappings(vm_):
    '''
    Return the block device mapping:

    ::

        [{'DeviceName': '/dev/sdb', 'VirtualName': 'ephemeral0'},
         {'DeviceName': '/dev/sdc', 'VirtualName': 'ephemeral1'}]
    '''
    mappings = config.get_cloud_config_value(
        'block_device_mappings', vm_, __opts__, search_global=True
    )
    return mappings
def _param_from_config(key, data):
'''
Return EC2 API parameters based on the given config data.
Examples:
1. List of dictionaries
>>> data = [
... {'DeviceIndex': 0, 'SubnetId': 'subid0', 'AssociatePublicIpAddress': True},
... {'DeviceIndex': 1, 'SubnetId': 'subid1', 'PrivateIpAddress': '192.168.1.128'}
... ]
>>> _param_from_config('NetworkInterface', data)
{'NetworkInterface.0.SubnetId': 'subid0', 'NetworkInterface.0.DeviceIndex': 0, 'NetworkInterface.1.SubnetId': 'subid1', 'NetworkInterface.1.PrivateIpAddress': '192.168.1.128', 'NetworkInterface.0.AssociatePublicIpAddress': 'true', 'NetworkInterface.1.DeviceIndex': 1}
2. List of nested dictionaries
>>> data = [
... {'DeviceName': '/dev/sdf', 'Ebs': {'SnapshotId': 'dummy0', 'VolumeSize': 200, 'VolumeType': 'standard'}},
... {'DeviceName': '/dev/sdg', 'Ebs': {'SnapshotId': 'dummy1', 'VolumeSize': 100, 'VolumeType': 'standard'}}
... ]
>>> _param_from_config('BlockDeviceMapping', data)
{'BlockDeviceMapping.0.Ebs.VolumeType': 'standard', 'BlockDeviceMapping.1.Ebs.SnapshotId': 'dummy1', 'BlockDeviceMapping.0.Ebs.VolumeSize': 200, 'BlockDeviceMapping.0.Ebs.SnapshotId': 'dummy0', 'BlockDeviceMapping.1.Ebs.VolumeType': 'standard', 'BlockDeviceMapping.1.DeviceName': '/dev/sdg', 'BlockDeviceMapping.1.Ebs.VolumeSize': 100, 'BlockDeviceMapping.0.DeviceName': '/dev/sdf'}
3. Dictionary of dictionaries
>>> data = { 'Arn': 'dummyarn', 'Name': 'Tester' }
>>> _param_from_config('IamInstanceProfile', data)
{'IamInstanceProfile.Arn': 'dummyarn', 'IamInstanceProfile.Name': 'Tester'}
'''
param = {}
if isinstance(data, dict):
for k, v in data.items():
param.update(_param_from_config('{0}.{1}'.format(key, k), v))
elif isinstance(data, list) or isinstance(data, tuple):
for idx, conf_item in enumerate(data):
prefix = '{0}.{1}'.format(key, idx)
param.update(_param_from_config(prefix, conf_item))
else:
if isinstance(data, bool):
# convert boolean Trur/False to 'true'/'false'
param.update({key: str(data).lower()})
else:
param.update({key: data})
return param
def create(vm_=None, call=None):
    '''
    Create a single VM from a data dict.

    Builds EC2 request parameters from the VM/profile/provider
    configuration (RunInstances, or RequestSpotInstances when a
    spot_config is present), launches the instance, waits for an IP
    address, optionally deploys Salt over SSH or Windows, attaches any
    configured volumes, and returns the collected instance data.
    '''
    # Reachable only via profile-based creation; -a/-f are rejected.
    if call:
        raise SaltCloudSystemExit(
            'You cannot create an instance with -a or -f.'
        )

    salt.utils.cloud.fire_event(
        'event',
        'starting create',
        'salt/cloud/{0}/creating'.format(vm_['name']),
        {
            'name': vm_['name'],
            'profile': vm_['profile'],
            'provider': vm_['provider'],
        },
    )

    key_filename = config.get_cloud_config_value(
        'private_key', vm_, __opts__, search_global=False, default=None
    )
    if key_filename is not None and not os.path.isfile(key_filename):
        raise SaltCloudConfigError(
            'The defined key_filename {0!r} does not exist'.format(
                key_filename
            )
        )

    location = get_location(vm_)
    log.info('Creating Cloud VM {0} in {1}'.format(vm_['name'], location))
    usernames = ssh_username(vm_)

    # do we launch a regular vm or a spot instance?
    # see http://goo.gl/hYZ13f for more information on EC2 API
    spot_config = get_spot_config(vm_)
    if spot_config is not None:
        if 'spot_price' not in spot_config:
            raise SaltCloudSystemExit(
                'Spot instance config for {0} requires a spot_price '
                'attribute.'.format(vm_['name'])
            )

        params = {'Action': 'RequestSpotInstances',
                  'InstanceCount': '1',
                  'Type': spot_config['type'] if 'type' in spot_config else 'one-time',
                  'SpotPrice': spot_config['spot_price']}

        # All of the necessary launch parameters for a VM when using
        # spot instances are the same except for the prefix below
        # being tacked on.
        spot_prefix = 'LaunchSpecification.'

    # regular EC2 instance
    else:
        params = {'Action': 'RunInstances',
                  'MinCount': '1',
                  'MaxCount': '1'}

        # Normal instances should have no prefix.
        spot_prefix = ''

    image_id = vm_['image']
    params[spot_prefix + 'ImageId'] = image_id

    vm_size = config.get_cloud_config_value(
        'size', vm_, __opts__, search_global=False
    )
    # Translate a human-readable size name (e.g. 'Large Instance') into
    # the EC2 instance type identifier.
    if vm_size in SIZE_MAP:
        vm_size = SIZE_MAP[vm_size]
    params[spot_prefix + 'InstanceType'] = vm_size

    ex_keyname = keyname(vm_)
    if ex_keyname:
        params[spot_prefix + 'KeyName'] = ex_keyname

    ex_securitygroup = securitygroup(vm_)
    if ex_securitygroup:
        if not isinstance(ex_securitygroup, list):
            params[spot_prefix + 'SecurityGroup.1'] = ex_securitygroup
        else:
            for counter, sg_ in enumerate(ex_securitygroup):
                params[spot_prefix + 'SecurityGroup.{0}'.format(counter)] = sg_

    ex_iam_profile = iam_profile(vm_)
    if ex_iam_profile:
        # A non-string value triggers AttributeError on .startswith().
        try:
            if ex_iam_profile.startswith('arn:aws:iam:'):
                params[spot_prefix + 'IamInstanceProfile.Arn'] = ex_iam_profile
            else:
                params[spot_prefix + 'IamInstanceProfile.Name'] = ex_iam_profile
        except AttributeError:
            raise SaltCloudConfigError(
                '\'iam_profile\' should be a string value.'
            )

    az_ = get_availability_zone(vm_)
    if az_ is not None:
        params[spot_prefix + 'Placement.AvailabilityZone'] = az_

    subnetid_ = get_subnetid(vm_)
    if subnetid_ is not None:
        # NOTE(review): unlike the other launch parameters, SubnetId is
        # not given the spot_prefix here -- confirm whether spot
        # requests honor it in this form.
        params['SubnetId'] = subnetid_

    ex_securitygroupid = securitygroupid(vm_)
    if ex_securitygroupid:
        if not isinstance(ex_securitygroupid, list):
            params[spot_prefix + 'SecurityGroupId.1'] = ex_securitygroupid
        else:
            for (counter, sg_) in enumerate(ex_securitygroupid):
                params[spot_prefix + 'SecurityGroupId.{0}'.format(counter)] = sg_

    ex_blockdevicemappings = block_device_mappings(vm_)
    if ex_blockdevicemappings:
        params.update(_param_from_config(spot_prefix + 'BlockDeviceMapping', ex_blockdevicemappings))

    network_interfaces = config.get_cloud_config_value(
        'network_interfaces', vm_, __opts__, search_global=False
    )
    if network_interfaces:
        params.update(_param_from_config(spot_prefix + 'NetworkInterface', network_interfaces))

    set_ebs_optimized = config.get_cloud_config_value(
        'ebs_optimized', vm_, __opts__, search_global=False
    )
    if set_ebs_optimized is not None:
        if not isinstance(set_ebs_optimized, bool):
            raise SaltCloudConfigError(
                '\'ebs_optimized\' should be a boolean value.'
            )
        params['EbsOptimized'] = set_ebs_optimized

    set_del_root_vol_on_destroy = config.get_cloud_config_value(
        'del_root_vol_on_destroy', vm_, __opts__, search_global=False
    )
    if set_del_root_vol_on_destroy is not None:
        if not isinstance(set_del_root_vol_on_destroy, bool):
            raise SaltCloudConfigError(
                '\'del_root_vol_on_destroy\' should be a boolean value.'
            )

    if set_del_root_vol_on_destroy:
        # first make sure to look up the root device name
        # as Ubuntu and CentOS (and most likely other OSs)
        # use different device identifiers
        log.info('Attempting to look up root device name for image id {0} on '
                 'VM {1}'.format(image_id, vm_['name']))

        rd_params = {
            'Action': 'DescribeImages',
            'ImageId.1': image_id
        }
        try:
            rd_data = query(rd_params, location=location)
            if 'error' in rd_data:
                return rd_data['error']
            log.debug('EC2 Response: {0!r}'.format(rd_data))
        except Exception as exc:
            log.error(
                'Error getting root device name for image id {0} for '
                'VM {1}: \n{2}'.format(image_id, vm_['name'], exc),
                # Show the traceback if the debug logging level is enabled
                exc_info=log.isEnabledFor(logging.DEBUG)
            )
            raise

        # make sure we have a response
        if not rd_data:
            err_msg = 'There was an error querying EC2 for the root device ' \
                      'of image id {0}. Empty response.'.format(image_id)
            raise SaltCloudSystemExit(err_msg)

        # pull the root device name from the result and use it when
        # launching the new VM
        if rd_data[0]['blockDeviceMapping'] is None:
            # Some ami instances do not have a root volume. Ignore such cases
            rd_name = None
        elif type(rd_data[0]['blockDeviceMapping']['item']) is list:
            rd_name = rd_data[0]['blockDeviceMapping']['item'][0]['deviceName']
        else:
            rd_name = rd_data[0]['blockDeviceMapping']['item']['deviceName']
        log.info('Found root device name: {0}'.format(rd_name))

        if rd_name is not None:
            if ex_blockdevicemappings:
                dev_list = [dev['DeviceName'] for dev in ex_blockdevicemappings]
            else:
                dev_list = []

            if rd_name in dev_list:
                # Toggle DeleteOnTermination on the mapping the user
                # already supplied for the root device.
                dev_index = dev_list.index(rd_name)
                termination_key = spot_prefix + 'BlockDeviceMapping.%d.Ebs.DeleteOnTermination' % dev_index
                params[termination_key] = str(set_del_root_vol_on_destroy).lower()
            else:
                # Append a new mapping entry for the root device.
                dev_index = len(dev_list)
                params[spot_prefix + 'BlockDeviceMapping.%d.DeviceName' % dev_index] = rd_name
                params[spot_prefix + 'BlockDeviceMapping.%d.Ebs.DeleteOnTermination' % dev_index] = str(
                    set_del_root_vol_on_destroy
                ).lower()

    set_del_all_vols_on_destroy = config.get_cloud_config_value(
        'del_all_vols_on_destroy', vm_, __opts__, search_global=False
    )
    if set_del_all_vols_on_destroy is not None:
        if not isinstance(set_del_all_vols_on_destroy, bool):
            raise SaltCloudConfigError(
                '\'del_all_vols_on_destroy\' should be a boolean value.'
            )

    tags = config.get_cloud_config_value('tag', vm_, __opts__, {}, search_global=False)
    if not isinstance(tags, dict):
        raise SaltCloudConfigError(
            '\'tag\' should be a dict.'
        )
    for value in tags.values():
        if not isinstance(value, str):
            raise SaltCloudConfigError(
                '\'tag\' values must be strings. Try quoting the values. e.g. "2013-09-19T20:09:46Z".'
            )
    tags['Name'] = vm_['name']

    salt.utils.cloud.fire_event(
        'event',
        'requesting instance',
        'salt/cloud/{0}/requesting'.format(vm_['name']),
        {'kwargs': params, 'location': location},
    )

    try:
        data = query(params, 'instancesSet', location=location)
        if 'error' in data:
            return data['error']
    except Exception as exc:
        log.error(
            'Error creating {0} on EC2 when trying to run the initial '
            'deployment: \n{1}'.format(
                vm_['name'], exc
            ),
            # Show the traceback if the debug logging level is enabled
            exc_info=log.isEnabledFor(logging.DEBUG)
        )
        raise

    # if we're using spot instances, we need to wait for the spot request
    # to become active before we continue
    if spot_config:
        sir_id = data[0]['spotInstanceRequestId']

        def __query_spot_instance_request(sir_id, location):
            # Returns the request data when active, None while still
            # pending, False on query errors / terminal states.
            params = {'Action': 'DescribeSpotInstanceRequests',
                      'SpotInstanceRequestId.1': sir_id}
            data = query(params, location=location)
            if not data:
                log.error(
                    'There was an error while querying EC2. Empty response'
                )
                # Trigger a failure in the wait for spot instance method
                return False

            if isinstance(data, dict) and 'error' in data:
                log.warn(
                    'There was an error in the query. {0}'.format(data['error'])
                )
                # Trigger a failure in the wait for spot instance method
                return False

            log.debug('Returned query data: {0}'.format(data))

            if 'state' in data[0]:
                state = data[0]['state']

                if state == 'active':
                    return data

                if state == 'open':
                    # Still waiting for an active state
                    log.info('Spot instance status: {0}'.format(
                        data[0]['status']['message']
                    ))
                    return None

                if state in ['cancelled', 'failed', 'closed']:
                    # Request will never be active, fail
                    # NOTE(review): the {0} placeholder is never filled
                    # in; the message is logged verbatim.
                    log.error('Spot instance request resulted in state \'{0}\'. '
                              'Nothing else we can do here.')
                    return False

        salt.utils.cloud.fire_event(
            'event',
            'waiting for spot instance',
            'salt/cloud/{0}/waiting_for_spot'.format(vm_['name']),
        )

        try:
            data = _wait_for_spot_instance(
                __query_spot_instance_request,
                update_args=(sir_id, location),
                timeout=config.get_cloud_config_value(
                    'wait_for_spot_timeout', vm_, __opts__, default=10 * 60),
                max_failures=5
            )
            log.debug('wait_for_spot_instance data {0}'.format(data))
        except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc:
            try:
                # Cancel the existing spot instance request
                params = {'Action': 'CancelSpotInstanceRequests',
                          'SpotInstanceRequestId.1': sir_id}
                data = query(params, location=location)

                log.debug('Canceled spot instance request {0}. Data '
                          'returned: {1}'.format(sir_id, data))

            except SaltCloudSystemExit:
                pass
            finally:
                raise SaltCloudSystemExit(exc.message)

    # Pull the instance ID, valid for both spot and normal instances
    instance_id = data[0]['instanceId']

    salt.utils.cloud.fire_event(
        'event',
        'querying instance',
        'salt/cloud/{0}/querying'.format(vm_['name']),
        {'instance_id': instance_id},
    )

    log.debug('The new VM instance_id is {0}'.format(instance_id))

    params = {'Action': 'DescribeInstances',
              'InstanceId.1': instance_id}

    attempts = 5
    while attempts > 0:
        data, requesturl = query(params, location=location, return_url=True)
        log.debug('The query returned: {0}'.format(data))

        if isinstance(data, dict) and 'error' in data:
            log.warn(
                'There was an error in the query. {0} attempts '
                'remaining: {1}'.format(
                    attempts, data['error']
                )
            )
            attempts -= 1
            continue

        if isinstance(data, list) and not data:
            log.warn(
                'Query returned an empty list. {0} attempts '
                'remaining.'.format(attempts)
            )
            attempts -= 1
            continue

        break
    # while-else: reached only when every attempt was consumed without
    # hitting the `break` above.
    else:
        raise SaltCloudSystemExit(
            'An error occurred while creating VM: {0}'.format(data['error'])
        )

    def __query_ip_address(params, url):
        # Poll helper for wait_for_ip(): returns the instance data once
        # an address is visible, None to keep waiting, False on errors.
        data = query(params, requesturl=url)
        if not data:
            log.error(
                'There was an error while querying EC2. Empty response'
            )
            # Trigger a failure in the wait for IP function
            return False

        if isinstance(data, dict) and 'error' in data:
            log.warn(
                'There was an error in the query. {0}'.format(data['error'])
            )
            # Trigger a failure in the wait for IP function
            return False

        log.debug('Returned query data: {0}'.format(data))

        if 'ipAddress' in data[0]['instancesSet']['item']:
            return data
        if ssh_interface(vm_) == 'private_ips' and \
           'privateIpAddress' in data[0]['instancesSet']['item']:
            return data

    try:
        data = salt.utils.cloud.wait_for_ip(
            __query_ip_address,
            update_args=(params, requesturl),
            timeout=config.get_cloud_config_value(
                'wait_for_ip_timeout', vm_, __opts__, default=10 * 60),
            interval=config.get_cloud_config_value(
                'wait_for_ip_interval', vm_, __opts__, default=10),
        )
    except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc:
        try:
            # It might be already up, let's destroy it!
            destroy(vm_['name'])
        except SaltCloudSystemExit:
            pass
        finally:
            raise SaltCloudSystemExit(exc.message)

    salt.utils.cloud.fire_event(
        'event',
        'setting tags',
        'salt/cloud/{0}/tagging'.format(vm_['name']),
        {'tags': tags},
    )

    set_tags(
        vm_['name'], tags,
        instance_id=instance_id, call='action', location=location
    )
    log.info('Created node {0}'.format(vm_['name']))

    if ssh_interface(vm_) == 'private_ips':
        ip_address = data[0]['instancesSet']['item']['privateIpAddress']
        log.info('Salt node data. Private_ip: {0}'.format(ip_address))
    else:
        ip_address = data[0]['instancesSet']['item']['ipAddress']
        log.info('Salt node data. Public_ip: {0}'.format(ip_address))

    display_ssh_output = config.get_cloud_config_value(
        'display_ssh_output', vm_, __opts__, default=True
    )

    salt.utils.cloud.fire_event(
        'event',
        'waiting for ssh',
        'salt/cloud/{0}/waiting_for_ssh'.format(vm_['name']),
        {'ip_address': ip_address},
    )

    ssh_connect_timeout = config.get_cloud_config_value(
        'ssh_connect_timeout', vm_, __opts__, 900   # 15 minutes
    )

    if config.get_cloud_config_value('win_installer', vm_, __opts__):
        # Windows target: wait for SMB (port 445) and validate credentials.
        username = config.get_cloud_config_value(
            'win_username', vm_, __opts__, default='Administrator'
        )
        win_passwd = config.get_cloud_config_value(
            'win_password', vm_, __opts__, default=''
        )
        if not salt.utils.cloud.wait_for_port(ip_address,
                                              port=445,
                                              timeout=ssh_connect_timeout):
            raise SaltCloudSystemExit(
                'Failed to connect to remote windows host'
            )
        if not salt.utils.cloud.validate_windows_cred(ip_address,
                                                      username,
                                                      win_passwd):
            raise SaltCloudSystemExit(
                'Failed to authenticate against remote windows host'
            )
    elif salt.utils.cloud.wait_for_port(ip_address,
                                        timeout=ssh_connect_timeout):
        # Try each candidate username until one authenticates.
        for user in usernames:
            if salt.utils.cloud.wait_for_passwd(
                host=ip_address,
                username=user,
                ssh_timeout=config.get_cloud_config_value(
                    'wait_for_passwd_timeout', vm_, __opts__, default=1 * 60),
                key_filename=key_filename,
                display_ssh_output=display_ssh_output
            ):
                username = user
                break
        # for-else: no username authenticated.
        else:
            raise SaltCloudSystemExit(
                'Failed to authenticate against remote ssh'
            )
    else:
        raise SaltCloudSystemExit(
            'Failed to connect to remote ssh'
        )

    ret = {}
    if config.get_cloud_config_value('deploy', vm_, __opts__) is True:
        deploy_script = script(vm_)
        deploy_kwargs = {
            'host': ip_address,
            'username': username,
            'key_filename': key_filename,
            'tmp_dir': config.get_cloud_config_value(
                'tmp_dir', vm_, __opts__, default='/tmp/.saltcloud'
            ),
            'deploy_command': config.get_cloud_config_value(
                'deploy_command', vm_, __opts__,
                default='/tmp/.saltcloud/deploy.sh',
            ),
            'tty': config.get_cloud_config_value(
                'tty', vm_, __opts__, default=True
            ),
            'script': deploy_script,
            'name': vm_['name'],
            'sudo': config.get_cloud_config_value(
                'sudo', vm_, __opts__, default=(username != 'root')
            ),
            'sudo_password': config.get_cloud_config_value(
                'sudo_password', vm_, __opts__, default=None
            ),
            'start_action': __opts__['start_action'],
            'parallel': __opts__['parallel'],
            'conf_file': __opts__['conf_file'],
            'sock_dir': __opts__['sock_dir'],
            'minion_pem': vm_['priv_key'],
            'minion_pub': vm_['pub_key'],
            'keep_tmp': __opts__['keep_tmp'],
            'preseed_minion_keys': vm_.get('preseed_minion_keys', None),
            'display_ssh_output': display_ssh_output,
            'minion_conf': salt.utils.cloud.minion_config(__opts__, vm_),
            'script_args': config.get_cloud_config_value(
                'script_args', vm_, __opts__
            ),
            'script_env': config.get_cloud_config_value(
                'script_env', vm_, __opts__
            )
        }

        # Deploy salt-master files, if necessary
        if config.get_cloud_config_value('make_master', vm_, __opts__) is True:
            deploy_kwargs['make_master'] = True
            deploy_kwargs['master_pub'] = vm_['master_pub']
            deploy_kwargs['master_pem'] = vm_['master_pem']
            master_conf = salt.utils.cloud.master_config(__opts__, vm_)
            deploy_kwargs['master_conf'] = master_conf

            if master_conf.get('syndic_master', None):
                deploy_kwargs['make_syndic'] = True

        deploy_kwargs['make_minion'] = config.get_cloud_config_value(
            'make_minion', vm_, __opts__, default=True
        )

        # Check for Windows install params
        win_installer = config.get_cloud_config_value('win_installer',
                                                      vm_,
                                                      __opts__)
        if win_installer:
            deploy_kwargs['win_installer'] = win_installer
            minion = salt.utils.cloud.minion_config(__opts__, vm_)
            deploy_kwargs['master'] = minion['master']
            deploy_kwargs['username'] = config.get_cloud_config_value(
                'win_username', vm_, __opts__, default='Administrator'
            )
            deploy_kwargs['password'] = config.get_cloud_config_value(
                'win_password', vm_, __opts__, default=''
            )

        # Store what was used to the deploy the VM
        # (secrets are stripped before the event is fired)
        event_kwargs = copy.deepcopy(deploy_kwargs)
        del event_kwargs['minion_pem']
        del event_kwargs['minion_pub']
        del event_kwargs['sudo_password']
        if 'password' in event_kwargs:
            del event_kwargs['password']
        ret['deploy_kwargs'] = event_kwargs

        salt.utils.cloud.fire_event(
            'event',
            'executing deploy script',
            'salt/cloud/{0}/deploying'.format(vm_['name']),
            {'kwargs': event_kwargs},
        )

        deployed = False
        if win_installer:
            deployed = salt.utils.cloud.deploy_windows(**deploy_kwargs)
        else:
            deployed = salt.utils.cloud.deploy_script(**deploy_kwargs)

        if deployed:
            log.info('Salt installed on {name}'.format(**vm_))
        else:
            log.error('Failed to start Salt on Cloud VM {name}'.format(**vm_))

    log.info('Created Cloud VM {0[name]!r}'.format(vm_))
    log.debug(
        '{0[name]!r} VM creation details:\n{1}'.format(
            vm_, pprint.pformat(data[0]['instancesSet']['item'])
        )
    )

    ret.update(data[0]['instancesSet']['item'])

    # Get ANY defined volumes settings, merging data, in the following order
    # 1. VM config
    # 2. Profile config
    # 3. Global configuration
    volumes = config.get_cloud_config_value(
        'volumes', vm_, __opts__, search_global=True
    )
    if volumes:
        salt.utils.cloud.fire_event(
            'event',
            'attaching volumes',
            'salt/cloud/{0}/attaching_volumes'.format(vm_['name']),
            {'volumes': volumes},
        )

        log.info('Create and attach volumes to node {0}'.format(vm_['name']))
        created = create_attach_volumes(
            vm_['name'],
            {
                'volumes': volumes,
                'zone': ret['placement']['availabilityZone'],
                'instance_id': ret['instanceId'],
                'del_all_vols_on_destroy': set_del_all_vols_on_destroy
            },
            call='action'
        )
        ret['Attached Volumes'] = created

    salt.utils.cloud.fire_event(
        'event',
        'created instance',
        'salt/cloud/{0}/created'.format(vm_['name']),
        {
            'name': vm_['name'],
            'profile': vm_['profile'],
            'provider': vm_['provider'],
            'instance_id': instance_id,
        },
    )

    return ret
def create_attach_volumes(name, kwargs, call=None):
    '''
    Create and attach volumes to a created node.

    name
        Name of the node the volumes will be attached to.
    kwargs
        Must contain ``volumes`` (a list of volume definitions, or a YAML
        string describing one) and ``zone``; ``instance_id`` and
        ``del_all_vols_on_destroy`` are optional.

    Returns a list of human-readable messages, one per attached volume.
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The create_attach_volumes action must be called with '
            '-a or --action.'
        )
    if 'instance_id' not in kwargs:
        kwargs['instance_id'] = _get_node(name)['instanceId']
    if isinstance(kwargs['volumes'], str):
        # Volumes may arrive serialized (e.g. from the CLI); parse as YAML.
        volumes = yaml.safe_load(kwargs['volumes'])
    else:
        volumes = kwargs['volumes']
    ret = []
    for volume in volumes:
        created = False
        volume_name = '{0} on {1}'.format(volume['device'], name)
        volume_dict = {
            'volume_name': volume_name,
            'zone': kwargs['zone']
        }
        if 'volume_id' in volume:
            # Re-use an existing volume as-is.
            volume_dict['volume_id'] = volume['volume_id']
        elif 'snapshot' in volume:
            # Create the new volume from a snapshot.
            volume_dict['snapshot'] = volume['snapshot']
        else:
            # Create a brand new volume of the requested size/type.
            volume_dict['size'] = volume['size']
            if 'type' in volume:
                volume_dict['type'] = volume['type']
            if 'iops' in volume:
                volume_dict['iops'] = volume['iops']
        if 'volume_id' not in volume_dict:
            created_volume = create_volume(volume_dict, call='function')
            created = True
            for item in created_volume:
                if 'volumeId' in item:
                    volume_dict['volume_id'] = item['volumeId']
        attach = attach_volume(
            name,
            {'volume_id': volume_dict['volume_id'], 'device': volume['device']},
            instance_id=kwargs['instance_id'],
            call='action'
        )
        # Update the delete-on-termination flag for volumes we just created.
        delvols_on_destroy = kwargs.get('del_all_vols_on_destroy', None)
        if attach and created and delvols_on_destroy is not None:
            _toggle_delvol(instance_id=kwargs['instance_id'],
                           device=volume['device'],
                           value=delvols_on_destroy)
        if attach:
            msg = (
                '{0} attached to {1} (aka {2}) as device {3}'.format(
                    volume_dict['volume_id'],
                    kwargs['instance_id'],
                    name,
                    volume['device']
                )
            )
            log.info(msg)
            ret.append(msg)
    return ret
def stop(name, call=None):
    '''
    Stop a running EC2 node by name.
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The stop action must be called with -a or --action.'
        )
    log.info('Stopping node {0}'.format(name))
    node_id = _get_node(name)['instanceId']
    return query({'Action': 'StopInstances',
                  'InstanceId.1': node_id})
def start(name, call=None):
    '''
    Start a stopped EC2 node by name.
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The start action must be called with -a or --action.'
        )
    log.info('Starting node {0}'.format(name))
    node_id = _get_node(name)['instanceId']
    return query({'Action': 'StartInstances',
                  'InstanceId.1': node_id})
def set_tags(name, tags, call=None, location=None, instance_id=None):
    '''
    Set tags for a node and verify that they were actually applied,
    retrying a few times before giving up.

    CLI Example::
        salt-cloud -a set_tags mymachine tag1=somestuff tag2='Other stuff'

    Raises SaltCloudSystemExit if the tags still do not match after all
    retries.
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The set_tags action must be called with -a or --action.'
        )
    if instance_id is None:
        instance_id = _get_node(name, location)['instanceId']
    params = {'Action': 'CreateTags',
              'ResourceId.1': instance_id}
    log.debug('Tags to set for {0}: {1}'.format(name, tags))
    # .items() instead of the Python-2-only .iteritems() keeps this callable
    # on both Python 2 and 3.
    for idx, (tag_k, tag_v) in enumerate(tags.items()):
        params['Tag.{0}.Key'.format(idx)] = tag_k
        params['Tag.{0}.Value'.format(idx)] = tag_v
    attempts = 5
    while attempts >= 0:
        query(params, setname='tagSet', location=location)
        # Read the tags back to confirm EC2 really stored them.
        settags = get_tags(
            instance_id=instance_id, call='action', location=location
        )
        log.debug('Setting the tags returned: {0}'.format(settags))
        failed_to_set_tags = False
        for tag in settags:
            if tag['key'] not in tags:
                # We were not setting this tag
                continue
            if str(tags.get(tag['key'])) != str(tag['value']):
                # Not set to the proper value!?
                failed_to_set_tags = True
                break
        if failed_to_set_tags:
            log.warn(
                'Failed to set tags. Remaining attempts {0}'.format(
                    attempts
                )
            )
            attempts -= 1
            continue
        return settags
    raise SaltCloudSystemExit(
        'Failed to set tags on {0}!'.format(name)
    )
def get_tags(name=None, instance_id=None, call=None, location=None):
    '''
    Retrieve the EC2 tags attached to a node, looked up either by
    instance id or by name.
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The get_tags action must be called with -a or --action.'
        )
    if instance_id is None:
        if location is None:
            location = get_location()
        nodes = list_nodes_full(location)
        if name in nodes:
            instance_id = nodes[name]['instanceId']
    request = {
        'Action': 'DescribeTags',
        'Filter.1.Name': 'resource-id',
        'Filter.1.Value': instance_id,
    }
    return query(request, setname='tagSet', location=location)
def del_tags(name, kwargs, call=None):
    '''
    Delete tags from a node.

    CLI Example::
        salt-cloud -a del_tags mymachine tag1,tag2,tag3

    ``kwargs['tags']`` is a comma-separated string of tag keys to remove.
    Returns the tags remaining on the node after deletion.
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The del_tags action must be called with -a or --action.'
        )
    if 'tags' not in kwargs:
        raise SaltCloudSystemExit(
            'A tag or tags must be specified using tags=list,of,tags'
        )
    instance_id = _get_node(name)['instanceId']
    params = {'Action': 'DeleteTags',
              'ResourceId.1': instance_id}
    for idx, tag in enumerate(kwargs['tags'].split(',')):
        params['Tag.{0}.Key'.format(idx)] = tag
    query(params, setname='tagSet')
    return get_tags(name, call='action')
def rename(name, kwargs, call=None):
    '''
    Properly rename a node: update its EC2 ``Name`` tag and rename the
    local minion key. Pass the new name as ``newname``.

    CLI Example::
        salt-cloud -a rename mymachine newname=yourmachine
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The rename action must be called with -a or --action.'
        )
    newname = kwargs['newname']
    log.info('Renaming {0} to {1}'.format(name, newname))
    set_tags(name, {'Name': newname}, call='action')
    salt.utils.cloud.rename_key(__opts__['pki_dir'], name, newname)
def destroy(name, call=None):
    '''
    Destroy a node. Will check termination protection and warn if enabled.
    CLI Example::
        salt-cloud --destroy mymachine
    '''
    if call == 'function':
        raise SaltCloudSystemExit(
            'The destroy action must be called with -d, --destroy, '
            '-a or --action.'
        )
    # Gather the metadata up front: we need the instance id for every later
    # step and the spot request id (if any) for cleanup at the end.
    node_metadata = _get_node(name)
    instance_id = node_metadata['instanceId']
    sir_id = node_metadata.get('spotInstanceRequestId')
    # quiet=True keeps the protection lookup at DEBUG log level.
    protected = show_term_protect(
        name=name,
        instance_id=instance_id,
        call='action',
        quiet=True
    )
    salt.utils.cloud.fire_event(
        'event',
        'destroying instance',
        'salt/cloud/{0}/destroying'.format(name),
        {'name': name, 'instance_id': instance_id},
    )
    # EC2 returns the attribute value as the string 'true'/'false'.
    if protected == 'true':
        raise SaltCloudSystemExit(
            'This instance has been protected from being destroyed. '
            'Use the following command to disable protection:\n\n'
            'salt-cloud -a disable_term_protect {0}'.format(
                name
            )
        )
    ret = {}
    # Optionally rename before terminating so a new machine can immediately
    # re-use the old name while EC2 finishes cleaning this one up.
    if config.get_cloud_config_value('rename_on_destroy',
                                     get_configured_provider(),
                                     __opts__, search_global=False) is True:
        newname = '{0}-DEL{1}'.format(name, uuid.uuid4().hex)
        rename(name, kwargs={'newname': newname}, call='action')
        log.info(
            'Machine will be identified as {0} until it has been '
            'cleaned up.'.format(
                newname
            )
        )
        ret['newname'] = newname
    params = {'Action': 'TerminateInstances',
              'InstanceId.1': instance_id}
    result = query(params)
    log.info(result)
    ret.update(result[0])
    # If this instance is part of a spot instance request, we
    # need to cancel it as well
    if sir_id is not None:
        params = {'Action': 'CancelSpotInstanceRequests',
                  'SpotInstanceRequestId.1': sir_id}
        result = query(params)
        ret['spotInstance'] = result[0]
    salt.utils.cloud.fire_event(
        'event',
        'destroyed instance',
        'salt/cloud/{0}/destroyed'.format(name),
        {'name': name, 'instance_id': instance_id},
    )
    return ret
def reboot(name, call=None):
    '''
    Reboot a named node.

    CLI Example::
        salt-cloud -a reboot mymachine
    '''
    node_id = _get_node(name)['instanceId']
    result = query({'Action': 'RebootInstances',
                    'InstanceId.1': node_id})
    if result == []:
        log.info("Complete")
    return {'Reboot': 'Complete'}
def show_image(kwargs, call=None):
    '''
    Show the details from EC2 concerning an AMI.
    '''
    if call != 'function':
        raise SaltCloudSystemExit(
            'The show_image action must be called with -f or --function.'
        )
    result = query({'Action': 'DescribeImages',
                    'ImageId.1': kwargs['image']})
    log.info(result)
    return result
def show_instance(name, call=None):
    '''
    Show the full EC2 metadata for a single named instance.
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The show_instance action must be called with -a or --action.'
        )
    return _get_node(name)
def _get_node(name, location=None):
    '''
    Look up a single node's data, retrying for a while because EC2 may not
    list a freshly-created instance immediately. Returns {} on failure.
    '''
    if location is None:
        location = get_location()
    for remaining in range(10, -1, -1):
        try:
            return list_nodes_full(location)[name]
        except KeyError:
            log.debug(
                'Failed to get the data for the node {0!r}. Remaining '
                'attempts {1}'.format(
                    name, remaining - 1
                )
            )
            # Just a little delay between attempts...
            time.sleep(0.5)
    return {}
def list_nodes_full(location=None, call=None):
    '''
    Return full information about every VM on this provider. Without an
    explicit location, aggregate over each region used by an EC2 profile.
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The list_nodes_full function must be called with -f or --function.'
        )
    if location:
        return _list_nodes_full(location)
    ret = {}
    locations = set(
        get_location(vm_) for vm_ in __opts__['profiles'].values()
        if _vm_provider_driver(vm_)
    )
    for loc in locations:
        ret.update(_list_nodes_full(loc))
    return ret
def _vm_provider_driver(vm_):
    '''
    Return True/False when the profile's provider is a configured EC2
    driver, or None when the provider alias/driver is not configured.
    '''
    alias, driver = vm_['provider'].split(':')
    providers = __opts__['providers']
    if alias in providers and driver in providers[alias]:
        return driver == 'ec2'
    return None
def _extract_name_tag(item):
if 'tagSet' in item:
tagset = item['tagSet']
if type(tagset['item']) is list:
for tag in tagset['item']:
if tag['key'] == 'Name':
return tag['value']
return item['instanceId']
return (item['tagSet']['item']['value'])
return item['instanceId']
def _list_nodes_full(location=None):
    '''
    Return full data for every VM in the given location, keyed by the
    value of each instance's Name tag (or instance id).

    Raises SaltCloudSystemExit when the EC2 API reports an error.
    '''
    ret = {}
    params = {'Action': 'DescribeInstances'}
    instances = query(params, location=location)
    if 'error' in instances:
        raise SaltCloudSystemExit(
            'An error occurred while listing nodes: {0}'.format(
                instances['error']['Errors']['Error']['Message']
            )
        )
    for instance in instances:
        # 'item' is a dict for a single instance but a list when the
        # reservation holds several (e.g. stopped EC2 instances); normalize
        # to a list so one loop handles both shapes.
        items = instance['instancesSet']['item']
        if not isinstance(items, list):
            items = [items]
        for item in items:
            name = _extract_name_tag(item)
            ret[name] = item
            ret[name].update(
                dict(
                    id=item['instanceId'],
                    image=item['imageId'],
                    size=item['instanceType'],
                    state=item['instanceState']['name'],
                    private_ips=item.get('privateIpAddress', []),
                    public_ips=item.get('ipAddress', [])
                )
            )
    return ret
def list_nodes(call=None):
    '''
    Return a condensed view (id, image, size, state, IPs) of the VMs that
    are on the provider.
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The list_nodes function must be called with -f or --function.'
        )
    nodes = list_nodes_full(get_location())
    if 'error' in nodes:
        raise SaltCloudSystemExit(
            'An error occurred while listing nodes: {0}'.format(
                nodes['error']['Errors']['Error']['Message']
            )
        )
    fields = ('id', 'image', 'size', 'state', 'private_ips', 'public_ips')
    return dict(
        (name, dict((field, data[field]) for field in fields))
        for name, data in nodes.items()
    )
def list_nodes_select(call=None):
    '''
    Return a list of the VMs that are on the provider, restricted to the
    fields chosen by the ``query.selection`` option.
    '''
    nodes = list_nodes_full(get_location())
    selection = __opts__['query.selection']
    return salt.utils.cloud.list_nodes_select(nodes, selection, call)
def show_term_protect(name=None, instance_id=None, call=None, quiet=False):
    '''
    Report whether API termination protection is enabled for an instance.
    Returns the raw attribute value ('true'/'false') or False when the
    attribute is absent.
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The show_term_protect action must be called with -a or --action.'
        )
    if not instance_id:
        instance_id = list_nodes_full(get_location())[name]['instanceId']
    result = query({'Action': 'DescribeInstanceAttribute',
                    'InstanceId': instance_id,
                    'Attribute': 'disableApiTermination'},
                   return_root=True)
    disable_protect = False
    for item in result:
        if 'value' in item:
            disable_protect = item['value']
            break
    level = logging.DEBUG if quiet is True else logging.INFO
    state = 'enabled' if disable_protect == 'true' else 'disabled'
    log.log(
        level,
        'Termination Protection is {0} for {1}'.format(state, name)
    )
    return disable_protect
def enable_term_protect(name, call=None):
    '''
    Turn on API termination protection for a node.

    CLI Example::
        salt-cloud -a enable_term_protect mymachine
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The enable_term_protect action must be called with -a or --action.'
        )
    return _toggle_term_protect(name, 'true')
def disable_term_protect(name, call=None):
    '''
    Turn off API termination protection for a node.

    CLI Example::
        salt-cloud -a disable_term_protect mymachine
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The disable_term_protect action must be called with -a or --action.'
        )
    return _toggle_term_protect(name, 'false')
def _toggle_term_protect(name, value):
    '''
    Set the disableApiTermination attribute to *value* ('true'/'false')
    for the named node; helper shared by enable/disable_term_protect.
    '''
    instance_id = list_nodes_full(get_location())[name]['instanceId']
    query({'Action': 'ModifyInstanceAttribute',
           'InstanceId': instance_id,
           'DisableApiTermination.Value': value},
          return_root=True)
    return show_term_protect(name=name, instance_id=instance_id, call='action')
def show_delvol_on_destroy(name, kwargs=None, call=None):
    '''
    Show the deleteOnTermination setting of the EBS volumes attached to an
    instance, optionally filtered by ``device`` and/or ``volume_id``.

    CLI Example::
        salt-cloud -a show_delvol_on_destroy mymachine
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The show_delvol_on_destroy action must be called '
            'with -a or --action.'
        )
    if not kwargs:
        kwargs = {}
    instance_id = kwargs.get('instance_id', None)
    device = kwargs.get('device', None)
    volume_id = kwargs.get('volume_id', None)
    if instance_id is None:
        instances = list_nodes_full()
        instance_id = instances[name]['instanceId']
    params = {'Action': 'DescribeInstances',
              'InstanceId.1': instance_id}
    data, _requesturl = query(params, return_url=True)
    blockmap = data[0]['instancesSet']['item']['blockDeviceMapping']
    # A single mapping comes back as a dict; normalize to a list.
    if not isinstance(blockmap['item'], list):
        blockmap['item'] = [blockmap['item']]
    items = []
    for item in blockmap['item']:
        device_name = item['deviceName']
        if device is not None and device != device_name:
            continue
        if volume_id is not None and volume_id != item['ebs']['volumeId']:
            continue
        items.append({
            'device_name': device_name,
            'volume_id': item['ebs']['volumeId'],
            'deleteOnTermination': item['ebs']['deleteOnTermination']
        })
    return items
def keepvol_on_destroy(name, kwargs=None, call=None):
    '''
    Keep all (or the specified) EBS volumes when the instance terminates.

    CLI Example::
        salt-cloud -a keepvol_on_destroy mymachine
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The keepvol_on_destroy action must be called with -a or --action.'
        )
    kwargs = kwargs or {}
    return _toggle_delvol(name=name,
                          device=kwargs.get('device', None),
                          volume_id=kwargs.get('volume_id', None),
                          value='false')
def delvol_on_destroy(name, kwargs=None, call=None):
    '''
    Delete all (or the specified) EBS volumes when the instance terminates.

    CLI Example::
        salt-cloud -a delvol_on_destroy mymachine
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The delvol_on_destroy action must be called with -a or --action.'
        )
    kwargs = kwargs or {}
    return _toggle_delvol(name=name,
                          device=kwargs.get('device', None),
                          volume_id=kwargs.get('volume_id', None),
                          value='true')
def _toggle_delvol(name=None, instance_id=None, device=None, volume_id=None,
                   value=None, requesturl=None):
    '''
    Set the DeleteOnTermination flag to *value* ('true'/'false') on the
    instance's block-device mappings, optionally filtered by device name
    and/or volume id. Returns the refreshed DescribeInstances data.
    '''
    if not instance_id:
        instances = list_nodes_full(get_location())
        instance_id = instances[name]['instanceId']
    if requesturl:
        data = query(requesturl=requesturl)
    else:
        params = {'Action': 'DescribeInstances',
                  'InstanceId.1': instance_id}
        data, requesturl = query(params, return_url=True)
    blockmap = data[0]['instancesSet']['item']['blockDeviceMapping']
    params = {'Action': 'ModifyInstanceAttribute',
              'InstanceId': instance_id}
    # A single mapping comes back as a dict; normalize to a list.
    if not isinstance(blockmap['item'], list):
        blockmap['item'] = [blockmap['item']]
    for idx, item in enumerate(blockmap['item']):
        device_name = item['deviceName']
        if device is not None and device != device_name:
            continue
        if volume_id is not None and volume_id != item['ebs']['volumeId']:
            continue
        params['BlockDeviceMapping.{0}.DeviceName'.format(idx)] = device_name
        params['BlockDeviceMapping.{0}.Ebs.DeleteOnTermination'.format(idx)] = value
    query(params, return_root=True)
    return query(requesturl=requesturl)
def create_volume(kwargs=None, call=None):
    '''
    Create an EBS volume in the given availability zone (``zone`` is
    required; ``size``, ``snapshot``, ``type`` and ``iops`` are optional).
    Returns the raw CreateVolume response, or False on invalid input.
    '''
    if call != 'function':
        log.error(
            'The create_volume function must be called with -f or --function.'
        )
        return False
    if not kwargs:
        # Guard the default: without this, 'zone' not in None raises
        # TypeError instead of producing the error message below.
        kwargs = {}
    if 'zone' not in kwargs:
        log.error('An availability zone must be specified to create a volume.')
        return False
    if 'size' not in kwargs and 'snapshot' not in kwargs:
        # This number represents GiB
        kwargs['size'] = '10'
    params = {'Action': 'CreateVolume',
              'AvailabilityZone': kwargs['zone']}
    if 'size' in kwargs:
        params['Size'] = kwargs['size']
    if 'snapshot' in kwargs:
        params['SnapshotId'] = kwargs['snapshot']
    if 'type' in kwargs:
        params['VolumeType'] = kwargs['type']
    # Provisioned IOPS are only valid for io1 volumes.
    if 'iops' in kwargs and kwargs.get('type', 'standard') == 'io1':
        params['Iops'] = kwargs['iops']
    log.debug(params)
    data = query(params, return_root=True)
    # Wait a few seconds to make sure the volume
    # has had a chance to shift to available state
    # TODO: Should probably create a util method to
    # wait for available status and fail on others
    time.sleep(5)
    return data
def attach_volume(name=None, kwargs=None, instance_id=None, call=None):
    '''
    Attach an EBS volume to an instance. Requires ``volume_id`` and
    ``device`` in kwargs, plus either a node name or an instance id.
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The attach_volume action must be called with -a or --action.'
        )
    kwargs = kwargs or {}
    if 'instance_id' in kwargs:
        instance_id = kwargs['instance_id']
    if name and not instance_id:
        instance_id = list_nodes_full(get_location())[name]['instanceId']
    if not name and not instance_id:
        log.error('Either a name or an instance_id is required.')
        return False
    if 'volume_id' not in kwargs:
        log.error('A volume_id is required.')
        return False
    if 'device' not in kwargs:
        log.error('A device is required (ex. /dev/sdb1).')
        return False
    params = {
        'Action': 'AttachVolume',
        'VolumeId': kwargs['volume_id'],
        'InstanceId': instance_id,
        'Device': kwargs['device'],
    }
    log.debug(params)
    return query(params, return_root=True)
def show_volume(name=None, kwargs=None, instance_id=None, call=None):
    '''
    Show the details of an EBS volume identified by ``volume_id``.
    '''
    kwargs = kwargs or {}
    if 'volume_id' not in kwargs:
        log.error('A volume_id is required.')
        return False
    return query({'Action': 'DescribeVolumes',
                  'VolumeId.1': kwargs['volume_id']},
                 return_root=True)
def detach_volume(name=None, kwargs=None, instance_id=None, call=None):
    '''
    Detach the EBS volume identified by ``volume_id`` from its instance.
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The detach_volume action must be called with -a or --action.'
        )
    kwargs = kwargs or {}
    if 'volume_id' not in kwargs:
        log.error('A volume_id is required.')
        return False
    return query({'Action': 'DetachVolume',
                  'VolumeId': kwargs['volume_id']},
                 return_root=True)
def delete_volume(name=None, kwargs=None, instance_id=None, call=None):
    '''
    Delete the EBS volume identified by ``volume_id``.
    '''
    kwargs = kwargs or {}
    if 'volume_id' not in kwargs:
        log.error('A volume_id is required.')
        return False
    return query({'Action': 'DeleteVolume',
                  'VolumeId': kwargs['volume_id']},
                 return_root=True)
def create_keypair(kwargs=None, call=None):
    '''
    Create an SSH keypair named ``keyname`` in EC2.
    '''
    if call != 'function':
        log.error(
            'The create_keypair function must be called with -f or --function.'
        )
        return False
    kwargs = kwargs or {}
    if 'keyname' not in kwargs:
        log.error('A keyname is required.')
        return False
    return query({'Action': 'CreateKeyPair',
                  'KeyName': kwargs['keyname']},
                 return_root=True)
def show_keypair(kwargs=None, call=None):
    '''
    Show the details of the SSH keypair named ``keyname``.
    '''
    if call != 'function':
        log.error(
            'The show_keypair function must be called with -f or --function.'
        )
        return False
    kwargs = kwargs or {}
    if 'keyname' not in kwargs:
        log.error('A keyname is required.')
        return False
    return query({'Action': 'DescribeKeyPairs',
                  'KeyName.1': kwargs['keyname']},
                 return_root=True)
def delete_keypair(kwargs=None, call=None):
    '''
    Delete the SSH keypair named ``keyname`` from EC2.
    '''
    if call != 'function':
        log.error(
            'The delete_keypair function must be called with -f or --function.'
        )
        return False
    kwargs = kwargs or {}
    if 'keyname' not in kwargs:
        log.error('A keyname is required.')
        return False
    return query({'Action': 'DeleteKeyPair',
                  'KeyName.1': kwargs['keyname']},
                 return_root=True)
| MadeiraCloud/salt | sources/salt/cloud/clouds/ec2.py | Python | apache-2.0 | 74,589 |
# Code from http://fi.am/entry/plugging-a-rss-feed-into-a-django-template/
from datetime import datetime
import time
from django.core.cache import cache
from django import template
import feedparser
register = template.Library()
@register.filter
def todatetime(value):
    '''Convert a feedparser time-struct into a naive local datetime.'''
    epoch_seconds = time.mktime(value)
    local = time.localtime(epoch_seconds)
    return datetime(*local[:6])
@register.tag
def rssplug(parser, token):
    '''
    Parse ``{% rssplug <feed_url> <template_name> %}`` and return the node
    that renders the feed.
    '''
    try:
        # NOTE: the local must not be called "template" -- that would shadow
        # the django.template module needed for TemplateSyntaxError below.
        tag_name, address, template_name = token.split_contents()
    except ValueError:
        raise template.TemplateSyntaxError('%s tag requires 2 arguments' % token.split_contents()[0])
    return RssPlugNode(address, template_name)
class RssPlugNode(template.Node):
    '''Render every entry of an RSS feed through a per-item template.'''

    def __init__(self, address, templ):
        self.address = template.Variable(address)
        self.templ = template.Variable(templ)

    def rss(self, addr):
        '''Return the parsed feed for *addr*, caching the parse result.'''
        cache_key = 'rssplug_%s' % addr
        feed = cache.get(cache_key)
        if not feed:
            feed = feedparser.parse(addr)
            cache.set(cache_key, feed)
        return feed

    def render(self, context):
        addr = self.address.resolve(context)
        template_name = self.templ.resolve(context)
        item_template = template.loader.get_template(template_name)
        rendered = []
        for entry in self.rss(addr).entries:
            rendered.append(item_template.render(template.Context({'item': entry})))
        return ''.join(rendered)
# Exercise pyaf's hierarchical (grouped) forecasting on the Australian
# infant dataset, comparing all four reconciliation methods.
import pandas as pd
import numpy as np
import pyaf.HierarchicalForecastEngine as hautof
import pyaf.Bench.TS_datasets as tsds
import datetime
#get_ipython().magic('matplotlib inline')
b1 = tsds.load_AU_infant_grouped_dataset();
# reduce the number of possible values of State.
b1.mHierarchy['Groups']['State'] = ["NSW","VIC"];
df = b1.mPastData;
lEngine = hautof.cHierarchicalForecastEngine()
# BU = bottom-up, TD = top-down, MO = middle-out, OC = optimal combination.
lEngine.mOptions.mHierarchicalCombinationMethod = ["BU" , 'TD' , 'MO' , 'OC'];
lEngine
H = b1.mHorizon;
# lEngine.mOptions.enable_slow_mode();
# lEngine.mOptions.mDebugPerformance = True;
# Disable autoregression models to keep the test fast.
lEngine.mOptions.set_active_autoregressions([]);
lEngine.train(df , b1.mTimeVar , b1.mSignalVar, H, b1.mHierarchy, None);
lEngine.getModelInfo();
#lEngine.standardPlots("outputs/AU_infant_");
dfapp_in = df.copy();
dfapp_in.tail()
# Forecast H steps ahead on the training frame itself.
dfapp_out = lEngine.forecast(dfapp_in, H);
#dfapp_out.to_csv("outputs/Grouped_AU_apply_out.csv")
from django.conf.urls import url
from django.contrib import messages
from django.contrib.admin import ModelAdmin
from django.db.models import Q
from django.template.response import TemplateResponse
from django.templatetags.static import static
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from openwisp_utils.admin import ReceiveUrlAdmin
from .. import settings as app_settings
from ..contextmanagers import log_failure
from ..visualizer import GraphVisualizerUrls
class TimeStampedEditableAdmin(ModelAdmin):
    """
    ModelAdmin for TimeStampedEditableModel
    """

    def __init__(self, *args, **kwargs):
        # Tuple concatenation sets a per-instance readonly_fields, leaving
        # the class-level attribute untouched.
        self.readonly_fields = self.readonly_fields + ('created', 'modified')
        super().__init__(*args, **kwargs)
class BaseAdmin(TimeStampedEditableAdmin):
    # Common base for the admins in this app: keeps the save buttons at the
    # top of the page and loads the netjsongraph visualizer assets.
    save_on_top = True
    class Media:
        # CSS/JS bundles required by the embedded topology visualizer.
        css = {
            'all': [
                static('netjsongraph/css/src/netjsongraph.css'),
                static('netjsongraph/css/lib/jquery-ui.min.css'),
                static('netjsongraph/css/style.css'),
                static('netjsongraph/css/admin.css'),
            ]
        }
        js = [
            static('netjsongraph/js/lib/d3.min.js'),
            static('netjsongraph/js/lib/jquery-ui.min.js'),
            static('netjsongraph/js/src/netjsongraph.js'),
            static('netjsongraph/js/strategy-switcher.js'),
            static('netjsongraph/js/topology-history.js'),
            static('netjsongraph/js/visualize.js'),
        ]
class AbstractTopologyAdmin(BaseAdmin, ReceiveUrlAdmin, GraphVisualizerUrls):
    # Admin for topology objects: list configuration, bulk actions
    # (update/publish/unpublish) and an extra graph-visualization view.
    list_display = ['label', 'parser', 'strategy', 'published', 'created', 'modified']
    readonly_fields = ['protocol', 'version', 'revision', 'metric', 'receive_url']
    list_filter = ['parser', 'strategy']
    search_fields = ['label', 'id']
    actions = ['update_selected', 'unpublish_selected', 'publish_selected']
    fields = [
        'label',
        'parser',
        'strategy',
        'url',
        'key',
        'expiration_time',
        'receive_url',
        'published',
        'protocol',
        'version',
        'revision',
        'metric',
        'created',
    ]
    receive_url_name = 'receive_topology'
    receive_url_urlconf = app_settings.TOPOLOGY_API_URLCONF
    receive_url_baseurl = app_settings.TOPOLOGY_API_BASEURL
    def get_actions(self, request):
        """
        move delete action to last position
        """
        actions = super().get_actions(request)
        delete = actions['delete_selected']
        del actions['delete_selected']
        actions['delete_selected'] = delete
        return actions
    def change_view(self, request, object_id, form_url='', extra_context=None):
        # Inject a "View topology graph" button into the change form.
        extra_context = extra_context or {}
        prefix = 'admin:{0}_{1}'.format(
            self.opts.app_label, self.model.__name__.lower()
        )
        text = _('View topology graph')
        extra_context.update(
            {
                'additional_buttons': [
                    {
                        'type': 'button',
                        'url': reverse(
                            '{0}_visualize'.format(prefix), args=[object_id]
                        ),
                        'class': 'visualizelink',
                        'value': text,
                        'title': '{0} (ALT+P)'.format(text),
                    }
                ]
            }
        )
        return super().change_view(request, object_id, form_url, extra_context)
    def get_urls(self):
        # Register the per-object visualization URL in front of the
        # default admin URLs.
        options = getattr(self.model, '_meta')
        url_prefix = '{0}_{1}'.format(options.app_label, options.model_name)
        return [
            url(
                r'^visualize/(?P<pk>[^/]+)/$',
                self.admin_site.admin_view(self.visualize_view),
                name='{0}_visualize'.format(url_prefix),
            ),
        ] + super().get_urls()
    def _message(self, request, rows, suffix, level=messages.SUCCESS):
        # Build a "<count> <verbose name> <suffix>" user message with the
        # correct singular/plural form.
        if rows == 1:
            prefix = _('1 {0} was'.format(self.model._meta.verbose_name))
        else: # pragma: nocover
            prefix = _(
                '{0} {1} were'.format(rows, self.model._meta.verbose_name_plural)
            )
        self.message_user(request, '{0} {1}'.format(prefix, suffix), level=level)
    def update_selected(self, request, queryset):
        # Bulk-update topologies; only FETCH-strategy ones can be updated,
        # others are counted as "ignored".
        items = list(queryset)
        failed = []
        ignored = []
        for item in items:
            if item.strategy == 'fetch':
                try:
                    item.update()
                except Exception as e:
                    failed.append('{0}: {1}'.format(item.label, str(e)))
                    with log_failure('update topology admin action', item):
                        raise e
            else:
                ignored.append(item)
        # remove item from items if ignored.
        for item in ignored:
            if item in items:
                items.remove(item)
        failures = len(failed)
        successes = len(items) - failures
        total_ignored = len(ignored)
        if successes > 0:
            self._message(request, successes, _('successfully updated'))
        if failures > 0:
            message = _('not updated. %s') % '; '.join(failed)
            self._message(request, failures, message, level=messages.ERROR)
        if total_ignored > 0:
            message = _("ignored (not using FETCH strategy)")
            self._message(request, total_ignored, message, level=messages.WARNING)
    update_selected.short_description = _(
        'Update selected topologies (FETCH strategy only)'
    )
    def publish_selected(self, request, queryset):
        # Bulk action: mark the selected topologies as published.
        rows_updated = queryset.update(published=True)
        self._message(request, rows_updated, _('successfully published'))
    publish_selected.short_description = _('Publish selected topologies')
    def unpublish_selected(self, request, queryset):
        # Bulk action: mark the selected topologies as unpublished.
        rows_updated = queryset.update(published=False)
        self._message(request, rows_updated, _('successfully unpublished'))
    unpublish_selected.short_description = _('Unpublish selected items')
    def visualize_view(self, request, pk):
        # Render the topology graph popup for a single topology.
        graph_url, history_url = self.get_graph_urls(request, pk)
        context = self.admin_site.each_context(request)
        opts = self.model._meta
        context.update(
            {
                'is_popup': True,
                'opts': opts,
                'change': False,
                'media': self.media,
                'graph_url': graph_url,
                'history_url': history_url,
            }
        )
        return TemplateResponse(
            request, 'admin/%s/visualize.html' % opts.app_label, context
        )
class AbstractNodeAdmin(BaseAdmin):
    # Admin for network nodes.
    list_display = ['name', 'topology', 'addresses']
    list_filter = ['topology']
    search_fields = ['addresses', 'label', 'properties']
    def change_view(self, request, object_id, form_url='', extra_context=None):
        # Show every link touching this node (as source OR target) on the
        # node's change page, with a link to each link's own admin page.
        extra_context = extra_context or {}
        link_model = self.model.source_link_set.field.model
        admin_url = 'admin:{0}_link_change'.format(self.opts.app_label)
        extra_context.update(
            {
                'node_links': link_model.objects.select_related('source', 'target')
                .only('source__label', 'target__label', 'cost', 'status')
                .filter(Q(source_id=object_id) | Q(target_id=object_id)),
                'admin_url': admin_url,
            }
        )
        return super().change_view(request, object_id, form_url, extra_context)
class AbstractLinkAdmin(BaseAdmin):
    # Admin for links between nodes; raw_id widgets avoid loading huge
    # node dropdowns.
    raw_id_fields = ['source', 'target']
    list_display = ['__str__', 'topology', 'status', 'cost', 'cost_text']
    list_filter = ['status', 'topology']
    search_fields = [
        'source__label',
        'target__label',
        'source__addresses',
        'target__addresses',
        'properties',
    ]
"""
Copyright 2015 Matthew D. Ball (M4Numbers)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__author__ = 'Matthew Ball'
import unittest
class ImgurCrawlerTest(unittest.TestCase):
    # Placeholder test case for the Imgur crawler; the real crawler is not
    # wired in yet, so setUp only stores a stub value.
    def setUp(self):
        # TODO: replace the stub with a real ImgurCrawler instance.
        self.ic = 0
    """def test_profile(self):
        ret = self.ic.request_profile('M4Numbers')
        print(ret.bio)"""
if __name__ == '__main__':
    unittest.main()
| M4Numbers/Walkers | tests/ImgurCrawlerTest.py | Python | apache-2.0 | 898 |
# -*- coding: utf-8 -*-
#
# Link Attachment in Website
# Copyright (C) 2014 Xpansa Group (<http://xpansa.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import controllers
import models
| xpansa/website | website_link_attachment/__init__.py | Python | agpl-3.0 | 840 |
# Insert the current directory at the front of sys.path. The slice target
# must be a *list*: assigning the bare string './' would splice the two
# characters '.' and '/' into sys.path individually.
import sys; sys.path[0:0] = ['./']
# PyMC2 model definition: infers exhumation-rate parameters from bedrock and
# detrital thermochronometer ages, plus posterior-predictive ("Sim") ages.
import settings
import pymc as pm
import numpy as np
import scipy as sp
import detrital_ui.common as ba
from detrital_ui.data_types import *
# BIN: use pre-binned catchment hypsometry instead of raw elevation points.
BIN = True
# Relative age-measurement error, shared by all samples.
err = pm.Uniform('RelErr',settings.error_prior[0],settings.error_prior[1],plot=True)
e, hc, abr = ba.InitExhumation(settings)
idx, sim_idx, zprime, sim_zprime = [], [], [], []
exp, obs, sim = [], [], []
for sample in settings.samples:
    #import ipdb;ipdb.set_trace()
    hc[sample.tc_type].plot=True
    # Full parameter vector for this sample's thermochronometer type.
    parms = e+[hc[sample.tc_type]]+abr
    if hasattr(sample, 'xyz'): #Bedrock sample
        z = sample.xyz[:,2] - sample.ref_z #Warning: ignoring ref_z
        idx.append(range(len(z)))
        #Store expected ages for plotting
        #This is slower that computing expected profiles from parms posterior.
        #exp_profile = pm.Lambda('ExpProfile_%s' % sample.name,
        #                        lambda parms=parms:
        #                        ba.h2a(sample.get_zrange(), parms),
        #                        plot=False)
        #nodes.append(exp_profile)
    else: #Detrital sample
        if BIN:
            z = sample.catchment.bins['h']-sample.ref_z
            p_samp = sample.catchment.bins['w']
        else:
            z = sample.catchment.xyz[:,2]-sample.ref_z
            p_samp = np.ones(len(z))/len(z) #Uniform
        # Latent source elevations for the observed and simulated grains.
        idx.append(pm.Categorical("Idx_" + sample.name,
                   p = p_samp, size=len(sample.ages),
                   plot=False, trace=False))
        sim_idx.append(pm.Categorical("SimIdx_" + sample.name,
                       p = p_samp, size=len(sample.ages),
                       plot=False, trace=False))
        sim_zprime.append(pm.Lambda('SimZ_' + sample.name, lambda z=z, i=sim_idx[-1]: z[i],
                          plot=False, trace=True))
    # This is a leftover from the tilt model.
    zprime.append(pm.Lambda('Z_' + sample.name, lambda z=z, i=idx[-1]: z[i],
                  plot=False, trace=True))
    if hasattr(sample, 'xyz'): sim_zprime.append(zprime[-1])
    # Expected age at each source elevation under the current parameters.
    exp.append(pm.Lambda("ExpAge_" + sample.name,
               lambda z=zprime[-1], parms=parms: ba.h2a(z, parms),
               plot=False))
    if sample.use:
        obs.append(ba.NormRelErr("ObsAge_" + sample.name,
                   value = sample.ages, mu=exp[-1], err = err,
                   observed=True))
    # import ipdb;ipdb.set_trace()
    @pm.deterministic(name="SimAge_" + sample.name, plot=False)
    def sim_i(parms=parms, zprime=sim_zprime[-1], err=err):
        exp = ba.h2a(zprime,parms)
        return pm.rnormal(mu = exp, tau = ba.sig2tau(exp*err)),
    sim.append(sim_i)
# mysql/pymysql.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mysql+pymysql
:name: PyMySQL
:dbapi: pymysql
:connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>\
[?<options>]
:url: http://www.pymysql.org/
Unicode
-------
Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.
MySQL-Python Compatibility
--------------------------
The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver,
and targets 100% compatibility. Most behavioral notes for MySQL-python apply
to the pymysql driver as well.
"""
from .mysqldb import MySQLDialect_mysqldb
from ...util import langhelpers, py3k
class MySQLDialect_pymysql(MySQLDialect_mysqldb):
    # PyMySQL is a pure-Python port of MySQL-python (MySQLdb), so this
    # dialect inherits nearly all behavior from the mysqldb dialect.
    driver = 'pymysql'
    # PyMySQL returns description strings already decoded.
    description_encoding = None
    # generally, these two values should be both True
    # or both False. PyMySQL unicode tests pass all the way back
    # to 0.4 either way. See [ticket:3337]
    supports_unicode_statements = True
    supports_unicode_binds = True
    def __init__(self, server_side_cursors=False, **kwargs):
        """Create the dialect; ``server_side_cursors=True`` selects
        pymysql's SSCursor as the default cursor class."""
        super(MySQLDialect_pymysql, self).__init__(**kwargs)
        self.server_side_cursors = server_side_cursors
    @langhelpers.memoized_property
    def supports_server_side_cursors(self):
        # Probe once for pymysql.cursors.SSCursor; its absence disables
        # server-side cursor support for this dialect instance.
        try:
            cursors = __import__('pymysql.cursors').cursors
            self._sscursor = cursors.SSCursor
            return True
        except (ImportError, AttributeError):
            return False
    @classmethod
    def dbapi(cls):
        # Import the DBAPI module lazily, only when the dialect is used.
        return __import__('pymysql')
    if py3k:
        def _extract_error_code(self, exception):
            # On Python 3 pymysql may wrap the original error object in
            # args[0]; unwrap one level to reach the numeric error code.
            if isinstance(exception.args[0], Exception):
                exception = exception.args[0]
            return exception.args[0]
dialect = MySQLDialect_pymysql
| fernandog/Medusa | ext/sqlalchemy/dialects/mysql/pymysql.py | Python | gpl-3.0 | 1,989 |
# -*- coding: utf-8 -*-
#
from rest_framework import viewsets
from rest_framework.exceptions import ValidationError
from django.db import transaction
from django.utils.translation import ugettext as _
from django.conf import settings
from orgs.mixins.api import RootOrgViewMixin
from common.permissions import IsValidUser
from perms.utils import AssetPermissionUtil
from ..models import CommandExecution
from ..serializers import CommandExecutionSerializer
from ..tasks import run_command_execution
class CommandExecutionViewSet(RootOrgViewMixin, viewsets.ModelViewSet):
    """CRUD API over the requesting user's command executions.

    Creating an execution validates host permissions and then schedules the
    actual run as a Celery task once the transaction commits.
    """
    serializer_class = CommandExecutionSerializer
    permission_classes = (IsValidUser,)

    def get_queryset(self):
        # Users may only see executions they own.
        user_id = str(self.request.user.id)
        return CommandExecution.objects.filter(user_id=user_id)

    def check_hosts(self, serializer):
        """Raise ValidationError if any requested host is not permitted
        for the chosen run-as system user."""
        data = serializer.validated_data
        requested = data["hosts"]
        run_as = data["run_as"]
        util = AssetPermissionUtil(self.request.user)
        util.filter_permissions(system_users=run_as.id)
        granted = util.get_assets().filter(id__in=[h.id for h in requested])
        denied = set(requested) - set(granted)
        if denied:
            msg = _("Not has host {} permission").format(
                [str(h.id) for h in denied]
            )
            raise ValidationError({"hosts": msg})

    def check_permissions(self, request):
        # Ordinary users are locked out entirely when the security switch
        # disables command execution; admins are unaffected.
        disabled = not settings.SECURITY_COMMAND_EXECUTION
        if disabled and request.user.is_common_user:
            return self.permission_denied(request, "Command execution disabled")
        return super().check_permissions(request)

    def perform_create(self, serializer):
        self.check_hosts(serializer)
        instance = serializer.save()
        # Stamp ownership after the serializer has created the row.
        instance.user = self.request.user
        instance.save()
        cols = self.request.query_params.get("cols", '80')
        rows = self.request.query_params.get("rows", '24')

        def _dispatch():
            # Only fire the task once the enclosing transaction commits,
            # so the worker is guaranteed to see the new row.
            run_command_execution.apply_async(
                args=(instance.id,),
                kwargs={"cols": cols, "rows": rows},
                task_id=str(instance.id),
            )

        transaction.on_commit(_dispatch)
| zsjohny/jumpserver | apps/ops/api/command.py | Python | gpl-2.0 | 2,150 |
"""Test Z-Wave config panel."""
import asyncio
import json
from unittest.mock import MagicMock, patch
import pytest
from homeassistant.bootstrap import async_setup_component
from homeassistant.components import config
from homeassistant.components.zwave import DATA_NETWORK, const
from tests.mock.zwave import MockNode, MockValue, MockEntityValues
VIEW_NAME = 'api:config:zwave:device_config'
@pytest.fixture
def client(loop, hass, aiohttp_client):
    """Client to communicate with Z-Wave config views."""
    # Restrict the config component to the zwave section so only the
    # views under test are registered on the app.
    with patch.object(config, 'SECTIONS', ['zwave']):
        loop.run_until_complete(async_setup_component(hass, 'config', {}))
    # An aiohttp test client bound to Home Assistant's HTTP app.
    return loop.run_until_complete(aiohttp_client(hass.http.app))
async def test_get_device_config(client):
    """Test getting device config."""
    # Native coroutine replaces the deprecated @asyncio.coroutine /
    # ``yield from`` style, matching the async tests later in this file.
    def mock_read(path):
        """Mock reading data."""
        return {
            'hello.beer': {
                'free': 'beer',
            },
            'other.entity': {
                'do': 'something',
            },
        }

    # Patch the config helper so no real YAML file is read.
    with patch('homeassistant.components.config._read', mock_read):
        resp = await client.get(
            '/api/config/zwave/device_config/hello.beer')

    assert resp.status == 200
    result = await resp.json()
    assert result == {'free': 'beer'}
async def test_update_device_config(client):
    """Test updating device config."""
    orig_data = {
        'hello.beer': {
            'ignored': True,
        },
        'other.entity': {
            'polling_intensity': 2,
        },
    }

    def mock_read(path):
        """Mock reading data."""
        return orig_data

    written = []

    def mock_write(path, data):
        """Mock writing data."""
        written.append(data)

    with patch('homeassistant.components.config._read', mock_read), \
            patch('homeassistant.components.config._write', mock_write):
        resp = await client.post(
            '/api/config/zwave/device_config/hello.beer', data=json.dumps({
                'polling_intensity': 2
            }))

    assert resp.status == 200
    result = await resp.json()
    assert result == {'result': 'ok'}

    # The new option is merged into the stored entry before it is written.
    orig_data['hello.beer']['polling_intensity'] = 2
    assert written[0] == orig_data
async def test_update_device_config_invalid_key(client):
    """Test updating device config rejects an invalid entity id."""
    resp = await client.post(
        '/api/config/zwave/device_config/invalid_entity', data=json.dumps({
            'polling_intensity': 2
        }))

    assert resp.status == 400


async def test_update_device_config_invalid_data(client):
    """Test updating device config rejects an unknown option."""
    resp = await client.post(
        '/api/config/zwave/device_config/hello.beer', data=json.dumps({
            'invalid_option': 2
        }))

    assert resp.status == 400


async def test_update_device_config_invalid_json(client):
    """Test updating device config rejects malformed JSON."""
    resp = await client.post(
        '/api/config/zwave/device_config/hello.beer', data='not json')

    assert resp.status == 400
async def test_get_values(hass, client):
    """Test getting values on node."""
    node = MockNode(node_id=1)
    value = MockValue(value_id=123456, node=node, label='Test Label',
                      instance=1, index=2, poll_intensity=4)
    values = MockEntityValues(primary=value)
    # A second node's value must not leak into the response for node 1.
    node2 = MockNode(node_id=2)
    value2 = MockValue(value_id=234567, node=node2, label='Test Label 2')
    values2 = MockEntityValues(primary=value2)
    hass.data[const.DATA_ENTITY_VALUES] = [values, values2]

    resp = await client.get('/api/zwave/values/1')

    assert resp.status == 200
    result = await resp.json()
    assert result == {
        '123456': {
            'label': 'Test Label',
            'instance': 1,
            'index': 2,
            'poll_intensity': 4,
        }
    }
async def test_get_groups(hass, client):
    """Test getting groupdata on node."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=2)
    node.groups.associations = 'assoc'
    node.groups.associations_instances = 'inst'
    node.groups.label = 'the label'
    node.groups.max_associations = 'max'
    # The node exposes a single association group with index 1.
    node.groups = {1: node.groups}
    network.nodes = {2: node}

    resp = await client.get('/api/zwave/groups/2')

    assert resp.status == 200
    result = await resp.json()
    assert result == {
        '1': {
            'association_instances': 'inst',
            'associations': 'assoc',
            'label': 'the label',
            'max_associations': 'max'
        }
    }


async def test_get_groups_nogroups(hass, client):
    """Test getting groupdata on node with no groups."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=2)
    network.nodes = {2: node}

    resp = await client.get('/api/zwave/groups/2')

    assert resp.status == 200
    result = await resp.json()
    assert result == {}


async def test_get_groups_nonode(hass, client):
    """Test getting groupdata on nonexisting node."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    network.nodes = {1: 1, 5: 5}

    resp = await client.get('/api/zwave/groups/2')

    assert resp.status == 404
    result = await resp.json()
    assert result == {'message': 'Node not found'}
async def test_get_config(hass, client):
    """Test getting config on node."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=2)
    value = MockValue(
        index=12,
        command_class=const.COMMAND_CLASS_CONFIGURATION)
    value.label = 'label'
    value.help = 'help'
    value.type = 'type'
    value.data = 'data'
    value.data_items = ['item1', 'item2']
    value.max = 'max'
    value.min = 'min'
    node.values = {12: value}
    network.nodes = {2: node}
    node.get_values.return_value = node.values

    resp = await client.get('/api/zwave/config/2')

    assert resp.status == 200
    result = await resp.json()
    assert result == {'12': {'data': 'data',
                             'data_items': ['item1', 'item2'],
                             'help': 'help',
                             'label': 'label',
                             'max': 'max',
                             'min': 'min',
                             'type': 'type'}}


async def test_get_config_noconfig_node(hass, client):
    """Test getting config on node without config."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=2)
    network.nodes = {2: node}
    node.get_values.return_value = node.values

    resp = await client.get('/api/zwave/config/2')

    assert resp.status == 200
    result = await resp.json()
    assert result == {}


async def test_get_config_nonode(hass, client):
    """Test getting config on nonexisting node."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    network.nodes = {1: 1, 5: 5}

    resp = await client.get('/api/zwave/config/2')

    assert resp.status == 404
    result = await resp.json()
    assert result == {'message': 'Node not found'}
async def test_get_usercodes_nonode(hass, client):
    """Test getting usercodes on nonexisting node."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    network.nodes = {1: 1, 5: 5}

    resp = await client.get('/api/zwave/usercodes/2')

    assert resp.status == 404
    result = await resp.json()
    assert result == {'message': 'Node not found'}


async def test_get_usercodes(hass, client):
    """Test getting usercodes on node."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=18,
                    command_classes=[const.COMMAND_CLASS_USER_CODE])
    value = MockValue(
        index=0,
        command_class=const.COMMAND_CLASS_USER_CODE)
    value.genre = const.GENRE_USER
    value.label = 'label'
    value.data = '1234'
    node.values = {0: value}
    network.nodes = {18: node}
    node.get_values.return_value = node.values

    resp = await client.get('/api/zwave/usercodes/18')

    assert resp.status == 200
    result = await resp.json()
    assert result == {'0': {'code': '1234',
                            'label': 'label',
                            'length': 4}}


async def test_get_usercode_nousercode_node(hass, client):
    """Test getting usercodes on node without usercodes."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=18)
    network.nodes = {18: node}
    node.get_values.return_value = node.values

    resp = await client.get('/api/zwave/usercodes/18')

    assert resp.status == 200
    result = await resp.json()
    assert result == {}


async def test_get_usercodes_no_genreuser(hass, client):
    """Test getting usercodes on node missing genre user."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=18,
                    command_classes=[const.COMMAND_CLASS_USER_CODE])
    value = MockValue(
        index=0,
        command_class=const.COMMAND_CLASS_USER_CODE)
    # GENRE_SYSTEM values must be filtered out of the response.
    value.genre = const.GENRE_SYSTEM
    value.label = 'label'
    value.data = '1234'
    node.values = {0: value}
    network.nodes = {18: node}
    node.get_values.return_value = node.values

    resp = await client.get('/api/zwave/usercodes/18')

    assert resp.status == 200
    result = await resp.json()
    assert result == {}
async def test_save_config_no_network(hass, client):
    """Test saving configuration without network data."""
    resp = await client.post('/api/zwave/saveconfig')

    assert resp.status == 404
    result = await resp.json()
    assert result == {'message': 'No Z-Wave network data found'}


async def test_save_config(hass, client):
    """Test saving configuration."""
    network = hass.data[DATA_NETWORK] = MagicMock()

    resp = await client.post('/api/zwave/saveconfig')

    assert resp.status == 200
    result = await resp.json()
    # The view must delegate persistence to the network object.
    assert network.write_config.called
    assert result == {'message': 'Z-Wave configuration saved to file.'}
async def test_get_protection_values(hass, client):
    """Test getting protection values on node."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    # Node 18 advertises the Protection command class.
    node = MockNode(node_id=18,
                    command_classes=[const.COMMAND_CLASS_PROTECTION])
    value = MockValue(
        value_id=123456,
        index=0,
        instance=1,
        command_class=const.COMMAND_CLASS_PROTECTION)
    value.label = 'Protection Test'
    value.data_items = ['Unprotected', 'Protection by Sequence',
                        'No Operation Possible']
    value.data = 'Unprotected'
    network.nodes = {18: node}
    node.value = value
    # Stub the OpenZWave accessors the view is expected to consult.
    node.get_protection_item.return_value = "Unprotected"
    node.get_protection_items.return_value = value.data_items
    node.get_protections.return_value = {value.value_id: 'Object'}
    resp = await client.get('/api/zwave/protection/18')
    assert resp.status == 200
    result = await resp.json()
    # All three accessors should have been used to build the payload.
    assert node.get_protections.called
    assert node.get_protection_item.called
    assert node.get_protection_items.called
    assert result == {
        'value_id': '123456',
        'selected': 'Unprotected',
        'options': ['Unprotected', 'Protection by Sequence',
                    'No Operation Possible']
    }
async def test_get_protection_values_nonexisting_node(hass, client):
    """Test getting protection values on node with wrong nodeid."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=18,
                    command_classes=[const.COMMAND_CLASS_PROTECTION])
    value = MockValue(
        value_id=123456,
        index=0,
        instance=1,
        command_class=const.COMMAND_CLASS_PROTECTION)
    value.label = 'Protection Test'
    value.data_items = ['Unprotected', 'Protection by Sequence',
                        'No Operation Possible']
    value.data = 'Unprotected'
    # The node is registered under id 17, so the request for 18 must 404.
    network.nodes = {17: node}
    node.value = value
    resp = await client.get('/api/zwave/protection/18')
    assert resp.status == 404
    result = await resp.json()
    # No accessor may be touched for an unknown node.
    assert not node.get_protections.called
    assert not node.get_protection_item.called
    assert not node.get_protection_items.called
    assert result == {'message': 'Node not found'}
async def test_get_protection_values_without_protectionclass(hass, client):
    """Test getting protection values on node without protectionclass."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=18)
    value = MockValue(
        value_id=123456,
        index=0,
        instance=1)
    network.nodes = {18: node}
    node.value = value
    resp = await client.get('/api/zwave/protection/18')
    assert resp.status == 200
    result = await resp.json()
    assert not node.get_protections.called
    assert not node.get_protection_item.called
    assert not node.get_protection_items.called
    # No Protection command class -> empty payload, not an error.
    assert result == {}
async def test_set_protection_value(hass, client):
    """Test setting protection value on node."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=18,
                    command_classes=[const.COMMAND_CLASS_PROTECTION])
    value = MockValue(
        value_id=123456,
        index=0,
        instance=1,
        command_class=const.COMMAND_CLASS_PROTECTION)
    value.label = 'Protection Test'
    value.data_items = ['Unprotected', 'Protection by Sequence',
                        'No Operation Possible']
    value.data = 'Unprotected'
    network.nodes = {18: node}
    node.value = value
    resp = await client.post(
        '/api/zwave/protection/18', data=json.dumps({
            'value_id': '123456', 'selection': 'Protection by Sequence'}))
    assert resp.status == 200
    result = await resp.json()
    assert node.set_protection.called
    # NOTE(review): 'succsessfully' mirrors the exact message returned by the
    # zwave config API -- verify against the component before "fixing" it.
    assert result == {'message': 'Protection setting succsessfully set'}
async def test_set_protection_value_failed(hass, client):
    """Test setting protection value failed on node."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=18,
                    command_classes=[const.COMMAND_CLASS_PROTECTION])
    value = MockValue(
        value_id=123456,
        index=0,
        instance=1,
        command_class=const.COMMAND_CLASS_PROTECTION)
    value.label = 'Protection Test'
    value.data_items = ['Unprotected', 'Protection by Sequence',
                        'No Operation Possible']
    value.data = 'Unprotected'
    network.nodes = {18: node}
    node.value = value
    # Simulate OpenZWave rejecting the new setting.
    node.set_protection.return_value = False
    # The misspelled selection string is irrelevant here: set_protection is
    # mocked, so only the failure path is exercised.
    resp = await client.post(
        '/api/zwave/protection/18', data=json.dumps({
            'value_id': '123456', 'selection': 'Protecton by Seuence'}))
    assert resp.status == 202
    result = await resp.json()
    assert node.set_protection.called
    assert result == {'message': 'Protection setting did not complete'}
async def test_set_protection_value_nonexisting_node(hass, client):
    """Test setting protection value on nonexisting node."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=17,
                    command_classes=[const.COMMAND_CLASS_PROTECTION])
    value = MockValue(
        value_id=123456,
        index=0,
        instance=1,
        command_class=const.COMMAND_CLASS_PROTECTION)
    value.label = 'Protection Test'
    value.data_items = ['Unprotected', 'Protection by Sequence',
                        'No Operation Possible']
    value.data = 'Unprotected'
    # Registered under 17; posting to 18 must 404 without side effects.
    network.nodes = {17: node}
    node.value = value
    node.set_protection.return_value = False
    resp = await client.post(
        '/api/zwave/protection/18', data=json.dumps({
            'value_id': '123456', 'selection': 'Protecton by Seuence'}))
    assert resp.status == 404
    result = await resp.json()
    assert not node.set_protection.called
    assert result == {'message': 'Node not found'}
async def test_set_protection_value_missing_class(hass, client):
    """Test setting protection value on node without protectionclass."""
    network = hass.data[DATA_NETWORK] = MagicMock()
    node = MockNode(node_id=17)
    value = MockValue(
        value_id=123456,
        index=0,
        instance=1)
    network.nodes = {17: node}
    node.value = value
    node.set_protection.return_value = False
    resp = await client.post(
        '/api/zwave/protection/17', data=json.dumps({
            'value_id': '123456', 'selection': 'Protecton by Seuence'}))
    assert resp.status == 404
    result = await resp.json()
    assert not node.set_protection.called
    assert result == {'message': 'No protection commandclass on this node'}
| persandstrom/home-assistant | tests/components/config/test_zwave.py | Python | apache-2.0 | 16,984 |
from chainer import cuda
class GradientHardClipping(object):
    """Optimizer/UpdateRule hook that clamps gradients element-wise.

    Every gradient array is clipped in place so all of its entries lie
    within ``[lower_bound, upper_bound]`` before the parameter update runs.

    Args:
        lower_bound (float): The lower bound of the gradient value.
        upper_bound (float): The upper bound of the gradient value.

    Attributes:
        ~optimizer_hooks.GradientHardClipping.lower_bound (float): The
            lower bound of the gradient value.
        ~optimizer_hooks.GradientHardClipping.upper_bound (float): The
            upper bound of the gradient value.
        ~optimizer_hooks.GradientHardClipping.timing (string): ``'pre'`` --
            the hook runs before any updates are applied.
        ~optimizer_hooks.GradientHardClipping.call_for_each_param (bool):
            ``True`` -- the hook is invoked once per parameter rather than
            once per optimizer step; users are not expected to change this.

    .. versionadded:: 4.0.0
       The *timing* parameter.

    """
    name = 'GradientHardClipping'
    call_for_each_param = True
    timing = 'pre'

    def __init__(self, lower_bound, upper_bound):
        self.lower_bound = lower_bound
        self.upper_bound = upper_bound

    def __call__(self, rule, param):
        g = param.grad
        # Parameters untouched by the backward pass have no gradient.
        if g is None:
            return
        xp = cuda.get_array_module(g)
        with cuda.get_device_from_array(g):
            # Clamp in place on whichever device holds the gradient.
            xp.clip(g, self.lower_bound, self.upper_bound, out=g)
| rezoo/chainer | chainer/optimizer_hooks/gradient_hard_clipping.py | Python | mit | 1,937 |
# shipBonusMiningDroneAmountPercentRookie
#
# Used by:
# Ship: Gnosis
# Ship: Taipan
# Ship: Velator
type = "passive"


def handler(fit, container, context):
    """Boost the mining yield of all mining drones by the hull's
    rookie drone bonus attribute."""
    bonus = container.getModifiedItemAttr("rookieDroneBonus")
    fit.drones.filteredItemBoost(
        lambda drone: drone.item.group.name == "Mining Drone",
        "miningAmount",
        bonus,
    )
| Ebag333/Pyfa | eos/effects/shipbonusminingdroneamountpercentrookie.py | Python | gpl-3.0 | 346 |
from django.contrib import admin
from modeltranslation.admin import TranslationAdmin
from exhibitions.models import Exhibition
class ExhibitionAdmin(TranslationAdmin):
    # Admin for Exhibition; TranslationAdmin adds per-language tabs for
    # the translatable fields.
    # Image preview is computed, so it must be read-only in the form.
    readonly_fields = ('get_img_tag_for_admin',)
    fieldsets = [
        (
            None, {
                'fields': (
                    ('begin', 'end',),
                    'title',
                    'showroom',
                    'showroom_url',
                    ('image','get_img_tag_for_admin'),
                )
            }
        ),
    ]
    # Sidebar filters on the exhibition date range.
    list_filter = ['begin','end']
admin.site.register(Exhibition, ExhibitionAdmin)
| hombit/olgart | app/exhibitions/admin.py | Python | mit | 482 |
"""
Daplug keyset creation class
"""
from conv import *
class KeySet:
    """@KeySet"""
    # Key usage constants: each value selects what a key set may be used for
    # on the dongle (GlobalPlatform auth, OTP generation, enc/dec, SAM
    # operations, ...).  The '@KeySet.*' docstrings are markers consumed by
    # the project's documentation generator and must be preserved.
    USAGE_GP = 0x01
    """@KeySet.USAGE_GP"""
    USAGE_GP_AUTH = 0x02
    """@KeySet.USAGE_GP_AUTH"""
    USAGE_HOTP = 0x03
    """@KeySet.USAGE_HOTP"""
    USAGE_HOTP_VALIDATION = 0x04
    """@KeySet.USAGE_HOTP_VALIDATION"""
    USAGE_OTP = 0x05
    """@KeySet.USAGE_OTP"""
    USAGE_ENC = 0x06
    """@KeySet.USAGE_ENC"""
    USAGE_DEC = 0x07
    """@KeySet.USAGE_DEC"""
    USAGE_ENC_DEC = 0x08
    """@KeySet.USAGE_ENC_DEC"""
    USAGE_SAM_CTX = 0x09
    """@KeySet.USAGE_SAM_CTX"""
    USAGE_SAM_GP = 0x0A
    """@KeySet.USAGE_SAM_GP"""
    USAGE_SAM_DIV1 = 0x0B
    """@KeySet.USAGE_SAM_DIV1"""
    USAGE_SAM_DIV2 = 0x0C
    """@KeySet.USAGE_SAM_DIV2"""
    USAGE_SAM_CLEAR_EXPORT_DIV1 = 0x0D
    """@KeySet.USAGE_SAM_CLEAR_EXPORT_DIV1"""
    USAGE_SAM_CLEAR_EXPORT_DIV2 = 0x0E
    """@KeySet.USAGE_SAM_CLEAR_EXPORT_DIV2"""
    USAGE_IMPORT_EXPORT_TRANSIENT = 0x0F
    """@KeySet.USAGE_IMPORT_EXPORT_TRANSIENT"""
    USAGE_TOTP_TIME_SRC = 0x10
    """@KeySet.USAGE_TOTP_TIME_SRC"""
    USAGE_TOTP = 0x11
    """@KeySet.USAGE_TOTP"""
    USAGE_HMAC_SHA1 = 0x12
    """@KeySet.USAGE_HMAC_SHA1"""
    def __init__(self, version=None, encKey=None, macKey=None, dekKey=None):
        """@KeySet.KeySet"""
        self.usage = None
        self.access = None
        # BUGFIX: always define ``version`` -- previously the attribute was
        # only set when a value was supplied, so getVersion() raised
        # AttributeError on a bare KeySet(); it now returns None instead.
        self.version = version
        # Keys are given as hexadecimal text and stored decoded via hex2txt.
        # The MAC and DEK keys default to the ENC key when omitted.
        self.encKey = None
        if encKey is not None:
            self.encKey = hex2txt(encKey)
        self.macKey = None
        if macKey is not None:
            self.macKey = hex2txt(macKey)
        if macKey is None and encKey is not None:
            self.macKey = self.encKey
        self.dekKey = None
        if dekKey is not None:
            self.dekKey = hex2txt(dekKey)
        if dekKey is None and encKey is not None:
            self.dekKey = self.encKey
    def setVersion(self, version):
        """@KeySet.setVersion"""
        self.version = version
    def getVersion(self):
        """@KeySet.getVersion"""
        return self.version
    def setKey(self, id, keyValue):
        """@KeySet.setKey"""
        # Key ids: 1 = ENC, 2 = MAC, 3 = DEK; keyValue is hexadecimal text.
        if (id == 0x01):
            self.encKey = hex2txt(keyValue)
        elif (id == 0x02):
            self.macKey = hex2txt(keyValue)
        elif (id == 0x03):
            self.dekKey = hex2txt(keyValue)
        else:
            raise DaplugException(0x8000, "Invalid key number")
    def getKey(self, id):
        """@KeySet.getKey"""
        # Returns the key re-encoded as hexadecimal text.
        if (id == 0x01):
            return txt2hex(self.encKey)
        elif (id == 0x02):
            return txt2hex(self.macKey)
        elif (id == 0x03):
            return txt2hex(self.dekKey)
        else:
            raise DaplugException(0x8000, "Invalid key number")
    def setKeyUsage(self, usage):
        """@KeySet.setKeyUsage"""
        self.usage = usage
    def getKeyUsage(self):
        """@KeySet.getKeyUsage"""
        return self.usage
    def setKeyAccess(self, access):
        """@KeySet.setKeyAccess"""
        self.access = access
    def getKeyAccess(self):
        """@KeySet.getKeyAccess"""
        return self.access
| Plug-up/daplug-python | daplug/keyset.py | Python | apache-2.0 | 3,442 |
"""
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A kaleidoscope pattern with icosahedral symmetry
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.colors import hsv_to_rgb
def Klein(z):
    """Evaluate Klein's icosahedral invariant at ``z``.

    Works on complex scalars as well as numpy arrays of complex values.
    """
    numerator = 1728 * (z * (z ** 10 + 11 * z ** 5 - 1)) ** 5
    denominator = -(z ** 20 + 1) + 228 * (z ** 15 - z ** 5) - 494 * z ** 10
    return numerator / denominator ** 3
def RiemannSphere(z):
    """Inverse stereographic projection of the complex plane onto the
    unit (Riemann) sphere; returns the (x, y, z) sphere coordinates."""
    x, y = z.real, z.imag
    denom = 1 + x * x + y * y
    return 2 * x / denom, 2 * y / denom, 2 / denom - 1
def Mobius(z):
    """Distort the resulting image via the fixed Mobius map
    z -> (z - 20) / (3z + i)."""
    numerator = z - 20
    denominator = 3 * z + 1j
    return numerator / denominator
def main(imgsize):
    """Render the icosahedral kaleidoscope and save it as kaleidoscope.png."""
    # Sample the square [-6, 6] x [-6, 6] of the complex plane.
    y, x = np.ogrid[6 : -6 : imgsize * 2j, -6 : 6 : imgsize * 2j]
    grid = x + y * 1j
    sx, sy, sz = RiemannSphere(Klein(Mobius(Klein(grid))))
    # Derive HSV channels from the sphere coordinates.
    hue = np.sin(sx * np.pi) ** 2
    sat = np.cos(sy * np.pi) ** 2
    val = abs(np.sin(sz * np.pi) * np.cos(sz * np.pi)) ** 0.2
    rgb = hsv_to_rgb(np.stack((hue, sat, val), axis=2))
    # One pixel per sample: figure size in inches times dpi = imgsize.
    fig = plt.figure(figsize=(imgsize / 100.0, imgsize / 100.0), dpi=100)
    axes = fig.add_axes([0, 0, 1, 1], aspect=1)
    axes.axis("off")
    axes.imshow(rgb)
    fig.savefig("kaleidoscope.png")
if __name__ == "__main__":
main(imgsize=500)
| neozhaoliang/pywonderland | src/misc/kaleidoscope.py | Python | mit | 1,447 |
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from openerp import fields, models
class Summary(models.Model):
    # Extend myo.summary with a many-to-many link to annotations; the
    # relation table is shared with the inverse field on Annotation below.
    _inherit = 'myo.summary'
    annotation_ids = fields.Many2many(
        'myo.annotation',
        'myo_summary_annotation_rel',
        'summary_id',
        'annotation_id',
        'Annotations'
    )
class Annotation(models.Model):
    # Inverse side of the Summary <-> Annotation relation; note the swapped
    # column arguments so both fields map onto the same relation table.
    _inherit = 'myo.annotation'
    summary_ids = fields.Many2many(
        'myo.summary',
        'myo_summary_annotation_rel',
        'annotation_id',
        'summary_id',
        'Summaries'
    )
| MostlyOpen/odoo_addons | myo_summary/models/annotation.py | Python | agpl-3.0 | 1,419 |
import sys
import xbmc,xbmcaddon
import sqlite3

# Handle to this Kodi add-on's settings store.
ADDON = xbmcaddon.Addon(id='script.tvguide.Vader')

# Channel and programme start time this stop-script was armed for.
channel = sys.argv[1]
start = sys.argv[2]

# Abort when the user has since switched channel or programme: only stop
# playback for the exact show this script was scheduled against.
if (ADDON.getSetting('playing.channel') != channel
        or ADDON.getSetting('playing.start') != start):
    # sys.exit() instead of quit(): quit() is a site-module convenience and
    # may not exist in embedded interpreters such as Kodi's.
    sys.exit()

# Clear the bookkeeping and stop the player.
ADDON.setSetting('playing.channel','')
ADDON.setSetting('playing.start','')
xbmc.executebuiltin('PlayerControl(Stop)')
| ledtvavs/repository.ledtv | script.tvguide.Vader/stop.py | Python | gpl-3.0 | 386 |
import _plotly_utils.basevalidators
class SymbolValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    # Auto-generated validator for the ``scatter3d.marker.symbol``
    # attribute: accepts one of the enumerated symbols below, or (because
    # array_ok is True) a per-point array of them.
    def __init__(self, plotly_name="symbol", parent_name="scatter3d.marker", **kwargs):
        super(SymbolValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # A per-point array of symbols is also valid.
            array_ok=kwargs.pop("array_ok", True),
            # 'calc': changing the symbol triggers a full recalculation.
            edit_type=kwargs.pop("edit_type", "calc"),
            role=kwargs.pop("role", "style"),
            values=kwargs.pop(
                "values",
                [
                    "circle",
                    "circle-open",
                    "square",
                    "square-open",
                    "diamond",
                    "diamond-open",
                    "cross",
                    "x",
                ],
            ),
            **kwargs
        )
| plotly/python-api | packages/python/plotly/plotly/validators/scatter3d/marker/_symbol.py | Python | mit | 863 |
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from selenium.webdriver.common.by import By
from pages.base import Page
class PlatformFilter(Page):
    # Page-object wrapper for the dashboard's platform filter widget.
    class CheckboxFilter(Page):
        # The checkbox list used to include/exclude platforms.
        _platform_checkbox_locator = (By.CSS_SELECTOR, ".bars[name='platform'] input")
        _platforms_locator = (By.CSS_SELECTOR, "ul[name='platform'] li")
        @property
        def platforms(self):
            """Returns a list of Platform instances"""
            # NOTE(review): Platform is defined on PlatformFilter, not on
            # CheckboxFilter -- presumably resolved through the instance's
            # actual class; verify against the callers.
            return [self.Platform(self.testsetup, element) for element in self.selenium.find_elements(*self._platforms_locator)]
        @property
        def selected_platform(self):
            """Returns the currently selected platform."""
            # Returns None implicitly when nothing is selected.
            for platform in self.platforms:
                if platform.is_selected:
                    return platform
        def select_platform(self, value):
            """Selects a platform."""
            # Narrow the checkbox locator to the platform's value attribute.
            select = self.selenium.find_element(
                self._platform_checkbox_locator[0],
                self._platform_checkbox_locator[1] + '[value="%s"]' % value)
            if not select.is_selected():
                select.click()
        def unselect_platform(self, value):
            # Click only if currently selected, so the call is idempotent.
            select = self.selenium.find_element(
                self._platform_checkbox_locator[0],
                self._platform_checkbox_locator[1] + '[value="%s"]' % value)
            if select.is_selected():
                select.click()
        def platform(self, name):
            # Look up a Platform by its visible name.
            for platform in self.platforms:
                if platform.name == name:
                    return platform
            raise Exception('Platform not found: %s' % name)
    class Platform(Page):
        # A single platform entry (one <li>) inside the filter list.
        _checkbox_locator = (By.TAG_NAME, 'input')
        _name_locator = (By.CSS_SELECTOR, 'label > span:nth-child(2)')
        _message_count_locator = (By.CLASS_NAME, 'count')
        def __init__(self, testsetup, element):
            Page.__init__(self, testsetup)
            # Root element of this entry; all locators are relative to it.
            self._root_element = element
        @property
        def is_selected(self):
            return self._root_element.find_element(*self._checkbox_locator).is_selected()
        @property
        def name(self):
            return self._root_element.find_element(*self._name_locator).text
        @property
        def code(self):
            # The checkbox "value" attribute carries the platform code.
            return self._root_element.find_element(*self._checkbox_locator).get_attribute('value')
        @property
        def message_count(self):
            # TODO Use native mouse interactions to hover over element to get the text
            message_count = self._root_element.find_element(*self._message_count_locator)
            return int(self.selenium.execute_script('return arguments[0].textContent', message_count))
        def click(self):
            self._root_element.find_element(*self._checkbox_locator).click()
| staranjeet/fjord | smoketests/pages/regions/platform_filter.py | Python | bsd-3-clause | 3,098 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Dag Wieers (@dagwieers) <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = r'''
---
module: mso_schema_template_deploy
short_description: Deploy schema templates to sites
description:
- Deploy schema templates to sites.
author:
- Dag Wieers (@dagwieers)
version_added: '2.8'
options:
  schema:
    description:
    - The name of the schema.
    type: str
    required: yes
  template:
    description:
    - The name of the template.
    type: str
    aliases: [ name ]
  site:
    description:
    - The name of the site B(to undeploy).
    type: str
  state:
    description:
    - Use C(deploy) to deploy schema template.
    - Use C(status) to get deployment status.
    - Use C(undeploy) to undeploy schema template from a site.
    type: str
    choices: [ deploy, status, undeploy ]
    default: deploy
seealso:
- module: mso_schema_site
- module: mso_schema_template
extends_documentation_fragment: mso
'''

# BUG FIX: the examples previously invoked 'mso_schema_template' and
# 'mso_schema' instead of this module, 'mso_schema_template_deploy'.
EXAMPLES = r'''
- name: Deploy a schema template
  mso_schema_template_deploy:
    host: mso_host
    username: admin
    password: SomeSecretPassword
    schema: Schema 1
    template: Template 1
    state: deploy
  delegate_to: localhost

- name: Undeploy a schema template
  mso_schema_template_deploy:
    host: mso_host
    username: admin
    password: SomeSecretPassword
    schema: Schema 1
    template: Template 1
    site: Site 1
    state: undeploy
  delegate_to: localhost

- name: Get deployment status
  mso_schema_template_deploy:
    host: mso_host
    username: admin
    password: SomeSecretPassword
    schema: Schema 1
    template: Template 1
    state: status
  delegate_to: localhost
  register: status_result
'''

RETURN = r'''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.mso import MSOModule, mso_argument_spec
def main():
    """Ansible module entry point.

    Deploys, undeploys, or queries deployment status of an MSO schema
    template, depending on the ``state`` parameter.
    """
    argument_spec = mso_argument_spec()
    argument_spec.update(
        schema=dict(type='str', required=True),
        template=dict(type='str', required=True, aliases=['name']),
        site=dict(type='str'),
        state=dict(type='str', default='deploy', choices=['deploy', 'status', 'undeploy']),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            # Undeploying is site-scoped, so a site is mandatory then.
            ['state', 'undeploy', ['site']],
        ],
    )

    schema = module.params['schema']
    template = module.params['template']
    site = module.params['site']
    state = module.params['state']

    mso = MSOModule(module)

    # Resolve the schema name to its id; the payload identifies which
    # template of that schema the request operates on.
    schema_id = mso.lookup_schema(schema)

    payload = dict(
        schemaId=schema_id,
        templateName=template,
    )

    qs = None
    if state == 'deploy':
        path = 'execute/schema/{0}/template/{1}'.format(schema_id, template)
    elif state == 'status':
        path = 'status/schema/{0}/template/{1}'.format(schema_id, template)
    elif state == 'undeploy':
        # Undeploy uses the same execute endpoint, qualified by the site id
        # in the query string.
        path = 'execute/schema/{0}/template/{1}'.format(schema_id, template)
        site_id = mso.lookup_site(site)
        qs = dict(undeploy=site_id)

    # BUG FIX: 'status' was previously only assigned when not in check mode,
    # so running with --check raised NameError at exit_json. Default to {}.
    status = {}
    if not module.check_mode:
        status = mso.request(path, method='GET', data=payload, qs=qs)

    mso.exit_json(**status)
# Standard Ansible module entry point.
if __name__ == "__main__":
    main()
| tersmitten/ansible | lib/ansible/modules/network/aci/mso_schema_template_deploy.py | Python | gpl-3.0 | 3,548 |
#!/usr/bin/env python
def fib(li, maxval):
    """Extend the Fibonacci-style seed list *li* by summing its last two
    elements, and return all terms strictly below *maxval*.

    The seed must have at least two elements (unless its last element is
    already >= maxval, in which case everything but that element is
    returned, matching the original recursive behaviour).

    Iterative rewrite: the recursive version built a new list on every call
    (O(n^2) copying) and was bounded by Python's recursion limit.
    """
    result = list(li)
    while result[-1] < maxval:
        result.append(result[-1] + result[-2])
    # The loop stops after producing the first term >= maxval; drop it.
    return result[:-1]
# Project Euler #2: sum of the even-valued Fibonacci terms below four million.
# The print() call form (single parenthesized argument) works under both
# Python 2 and Python 3; the old 'print sum(...)' statement was Python 2 only.
print(sum(x for x in fib([1, 2], 4000000) if x % 2 == 0))
| kalikaneko/euler | 002/fibo.py | Python | lgpl-3.0 | 194 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from uuid import uuid4
import create_batch_prediction_job_text_classification_sample
import pytest
import helpers
# Test resources live in a build-specific GCP project supplied by CI.
PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT")
LOCATION = "us-central1"
MODEL_ID = "7827432074230366208" # Permanent restaurant rating model
# Unique display name so concurrent test runs do not collide.
DISPLAY_NAME = f"temp_create_batch_prediction_tcn_test_{uuid4()}"
# JSONL input instances for the text-classification batch prediction job.
GCS_SOURCE_URI = (
    "gs://ucaip-samples-test-output/inputs/batch_predict_TCN/tcn_inputs.jsonl"
)
# GCS prefix where prediction results are written.
GCS_OUTPUT_URI = "gs://ucaip-samples-test-output/"
@pytest.fixture(scope="function", autouse=True)
def teardown(teardown_batch_prediction_job):
    """Autouse fixture that runs after every test in this module.

    Cleanup itself is performed by the ``teardown_batch_prediction_job``
    fixture it depends on (presumably defined in conftest.py — verify);
    this wrapper only forces it to be active for each test.
    """
    yield
@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
# Creating AutoML Text Classification batch prediction job
def test_ucaip_generated_create_batch_prediction_tcn_sample(capsys, shared_state):
    """Run the sample against the permanent model, then record the created
    job's resource name (parsed from stdout) so the teardown fixture can
    delete it."""
    model_name = f"projects/{PROJECT_ID}/locations/{LOCATION}/models/{MODEL_ID}"
    create_batch_prediction_job_text_classification_sample.create_batch_prediction_job_text_classification_sample(
        project=PROJECT_ID,
        display_name=DISPLAY_NAME,
        model_name=model_name,
        gcs_source_uri=GCS_SOURCE_URI,
        gcs_destination_output_uri_prefix=GCS_OUTPUT_URI,
    )
    out, _ = capsys.readouterr()
    # Save resource name of the newly created batch prediction job
    shared_state["batch_prediction_job_name"] = helpers.get_name(out)
| googleapis/python-aiplatform | samples/snippets/job_service/create_batch_prediction_job_text_classification_sample_test.py | Python | apache-2.0 | 1,972 |
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponse
from bootcamp.tasks.models import Task
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from bootcamp.tasks.forms import TaskForm
from django.contrib.auth.decorators import login_required
from bootcamp.decorators import ajax_required
import markdown
from django.template.loader import render_to_string
import requests,json
from bootcamp.utils.loadconfig import get_vars
@login_required
def dispcheckipam(request):
    """Render the IPAM check landing page."""
    context = {'task': "task"}
    return render(request, 'ipam/checkipam.html', context)
@login_required()
def fetchipamcheck(request):
    """Render the page that triggers an IPAM check for the posted IP."""
    context = {'destip': request.POST.get('destip')}
    return render(request, 'ipam/runcheckipam.html', context)
def runipamcheck(request):
    """Query both ansibengine endpoints (EMC and MTN networks) for IPAM
    information about the posted subnet and return the combined result
    as a JSON response.

    BUG FIX: the original only assigned ``destip`` inside the POST branch
    but used it unconditionally afterwards, so any non-POST request raised
    NameError (HTTP 500). Non-POST requests now get an explicit 400.
    The unused ``baseurl`` POST field read was removed.
    """
    if request.method != 'POST':
        return HttpResponseBadRequest()

    emcbaseurl = get_vars('ansibengineemc')
    mtnbaseurl = get_vars('ansibenginemtn')
    destip = request.POST.get('destip')

    emcurl = emcbaseurl + '/ansibengine/api/v1.0/checkipam'
    mtnurl = mtnbaseurl + '/ansibengine/api/v1.0/checkipam'
    headers = {'content-type': 'application/json'}
    data = {'destip': destip}
    temp = {}

    # SECURITY NOTE(review): credentials are hardcoded here (and duplicated
    # across views); they should come from settings/secret storage instead.
    try:
        emcresponse = requests.post(emcurl, data=json.dumps(data), headers=headers, auth=('netbot','N#tB@t'))
        mtnresponse = requests.post(mtnurl, data=json.dumps(data), headers=headers, auth=('netbot','N#tB@t'))
        if not emcresponse.status_code == 200 or not mtnresponse.status_code == 200:
            temp['value']="Error !! Unexpected response. Please report this"
            return HttpResponse(json.dumps(temp), content_type = "application/json")
    except requests.exceptions.RequestException:
        temp['value']="Error connecting to API. Please report this"
        return HttpResponse(json.dumps(temp), content_type = "application/json")

    temp['value']="Entered Subnet :"+destip+"\n\n On EMC Network\n\n "+json.loads(emcresponse.text)['value']+"\n\n\n On MTN Network\n\n "+json.loads(mtnresponse.text)['value']+" "
    return HttpResponse(json.dumps(temp), content_type = "application/json")
| davismathew/netbot-django | bootcamp/ipam/views.py | Python | mit | 3,045 |
#!/usr/bin/env python
# Diffing two jar files can be a surprisingly subtle task. It is possible
# for jar files to differ in packaging, but have the same content (examples:
# when they are zipped using different compresssion levels, or if the
# timestamps for entries differ, or if the ordering inside the jar is
# different.
#
# A common trick to diff two jar files is to unzip their contents to a
# directory and recursively diff the two directories, but this is subtly
# tricky: zip files may have duplicate entries with the same name, in
# which case one must be careful not to allow later entries to overwrite
# earlier entries, since the default Java classloader will only see the
# first entry in the zipfile.
from __future__ import print_function
import argparse
import os
import re
import shutil
import subprocess
import sys
import tempfile
from zipfile import ZipFile
class JarDiffer(object):
    """Compares the contents of two jar files entry by entry.

    Non-class entries are compared with ``diff``; ``.class`` entries are
    disassembled with ``javap`` first (optionally sanitized) so packaging
    differences such as constant-pool numbering do not show up as diffs.
    Duplicate entries with the same name are compared pairwise.
    """

    def __init__(self, args):
        self.parse_args(args)
        # Scratch directory for extracted entries; removed on __exit__.
        self.tmpdir = tempfile.mkdtemp()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        shutil.rmtree(self.tmpdir)

    def parse_args(self, args):
        """Parse command-line arguments into the attributes run() uses."""
        parser = argparse.ArgumentParser(
            prog=args[0],
            description="Compares the contents of two jar files.",
            add_help=True,
        )
        parser.add_argument(
            "-d",
            "--diff-flags",
            help="Flags to pass to diff when comparing non-class files",
            default="-d",
        )
        parser.add_argument(
            "-p",
            "--javap-flags",
            help="Flags to pass to javap when comparing class files",
            default="-p -s -sysinfo",
        )
        parser.add_argument(
            "-r",
            "--raw-javap-output",
            help="Show raw (unsanitized) javap output.",
            action="store_true",
        )
        parser.add_argument(
            "-o",
            "--output",
            help="File to direct output (default: stdout)",
            # NOTE(review): bufsize 0 (unbuffered) is only valid for binary
            # streams on Python 3; this script targets Python 2.
            type=argparse.FileType("w", 0),
            default=sys.stdout,
        )
        parser.add_argument("jarfile", help="The jar files to compare", nargs=2)
        options = parser.parse_args(args[1:])
        self.jar1 = options.jarfile[0]
        self.jar2 = options.jarfile[1]
        self.javap_flags = [f for f in options.javap_flags.split() if f]
        self.should_sanitize_javap = not options.raw_javap_output
        self.diff_flags = [f for f in options.diff_flags.split() if f]
        self.output = options.output

    def write_contents(self, index, entry_name, entry_contents):
        """Write one jar entry under tmpdir/<index>/ and return the path
        relative to tmpdir (index keeps the two jars' entries apart)."""
        dest = os.path.join(self.tmpdir, str(index), entry_name)
        if not os.path.exists(os.path.dirname(dest)):
            os.makedirs(os.path.dirname(dest))
        with open(dest, "wb") as f:
            f.write(entry_contents)
        return os.path.join(str(index), entry_name)

    @staticmethod
    def sanitize_javap_output(content):
        """Blank out bytecode offsets ("12:") and constant-pool refs ("#34")
        so equivalent classes with different numbering compare equal."""
        content = re.sub(r"\d+:", "_:", content)
        content = re.sub(r"#\d+", "#_", content)
        return content

    def javap(self, index, entry_name, entry_contents):
        """Disassemble a .class entry with javap, sanitized by default."""
        flags = self.javap_flags
        filename = self.write_contents(index, entry_name, entry_contents)
        cmd = ["javap"] + flags + [filename]
        output = subprocess.check_output(cmd, cwd=self.tmpdir)
        if self.should_sanitize_javap:
            output = JarDiffer.sanitize_javap_output(output)
        return output

    def diff_content(self, entry_name, entry_contents1, entry_contents2):
        """Run diff on the two contents; return (returncode, stdout)."""
        filename1 = self.write_contents(1, entry_name, entry_contents1)
        filename2 = self.write_contents(2, entry_name, entry_contents2)
        cmd = ["diff"] + self.diff_flags + [filename1, filename2]
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=self.tmpdir)
        (stdoutdata, _) = p.communicate()
        return p.returncode, stdoutdata

    def diff_classes(self, entry_name, entry_contents1, entry_contents2):
        """Diff two .class entries via their javap disassembly."""
        javap1 = self.javap(1, entry_name, entry_contents1)
        javap2 = self.javap(2, entry_name, entry_contents2)
        return self.diff_content(entry_name + ".javap", javap1, javap2)

    def diff_zipinfos(self, zipfile1, zipinfo1, zipfile2, zipinfo2):
        """Compare one entry from each jar.

        Returns True when the entries are equivalent, False when they differ
        (printing the difference to self.output).

        BUG FIX: this used to return either True or diff's raw return code,
        so run()'s truthiness check treated "no differences" (0) as a
        mismatch and real differences (1) as a match.
        """
        contents1 = zipfile1.read(zipinfo1)
        contents2 = zipfile2.read(zipinfo2)
        if contents1 == contents2:
            return True
        filename = zipinfo1.filename
        if filename.endswith(".class"):
            returncode, out = self.diff_classes(filename, contents1, contents2)
        else:
            returncode, out = self.diff_content(filename, contents1, contents2)
        if returncode != 0:
            print("Files differ: %s\n%s" % (filename, out), file=self.output)
        return returncode == 0

    @staticmethod
    def zipinfo_by_name(zipfile):
        """Group a ZipFile's infolist by entry name, preserving duplicates."""
        result = {}
        for zipinfo in zipfile.infolist():
            result.setdefault(zipinfo.filename, []).append(zipinfo)
        return result

    def run(self):
        """Compare the two jars; return 0 when equivalent, 1 otherwise."""
        return_code = 0
        absjar1 = os.path.abspath(self.jar1)
        absjar2 = os.path.abspath(self.jar2)
        print("Comparing:\n1: %s\n2: %s" % (absjar1, absjar2), file=sys.stderr)
        with ZipFile(absjar1) as zipfile1, ZipFile(absjar2) as zipfile2:
            zipinfos1_by_name = JarDiffer.zipinfo_by_name(zipfile1)
            zipinfos2_by_name = JarDiffer.zipinfo_by_name(zipfile2)
            # Set union works on both Python 2 and 3; the previous
            # keys() + keys() concatenation raises TypeError on Python 3.
            all_files = sorted(set(zipinfos1_by_name) | set(zipinfos2_by_name))
            for name in all_files:
                zipinfos1 = zipinfos1_by_name.get(name, [])
                zipinfos2 = zipinfos2_by_name.get(name, [])
                # Walk duplicate entries pairwise; leftovers are one-sided.
                while zipinfos1 or zipinfos2:
                    if not zipinfos1:
                        print("Only in %s: %s" % (self.jar2, name), file=self.output)
                        zipinfos2.pop()
                        return_code = 1
                    elif not zipinfos2:
                        print("Only in %s: %s" % (self.jar1, name), file=self.output)
                        zipinfos1.pop()
                        return_code = 1
                    elif not self.diff_zipinfos(
                        zipfile1, zipinfos1.pop(), zipfile2, zipinfos2.pop()
                    ):
                        return_code = 1
        return return_code
if __name__ == "__main__":
    with JarDiffer(sys.argv) as differ:
        # BUG FIX: run()'s return code was computed but discarded, so the
        # process always exited 0. A diff tool must exit nonzero on
        # differences for scripted use.
        sys.exit(differ.run())
| shs96c/buck | scripts/jardiffer.py | Python | apache-2.0 | 6,484 |
# -*- coding: utf-8 -*-
"""
:mod:`traceview.formatters`
This module contains functions used to format TraceView API results.
"""
from collections import namedtuple
def identity(results):
    """Pass-through formatter: return the API results unchanged."""
    return results
def tuplify(results, class_name='Result'):
    """ Formats API results into :class:`namedtuple` objects. Supports
    tuplifying results that are either timeseries data or objects (dicts).

    :param results: TraceView API results.
    :param str class_name: (optional) Prefix string for name of the namedtuple.
    """
    tuple_name = '{name}Tuple'.format(name=class_name)

    # timeseries payload: {'fields': [...], 'items': [[...], ...]}
    if 'fields' in results and 'items' in results:
        row_type = namedtuple(tuple_name, results['fields'])
        return [row_type(*row) for row in results['items']]

    # plain object payload: any mapping
    if hasattr(results, 'keys'):
        record_type = namedtuple(tuple_name, results.keys())
        return record_type(**results)

    # anything else passes through untouched
    return results
def _tuplify_timeseries(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results['fields'])
return [nt(*item) for item in results['items']]
def _tuplify_dict(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results.keys())
return nt(**results)
| danriti/python-traceview | traceview/formatters.py | Python | mit | 1,451 |
# coding=utf-8
# Author: Dustyn Gibson <miigotu@gmail.com>
#
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import re
from requests.utils import dict_from_cookiejar
from urllib import quote_plus
from sickbeard import logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size, try_int
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class HDTorrentsProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
    """Torrent provider for the private tracker hd-torrents.org."""

    def __init__(self):

        TorrentProvider.__init__(self, "HDTorrents")

        # Credentials and result filters; populated from user settings by
        # the surrounding application (None until configured).
        self.username = None
        self.password = None
        self.minseed = None
        self.minleech = None
        self.freeleech = None

        self.urls = {'base_url': 'https://hd-torrents.org',
                     'login': 'https://hd-torrents.org/login.php',
                     'search': 'https://hd-torrents.org/torrents.php?search=%s&active=1&options=0%s',
                     'rss': 'https://hd-torrents.org/torrents.php?search=&active=1&options=0%s',
                     'home': 'https://hd-torrents.org/%s'}

        self.url = self.urls['base_url']

        # Site-specific category ids appended to search/RSS URLs.
        self.categories = "&category[]=59&category[]=60&category[]=30&category[]=38&category[]=65"

        self.proper_strings = ['PROPER', 'REPACK']

        self.cache = tvcache.TVCache(self, min_time=30)  # only poll HDTorrents every 30 minutes max

    def _check_auth(self):
        """Warn when credentials are missing.

        NOTE(review): this returns True even without credentials — it only
        logs a warning; confirm that is intentional for this provider.
        """

        if not self.username or not self.password:
            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)

        return True

    def login(self):
        """Log in to the tracker; return True on success.

        An existing session cookie short-circuits the login POST.
        """
        if any(dict_from_cookiejar(self.session.cookies).values()):
            return True

        login_params = {'uid': self.username,
                        'pwd': self.password,
                        'submit': 'Confirm'}

        response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
        if not response:
            logger.log(u"Unable to connect to provider", logger.WARNING)
            return False

        # The login page echoes this text when authentication failed.
        if re.search('You need cookies enabled to log in.', response):
            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
            return False

        return True

    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
        """Search the tracker and return a list of result dicts.

        :param search_strings: mapping of search mode (e.g. 'RSS') to a list
            of query strings for that mode.
        :param age: unused here; part of the provider interface.
        :param ep_obj: unused here; part of the provider interface.
        :return: list of dicts with title/link/size/seeders/leechers/hash,
            sorted by seeders descending within each mode.
        """
        results = []
        if not self.login():
            return results

        for mode in search_strings:
            items = []
            logger.log(u"Search Mode: {0}".format(mode), logger.DEBUG)
            for search_string in search_strings[mode]:

                if mode != 'RSS':
                    search_url = self.urls['search'] % (quote_plus(search_string), self.categories)
                    logger.log(u"Search string: {0}".format
                               (search_string.decode("utf-8")), logger.DEBUG)
                else:
                    search_url = self.urls['rss'] % self.categories

                # active=5 restricts results to freeleech torrents.
                if self.freeleech:
                    search_url = search_url.replace('active=1', 'active=5')

                data = self.get_url(search_url, returns='text')
                if not data or 'please try later' in data:
                    logger.log(u"No data returned from provider", logger.DEBUG)
                    continue

                if data.find('No torrents here') != -1:
                    logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                    continue

                # Search result page contains some invalid html that prevents html parser from returning all data.
                # We cut everything before the table that contains the data we are interested in thus eliminating
                # the invalid html portions
                try:
                    index = data.lower().index('<table class="mainblockcontenttt"')
                except ValueError:
                    logger.log(u"Could not find table of torrents mainblockcontenttt", logger.DEBUG)
                    continue

                data = data[index:]

                with BS4Parser(data, 'html5lib') as html:
                    if not html:
                        logger.log(u"No html data parsed from provider", logger.DEBUG)
                        continue

                    torrent_rows = []
                    torrent_table = html.find('table', class_='mainblockcontenttt')
                    if torrent_table:
                        torrent_rows = torrent_table('tr')

                    if not torrent_rows:
                        logger.log(u"Could not find results in returned data", logger.DEBUG)
                        continue

                    # Cat., Active, Filename, Dl, Wl, Added, Size, Uploader, S, L, C
                    labels = [label.a.get_text(strip=True) if label.a else label.get_text(strip=True) for label in torrent_rows[0]('td')]

                    # Skip column headers
                    for result in torrent_rows[1:]:
                        try:
                            cells = result.findChildren('td')[:len(labels)]
                            if len(cells) < len(labels):
                                continue

                            title = cells[labels.index(u'Filename')].a.get_text(strip=True)
                            seeders = try_int(cells[labels.index(u'S')].get_text(strip=True))
                            leechers = try_int(cells[labels.index(u'L')].get_text(strip=True))
                            torrent_size = cells[labels.index(u'Size')].get_text()
                            size = convert_size(torrent_size) or -1
                            download_url = self.url + '/' + cells[labels.index(u'Dl')].a['href']
                        except (AttributeError, TypeError, KeyError, ValueError, IndexError):
                            # Malformed row: skip it rather than abort the page.
                            continue

                        if not all([title, download_url]):
                            continue

                        # Filter unseeded torrent
                        # NOTE(review): minseed/minleech may be None until
                        # configured; this comparison relies on Python 2
                        # None-vs-int ordering.
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format
                                           (title, seeders, leechers), logger.DEBUG)
                            continue

                        item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
                        if mode != 'RSS':
                            logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format(title, seeders, leechers), logger.DEBUG)

                        items.append(item)

            # For each search mode sort all the items by seeders if available
            items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
            results += items

        return results
# Module-level singleton — presumably discovered by the application's
# provider loader via this well-known name; verify against the loader code.
provider = HDTorrentsProvider()
| Maximilian-Reuter/SickRage-1 | sickbeard/providers/hdtorrents.py | Python | gpl-3.0 | 7,743 |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def query_external_sheets_permanent_table(dataset_id):
    """Create a permanent BigQuery table backed by a Google Sheet and query it.

    The [START]/[END] region tags delimit the code published as an official
    sample; keep them intact.

    Args:
        dataset_id: ID of the dataset ("your-project.your_dataset") in which
            the external table is created.
    """
    # [START bigquery_query_external_sheets_perm]
    from google.cloud import bigquery
    import google.auth

    # Create credentials with Drive & BigQuery API scopes.
    # Both APIs must be enabled for your project before running this code.
    #
    # If you are using credentials from gcloud, you must authorize the
    # application first with the following command:
    #
    # gcloud auth application-default login \
    #   --scopes=https://www.googleapis.com/auth/drive,https://www.googleapis.com/auth/cloud-platform
    credentials, project = google.auth.default(
        scopes=[
            "https://www.googleapis.com/auth/drive",
            "https://www.googleapis.com/auth/bigquery",
        ]
    )

    # Construct a BigQuery client object.
    client = bigquery.Client(credentials=credentials, project=project)

    # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
    # dataset_id = "your-project.your_dataset"

    # Configure the external data source.
    dataset = client.get_dataset(dataset_id)
    table_id = "us_states"
    schema = [
        bigquery.SchemaField("name", "STRING"),
        bigquery.SchemaField("post_abbr", "STRING"),
    ]
    table = bigquery.Table(dataset.table(table_id), schema=schema)
    external_config = bigquery.ExternalConfig("GOOGLE_SHEETS")

    # Use a shareable link or grant viewing access to the email address you
    # used to authenticate with BigQuery (this example Sheet is public).
    sheet_url = (
        "https://docs.google.com/spreadsheets"
        "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing"
    )
    external_config.source_uris = [sheet_url]
    external_config.options.skip_leading_rows = 1  # Optionally skip header row.
    external_config.options.range = (
        "us-states!A20:B49"  # Optionally set range of the sheet to query from.
    )
    table.external_data_configuration = external_config

    # Create a permanent table linked to the Sheets file.
    table = client.create_table(table)  # Make an API request.

    # Example query to find states starting with "W".
    sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id)

    query_job = client.query(sql)  # Make an API request.

    # Wait for the query to complete.
    w_states = list(query_job)
    print(
        "There are {} states with names starting with W in the selected range.".format(
            len(w_states)
        )
    )
    # [END bigquery_query_external_sheets_perm]
| googleapis/python-bigquery | samples/query_external_sheets_permanent_table.py | Python | apache-2.0 | 3,135 |
import dbus
import nm
import nmdevice
# D-Bus interface name used to read NetworkManager wired-device properties.
DBUS_INTERFACE_DEVICE_WIRED = "org.freedesktop.NetworkManager.Device.Wired"
class WiredDevice(nmdevice.Device):
    """A NetworkManager wired (ethernet) device exposed over D-Bus."""

    def __init__(self, path):
        nmdevice.Device.__init__(self, path)

    def get_hw_address(self):
        """Return the device's hardware (MAC) address property."""
        return self.get_property(DBUS_INTERFACE_DEVICE_WIRED, "HwAddress")

    def get_speed(self):
        """Return the device's Speed property."""
        return self.get_property(DBUS_INTERFACE_DEVICE_WIRED, "Speed")

    def __str__(self):
        return "ethernet"

    def get_properties(self):
        """Base device properties plus the wired-specific ones."""
        props = nmdevice.Device.get_properties(self)
        props.extend([
            ("HWAddress", self.get_hw_address()),
            ("Speed", str(self.get_speed())),
        ])
        return props
# Register this wrapper for NetworkManager device type 1 (wired/ethernet).
nm.register_device_type(1, WiredDevice)
| mvidner/nmcli | src/nmwireddevicecmds.py | Python | gpl-2.0 | 743 |
# -*- coding: utf-8 -*-
##Copyright (C) [2003-2013] [Jürgen Hamel, D-32584 Löhne]
##This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as
##published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version.
##This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
##warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
##for more details.
##You should have received a copy of the GNU General Public License along with this program; if not, write to the
##Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
# Test with new environment 20130917 v1
import sys
from types import *
import pygtk
pygtk.require('2.0')
import gtk
#import gtk.glade
import gobject
import locale
from locale import gettext as _
from cuon.Databases.SingleData import SingleData
import SingleArticle
import SingleArticlePurchase
import SingleArticleSale
import SingleArticleWebshop
import SingleArticleStock
import SingleArticleParts
import SingleArticleWebshop
import SingleArticleBarcode
import logging
from cuon.Windows.chooseWindows import chooseWindows
import cuon.Addresses.addresses
import cuon.Addresses.SingleAddress
import cuon.DMS.documentTools
import cuon.DMS.dms
import cuon.Articles.lists_articles_number1
import cuon.Articles.pickles_articles
import cuon.Articles.materialgroup
import cuon.Articles.SingleMaterialgroups
import cuon.PrefsFinance.SinglePrefsFinanceVat
# Assosiated
try:
import cuon.Garden.botany
import cuon.Garden.SingleBotany
except:
print 'No botany module found'
class articleswindow(chooseWindows):
def __init__(self, allTables):
chooseWindows.__init__(self)
self.loadGlade('articles.xml', 'ArticlesMainwindow')
#self.win1 = self.getWidget('ArticlesMainwindow')
#self.win1.maximize()
self.setStatusBar('vb_main')
self.oDocumentTools = cuon.DMS.documentTools.documentTools()
self.ModulNumber = self.MN['Articles']
self.allTables = allTables
self.singleArticleID = -1
self.singleArticle = SingleArticle.SingleArticle(allTables)
self.singleArticleForParts = SingleArticle.SingleArticle(allTables)
self.singleArticlePurchase = SingleArticlePurchase.SingleArticlePurchase(allTables)
self.singleArticleParts = SingleArticleParts.SingleArticleParts(allTables)
self.singleArticleSales = SingleArticleSale.SingleArticleSale(allTables)
self.singleArticleWebshop = SingleArticleWebshop.SingleArticleWebshop(allTables)
self.singleArticleStock = SingleArticleStock.SingleArticleStock(allTables)
self.singleArticleBarcode = SingleArticleBarcode.SingleArticleBarcode(allTables)
self.singleAddress = cuon.Addresses.SingleAddress.SingleAddress(allTables)
try:
self.singleBotany = cuon.Garden.SingleBotany.SingleBotany(allTables)
except:
pass
self.singlePrefsFinanceVat = cuon.PrefsFinance.SinglePrefsFinanceVat.SinglePrefsFinanceVat(allTables)
self.singleMaterialGroup = cuon.Articles.SingleMaterialgroups.SingleMaterialgroups(allTables)
# self.singleArticle.loadTable()
self.EntriesArticles = 'articles.xml'
self.EntriesArticlesPurchase = 'articles_purchase.xml'
self.EntriesArticlesParts = 'articles_parts.xml'
self.EntriesArticlesSales = 'articles_sales.xml'
self.EntriesArticlesWebshop = 'articles_webshop.xml'
self.EntriesArticlesStock = 'articles_stock.xml'
self.EntriesArticlesBarcode = 'articles_barcode.xml'
#singleArticle
self.loadEntries(self.EntriesArticles)
self.singleArticle.setEntries(self.getDataEntries( self.EntriesArticles) )
self.singleArticle.setGladeXml(self.xml)
liFields = self.rpc.callRP('Misc.getTreeInfo', "articles",self.dicUser)
print "liFields = ", liFields
self.singleArticle.setTree(self.getWidget('tv_article') )
if liFields:
self.liSearchFields = liFields[0]
self.singleArticle.setTreeFields(liFields[1])
self.singleArticle.setStore(self.getListStore(liFields[2]))
self.singleArticle.setListHeader(liFields[3])
self.singleArticle.setTreeOrder(liFields[4][0])
else:
self.singleArticle.setTreeFields( ['number', 'designation', "fct_getValueAsCurrency(sellingprice1) as price1", "fct_getValueAsCurrency(sellingprice2) as price2", "fct_getValueAsCurrency(sellingprice3) as price3", "fct_getValueAsCurrency(sellingprice4) as price4", 'unit', 'weight', ] )
# self.singleArticle.setStore( gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_UINT) )
self.singleArticle.setStore( gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_UINT) )
self.singleArticle.setTreeOrder('number, designation')
self.singleArticle.setListHeader(['number', 'designation', 'preis 1', 'preis 2', 'preis 3', 'preis 4', 'Einheit', 'Gewicht', ])
self.singleArticle.setTreeOrder('number, designation')
#singleArticleParts
self.loadEntries(self.EntriesArticlesParts)
self.singleArticleParts.setEntries(self.getDataEntries( self.EntriesArticlesParts) )
self.singleArticleParts.setGladeXml(self.xml)
#self.singleArticleParts.setTreeFields( ['part_id','number','articles.designation', 'quantities'] )
#self.singleArticleParts.setListHeader(['Article ID', 'Article Number', 'Article Designation', 'Quantities' ])
#self.singleArticleParts.setStore( gtk.ListStore(gobject.TYPE_UINT, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_FLOAT, gobject.TYPE_UINT) )
self.singleArticleParts.setTreeFields( ['part_id','quantities','articles.number as number' ,'articles.designation as ardesignation', 'articles_parts_list.designation as padesignation'] )
self.singleArticleParts.setListHeader(['Article ID', 'Quantities' , 'Article Number' , 'Article Designation', 'Part Designation'])
self.singleArticleParts.setStore( gtk.ListStore(gobject.TYPE_UINT,gobject.TYPE_FLOAT, gobject.TYPE_STRING, gobject.TYPE_STRING , gobject.TYPE_STRING , gobject.TYPE_UINT) )
self.singleArticleParts.setTreeOrder('part_id')
# self.singleArticleParts.setListHeader([''])
self.singleArticleParts.sWhere ='where article_id = ' + `self.singleArticle.ID` + ' and part_id = articles.id '
self.singleArticleParts.setTree(self.getWidget('tv_parts') )
#singleArticlePurchase
self.loadEntries(self.EntriesArticlesPurchase)
self.singleArticlePurchase.setEntries(self.getDataEntries( self.EntriesArticlesPurchase) )
self.singleArticlePurchase.setGladeXml(self.xml)
self.singleArticlePurchase.setTreeFields( ['articles_id','vendorsnumber', 'vendorsdesignation', 'unitprice', 'last_date'] )
self.singleArticlePurchase.setListHeader(['Article', 'Vendor ID','Designation','Price', 'Last Date' ])
self.singleArticlePurchase.setStore( gtk.ListStore(gobject.TYPE_UINT, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_UINT) )
self.singleArticlePurchase.setTreeOrder('unitprice asc,vendorsnumber')
# self.singleArticlePurchase.setListHeader([''])
self.singleArticlePurchase.sWhere ='where articles_id = ' + `self.singleArticle.ID`
self.singleArticlePurchase.setTree(self.getWidget('tv_purchase') )
#singleArticleSales
self.loadEntries(self.EntriesArticlesSales)
self.singleArticleSales.setEntries(self.getDataEntries( self.EntriesArticlesSales) )
self.singleArticleSales.setGladeXml(self.xml)
self.singleArticleSales.setTreeFields( ['designation'] )
self.singleArticleSales.setStore( gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_UINT) )
self.singleArticleSales.setTreeOrder('designation')
self.singleArticleSales.setListHeader([_('Designation')])
self.singleArticleSales.sWhere ="where articles_number = '" + `self.singleArticle.ID` + "' "
self.singleArticleSales.setTree(self.getWidget('tv_sale') )
#singleArticleWebshop
self.loadEntries(self.EntriesArticlesWebshop)
self.singleArticleWebshop.setEntries(self.getDataEntries( self.EntriesArticlesWebshop) )
self.singleArticleWebshop.setGladeXml(self.xml)
## self.singleArticleWebshop.setTreeFields( ['articles_number'] )
## self.singleArticleWebshop.setStore( gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_UINT) )
## self.singleArticleWebshop.setTreeOrder('articles_number')
## self.singleArticleWebshop.setListHeader([_('article')])
self.singleArticleWebshop.sWhere ='where articles_number = ' + `self.singleArticle.ID`
#self.singleArticleWebshop.setTree(self.xml.get_widget('tree1') )
#singleArticleStock
self.loadEntries(self.EntriesArticlesStock)
self.singleArticleStock.setEntries(self.getDataEntries( self.EntriesArticlesStock ))
self.singleArticleStock.setGladeXml(self.xml)
## self.singleArticleStock.setTreeFields( ['designation'] )
## self.singleArticleStock.setStore( gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_UINT) )
## self.singleArticleStock.setTreeOrder('designation')
## self.singleArticleStock.setListHeader([_('Designation')])
self.singleArticleStock.sWhere ='where articles_id = ' + `self.singleArticle.ID`
#self.singleArticleStock.setTree(self.xml.get_widget('tree1') )
#singleArticleBarcode
self.loadEntries(self.EntriesArticlesBarcode)
self.singleArticleBarcode.setEntries(self.getDataEntries( self.EntriesArticlesBarcode ))
self.singleArticleBarcode.setGladeXml(self.xml)
self.singleArticleBarcode.sWhere ='where articles_id = ' + `self.singleArticle.ID`
#self.singleArticleStock.setTree(self.xml.get_widget('tree1') )
# Menu-items
self.initMenuItems()
# All window items
self.addEnabledMenuItems('window','quit1', 'q')
# Close Menus for Tab
self.addEnabledMenuItems('tabs','mi_article1')
self.addEnabledMenuItems('tabs','mi_purchase1')
self.addEnabledMenuItems('tabs','mi_sales1')
# seperate Menus
self.addEnabledMenuItems('article','mi_article1')
self.addEnabledMenuItems('purchase','mi_purchase1')
self.addEnabledMenuItems('sales','mi_sales1')
self.addEnabledMenuItems('sales','parts_list1')
# enabledMenues for Article
self.addEnabledMenuItems('editArticle','new1', self.dicUserKeys['articles_new'])
self.addEnabledMenuItems('editArticle','delete1', self.dicUserKeys['articles_delete'])
self.addEnabledMenuItems('editArticle','print1', self.dicUserKeys['articles_print'])
self.addEnabledMenuItems('editArticle','edit1',self.dicUserKeys['articles_edit'])
# enabledMenues for ArticleParts
self.addEnabledMenuItems('editArticleParts','PartsListNew', self.dicUserKeys['articles_new'])
self.addEnabledMenuItems('editPArticlearts','PartsListDelete')
self.addEnabledMenuItems('editArticleParts','PartsListEdit', self.dicUserKeys['articles_edit'])
# enabledMenues for ArticlePurchase
self.addEnabledMenuItems('editArticlePurchase','PurchaseNew1', self.dicUserKeys['articles_purchase_new'])
self.addEnabledMenuItems('editArticlePurchase','PurchaseDelete1')
self.addEnabledMenuItems('editArticlePurchase','PurchaseEdit1', self.dicUserKeys['articles_purchase_edit'])
# enabledMenues for ArticleSales
self.addEnabledMenuItems('editArticleSales','SalesNew1')
self.addEnabledMenuItems('editArticleSales','SalesDelete1')
self.addEnabledMenuItems('editArticleSales','SalesEdit1')
# enabledMenues for ArticleWebshop
self.addEnabledMenuItems('editArticleWebshop','WebshopClear1')
self.addEnabledMenuItems('editArticleWebshop','WebshopEdit1')
# enabledMenues for ArticleStock
self.addEnabledMenuItems('editArticleStock','StockClear1')
self.addEnabledMenuItems('editArticleStock','StockEdit1')
# enabledMenues for ArticleBarcode
#self.addEnabledMenuItems('editArticleBarcode','StockClear1')
self.addEnabledMenuItems('editArticleBarcode','BarcodeEdit')
# enabledMenues for Save
self.addEnabledMenuItems('editSave','save1', self.dicUserKeys['articles_save'])
self.addEnabledMenuItems('editSave','PartsListSave', self.dicUserKeys['articles_save'])
self.addEnabledMenuItems('editSave','PurchaseSave1', self.dicUserKeys['articles_save'])
self.addEnabledMenuItems('editSave','SalesSave1', self.dicUserKeys['articles_save'])
self.addEnabledMenuItems('editSave','WebshopSave1', self.dicUserKeys['articles_save'])
self.addEnabledMenuItems('editSave','StockSave1', self.dicUserKeys['articles_save'])
self.addEnabledMenuItems('editSave','BarcodeSave', self.dicUserKeys['articles_save'])
# tabs from notebook
self.tabArticle = 0
self.tabParts = 1
self.tabPurchase = 2
self.tabSales = 3
self.tabWebshop = 4
self.tabStock = 5
self.tabBarcode = 6
self.textbufferNotes, self.viewNotes = self.getNotesEditor()
cbAscociatedWith = self.getWidget('cbAssociatedWith' )
if cbAscociatedWith:
liststore = gtk.ListStore(str)
liststore2 = gtk.ListStore(str)
for ac in ["None","Botany"]:
liststore.append([ac])
cbAscociatedWith.set_model(liststore)
self.setComboBoxTextColumn(cbAscociatedWith)
cbAscociatedWith.show()
Scrolledwindow = self.getWidget('scArticleNotes')
Scrolledwindow.add(self.viewNotes)
self.viewNotes.show_all()
Scrolledwindow.show_all()
# set the widget
self.singleArticle.NotesArticles = self.textbufferNotes
# start
self.tabChanged()
# enabled menus for article
self.addEnabledMenuItems('editArticle','new1')
self.addEnabledMenuItems('editArticle','clear1')
self.addEnabledMenuItems('editArticle','print1')
# enabled menus for article_purchase
self.addEnabledMenuItems('editArticlePurchase','PurchaseNew1')
self.addEnabledMenuItems('editArticlePurchase','PurchaseClear1')
self.win1.add_accel_group(self.accel_group)
#Menu File
    def on_quit1_activate(self, event):
        """Menu handler: close the articles window."""
        print "exit articles v2"
        self.closeWindow()
#Menu Article
    def on_save1_activate(self, event):
        """Menu handler: persist the current article, then leave edit mode."""
        print "save articles v2"
        self.singleArticle.save()
        # Make the article entry fields read-only again after saving.
        self.setEntriesEditable(self.EntriesArticles, False)
        self.tabChanged()
    def on_new1_activate(self, event):
        """Menu handler: start a new article record and unlock the entry fields."""
        print "new articles v2"
        self.singleArticle.newRecord()
        self.setEntriesEditable(self.EntriesArticles, True)
    def on_edit1_activate(self, event):
        """Menu handler: unlock the article entry fields for editing."""
        self.setEntriesEditable(self.EntriesArticles, True)
    def on_delete1_activate(self, event):
        """Menu handler: delete the currently selected article record."""
        print "delete articles v2"
        self.singleArticle.deleteRecord()
#Menu Parts
    def on_parts_list_save_activate(self, event):
        """Menu handler: save the parts-list entry, linked to the current article."""
        print "save Parts articles v2"
        # Attach the part record to the article shown in the main tab.
        self.singleArticleParts.articlesID = self.singleArticle.ID
        self.singleArticleParts.save()
        self.setEntriesEditable(self.EntriesArticlesParts, False)
        self.tabChanged()
    def on_parts_list_new_activate(self, event):
        """Menu handler: start a new parts-list record and unlock its fields."""
        print "new Parts articles v2"
        self.singleArticleParts.newRecord()
        self.setEntriesEditable(self.EntriesArticlesParts, True)
    def on_parts_list_edit_activate(self, event):
        """Menu handler: unlock the parts-list entry fields for editing."""
        self.setEntriesEditable(self.EntriesArticlesParts, True)
    def on_parts_list_delete_activate(self, event):
        """Menu handler: delete the selected parts-list record."""
        print "delete Parts articles v2"
        self.singleArticleParts.deleteRecord()
#Menu Purchase
def on_PurchaseSave1_activate(self, event):
print "save Partner articles v2"
self.singleArticlePurchase.articlesID = self.singleArticle.ID
self.singleArticlePurchase.save()
self.setEntriesEditable(self.EntriesArticlesPurchase, False)
self.tabChanged()
    def on_PurchaseNew1_activate(self, event):
        """Menu handler: start a new purchase record and unlock its fields."""
        print "new Purchase articles v2"
        self.singleArticlePurchase.newRecord()
        self.setEntriesEditable(self.EntriesArticlesPurchase, True)
    def on_PurchaseEdit1_activate(self, event):
        """Menu handler: unlock the purchase entry fields for editing."""
        self.setEntriesEditable(self.EntriesArticlesPurchase, True)
    def on_PurchaseDelete1_activate(self, event):
        """Menu handler: delete the selected purchase record."""
        print "delete Purchase articles v2"
        self.singleArticlePurchase.deleteRecord()
#Articles Sales
    def on_SalesSave1_activate(self, event):
        """Menu handler: save the sales entry, linked to the current article."""
        print "save Sales articles v2"
        # NOTE(review): the article *ID* is stored into articlesNumber here,
        # mirroring the "articles_number" WHERE filters elsewhere -- confirm
        # whether sales records really key on the id rather than the number.
        self.singleArticleSales.articlesNumber = self.singleArticle.ID
        self.singleArticleSales.save()
        self.setEntriesEditable(self.EntriesArticlesSales, False)
        self.tabChanged()
def on_SalesNew1_activate(self, event):
print "new Partner articles v2"
self.singleArticleSales.newRecord()
self.setEntriesEditable(self.EntriesArticlesSales, True)
    def on_SalesEdit1_activate(self, event):
        """Menu handler: unlock the sales entry fields for editing."""
        self.setEntriesEditable(self.EntriesArticlesSales, True)
    def on_SalesDelete1_activate(self, event):
        """Menu handler: delete the selected sales record."""
        print "delete Sales articles v2"
        self.singleArticleSales.deleteRecord()
#Articles Webshop
    def on_WebshopSave1_activate(self, event):
        """Menu handler: save the webshop entry, linked to the current article."""
        print "save articles Webshop v2"
        print "article ID = ", self.singleArticle.ID
        # Attach the webshop record to the article shown in the main tab.
        self.singleArticleWebshop.articlesNumber = self.singleArticle.ID
        self.singleArticleWebshop.save()
        self.setEntriesEditable(self.EntriesArticlesWebshop, False)
        self.tabChanged()
def on_WebshopNew1_activate(self, event):
print "new Partner articles v2"
self.singleArticleWebshop.newRecord()
self.setEntriesEditable(self.EntriesArticlesWebshop, True)
    def on_WebshopEdit1_activate(self, event):
        """Menu handler: unlock the webshop entry fields for editing."""
        self.setEntriesEditable(self.EntriesArticlesWebshop, True)
def on_WebshopClear1_activate(self, event):
print "delete Partner articles v2"
self.singleArticleWebshop.deleteRecord()
    def on_bChooseCategory_clicked(self, event):
        """Button handler: choose a category -- not implemented yet."""
        pass
#Articles Stock
    def on_StockSave1_activate(self, event):
        """Menu handler: save the stock entry, linked to the current article."""
        print "save Stock articles v2"
        # Attach the stock record to the article shown in the main tab.
        self.singleArticleStock.articlesID = self.singleArticle.ID
        self.singleArticleStock.save()
        self.setEntriesEditable(self.EntriesArticlesStock, False)
        self.tabChanged()
def on_StockNew1_activate(self, event):
print "new Partner articles v2"
self.singleArticleStock.newRecord()
self.setEntriesEditable(self.EntriesArticlesStock, True)
    def on_StockEdit1_activate(self, event):
        """Menu handler: unlock the stock entry fields for editing."""
        self.setEntriesEditable(self.EntriesArticlesStock, True)
def on_StockClear1_activate(self, event):
print "delete Partner articles v2"
self.singleArticleStock.deleteRecord()
# Articles barcode
    def on_BarcodeEdit_activate(self, event):
        """Menu handler: unlock the barcode entry fields for editing."""
        print "edit barcode"
        self.setEntriesEditable(self.EntriesArticlesBarcode, True)
    def on_BarcodeSave_activate(self, event):
        """Menu handler: save the barcode entry, linked to the current article."""
        print "save Barcode articles v2", self.singleArticleBarcode.ID
        # Attach the barcode record to the article shown in the main tab.
        self.singleArticleBarcode.articlesID = self.singleArticle.ID
        self.singleArticleBarcode.save()
        self.setEntriesEditable(self.EntriesArticlesBarcode, False)
#self.tabChanged()
# Menu Lists
    def on_liArticlesNumber1_activate(self, event):
        """Menu handler: generate the article-number list report (PDF)."""
        self.out( "lists startet")
        Pdf = cuon.Articles.lists_articles_number1.lists_articles_number1()
#Menu pickles_articles
    # Label-printing menu handlers: each generates a pickles_articles report.
    # First argument = labels per row (1-4); optional second argument selects
    # a special layout code (e.g. 'sp101') or a barcode layout.
    def on_one_standard1_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(1)
    def on_sp101_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(1, 'sp101')
    def on_sp102_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(1, 'sp102')
    def on_two_standard1_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(2)
    def on_sp201_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(2, 'sp201')
    def on_sp202_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(2, 'sp202')
    def on_three_standard1_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(3)
    def on_sp301_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(3, 'sp301')
    def on_sp302_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(3, 'sp302')
    def on_four_standard1_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(4)
    def on_sp401_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(4, 'sp401')
    def on_sp402_activate(self, event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(4, 'sp402')
    def on_Barcode_sp1_activate(self,event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(1, 'barcode1_sp1')
    def on_Barcode_sp2_activate(self,event):
        pdf = cuon.Articles.pickles_articles.pickles_articles(1, 'barcode1_sp2')
def on_tbNew_clicked(self, event):
if self.tabOption == self.tabArticle:
self.on_new1_activate(event)
if self.tabOption == self.tabParts:
self.on_parts_list_new_activate(event)
elif self.tabOption == self.tabPurchase:
self.on_PurchaseNew1_activate(event)
elif self.tabOption == self.tabSales:
self.on_SalesNew1_activate(event)
elif self.tabOption == self.tabWebshop:
self.on_WebshopNew1_activate(event)
elif self.tabOption == self.tabStock:
self.on_StockNew1_activate(event)
def on_tbEdit_clicked(self, event):
if self.tabOption == self.tabArticle:
self.on_edit1_activate(event)
if self.tabOption == self.tabParts:
self.on_parts_list_edit_activate(event)
elif self.tabOption == self.tabPurchase:
self.on_PurchaseEdit1_activate(event)
elif self.tabOption == self.tabSales:
self.on_SalesEdit1_activate(event)
elif self.tabOption == self.tabWebshop:
self.on_WebshopEdit1_activate(event)
elif self.tabOption == self.tabStock:
self.on_StockEdit1_activate(event)
elif self.tabOption == self.tabBarcode:
self.on_BarcodeEdit_activate(event)
def on_tbSave_clicked(self, event):
if self.tabOption == self.tabArticle:
self.on_save1_activate(event)
if self.tabOption == self.tabParts:
self.on_parts_list_save_activate(event)
elif self.tabOption == self.tabPurchase:
self.on_PurchaseSave1_activate(event)
elif self.tabOption == self.tabSales:
self.on_SalesSave1_activate(event)
elif self.tabOption == self.tabWebshop:
self.on_WebshopSave1_activate(event)
elif self.tabOption == self.tabStock:
self.on_StockSave1_activate(event)
elif self.tabOption == self.tabBarcode:
self.on_BarcodeSave_activate(event)
    def on_tbDuplicate_clicked(self, event):
        """Toolbar handler: duplicate the current article via the server RPC."""
        print 'Duplicate this article'
        # NOTE(review): newID is never used; the tree is not refreshed after
        # duplication -- confirm whether a refreshTree() call is missing.
        newID = self.rpc.callRP("Article.duplicateArticle",self.singleArticle.ID , self.dicUser)
    def on_eBarcodeNumber_key_press_event(self, entry, event):
        """Entry handler: Return key in the barcode field triggers Save."""
        if self.checkKey(event,'NONE','Return'):
            self.activateClick("tbSave",event,"clicked")
    def on_chooseArticle_activate(self, event):
        """Hand the selected article id back to the calling module and close."""
        # choose Article from other Modul
        self.setChooseValue(self.singleArticle.ID)
        print 'Article-ID = ' + `self.singleArticle.ID`
        self.closeWindow()
    def on_tree1_row_activated(self, event, data1, data2):
        """Double-click on the article tree behaves like 'choose article'."""
        print 'DoubleClick tree1'
        self.activateClick('chooseArticle', event)
#choose Vendor button
    def on_bChooseVendor_clicked(self, event):
        """Open the address chooser; the picked id lands in eAddressNumber."""
        adr = cuon.Addresses.addresses.addresswindow(self.allTables)
        adr.setChooseEntry('chooseAddress', self.getWidget( 'eAddressNumber'))
# signals from entry eAddressNumber
    def on_eAddressNumber_changed(self, event):
        """Show name/city of the vendor address whose id was typed/chosen."""
        print 'eAdrnbr changed'
        eAdrField = self.getWidget('eAddressField1')
        # NOTE(review): long() raises ValueError on empty/non-numeric input,
        # and the liAdr indexing assumes at least 5 elements -- confirm.
        liAdr = self.singleAddress.getAddress(long(self.getWidget( 'eAddressNumber').get_text()))
        eAdrField.set_text(liAdr[0] + ', ' + liAdr[4])
# search button Article
    def on_bSearch_clicked(self, event):
        """Search button: run the article search with the current filters."""
        self.searchArticle()
    # Search-field handlers: editing-done and Return-key events on the
    # number/designation/material-group filter entries all re-run the search.
    def on_eFindNumber_editing_done(self, event):
        print 'Find Number'
        self.searchArticle()
    def on_eFindNumber_key_press_event(self, entry,event):
        if self.checkKey(event,'NONE','Return'):
            self.searchArticle()
    def on_eFindDesignation_editing_done(self, event):
        print 'Find Designation'
        self.searchArticle()
    def on_eFindDesignation_key_press_event(self, entry,event):
        if self.checkKey(event,'NONE','Return'):
            self.searchArticle()
    def on_eFindMaterialGroupID_key_press_event(self, entry,event):
        if self.checkKey(event,'NONE','Return'):
            self.searchArticle()
    def searchArticle(self):
        """Build a WHERE filter from the find fields and refresh the tree.

        Collects number, designation and material-group id from the search
        entries; empty fields are skipped.
        """
        self.out( 'Searching ....', self.ERROR)
        sNumber = self.getWidget('eFindNumber').get_text()
        sDesignation = self.getWidget('eFindDesignation').get_text()
        sID = self.getWidget('eFindMaterialGroupID').get_text()
        print "sID = ", sID
        #self.out('Name and City = ' + sNumber + ', ' + sDesignation, self.ERROR)
        # liSearch alternates field-name, value pairs consumed by getWhere().
        liSearch = []
        if sNumber:
            liSearch.append('number')
            liSearch.append(sNumber)
        if sDesignation:
            liSearch.append('designation')
            liSearch.append(sDesignation)
        if sID:
            print "material group search"
            # NOTE(review): int(sID) raises ValueError for non-numeric input;
            # the explicit ' =' suffix differs from the other field names --
            # presumably getWhere() defaults to a LIKE match otherwise. Confirm.
            liSearch.append('material_group =')
            liSearch.append(int(sID))
        self.singleArticle.sWhere = self.getWhere(liSearch)
        #self.out(self.singleArticle.sWhere, self.ERROR)
        self.refreshTree()
# button search article at partslist
    def on_bPartsFindArticle_clicked(self, event):
        """Open the article chooser; the picked id lands in ePartsArticleID."""
        ar = cuon.Articles.articles.articleswindow(self.allTables)
        ar.setChooseEntry('chooseArticle', self.getWidget( 'ePartsArticleID'))
    def on_ePartsArticleID_changed(self, event):
        """Fill designation, number and price fields for the chosen part article."""
        print 'eArticle changed'
        iArtNumber = self.getChangedValue('ePartsArticleID')
        iPartNumber = self.singleArticle.ID
        eArtField = self.getWidget('ePartsArticleDesignation')
        eArtNumber = self.getWidget('ePartsArticleNumber')
        #liArt = self.singleArticleForParts.getArticle(iArtNumber)
        #self.setTextbuffer(eArtField,liArt)
        dicPrices = self.singleArticleForParts.getSellingPrices(iArtNumber, iPartNumber)
        print 'Prices for ', iArtNumber, iPartNumber, dicPrices
        if iArtNumber and iArtNumber > 0:
            eArtField.set_text(self.singleArticleForParts.getArticleDesignation(iArtNumber))
            eArtNumber.set_text(self.singleArticleForParts.getArticleNumber(iArtNumber))
        else:
            eArtField.set_text('')
            eArtNumber.set_text('')
        # Only a complete price dict (4 single + 4 total prices + 1 extra key)
        # updates the price widgets; partial results leave them unchanged.
        if len(dicPrices) == 9:
            self.getWidget('eArticlePriceI').set_text(self.getCheckedValue(`dicPrices['s1']`, 'toStringFloat'))
            self.getWidget('eArticlePriceII').set_text(self.getCheckedValue(`dicPrices['s2']`, 'toStringFloat'))
            self.getWidget('eArticlePriceIII').set_text(self.getCheckedValue(`dicPrices['s3']`, 'toStringFloat'))
            self.getWidget('eArticlePriceIV').set_text(self.getCheckedValue(`dicPrices['s4']`, 'toStringFloat'))
            self.getWidget('eArticleTotalPriceI').set_text(self.getCheckedValue(`dicPrices['ts1']`, 'toStringFloat'))
            self.getWidget('eArticleTotalPriceII').set_text(self.getCheckedValue(`dicPrices['ts2']`, 'toStringFloat'))
            self.getWidget('eArticleTotalPriceIII').set_text(self.getCheckedValue(`dicPrices['ts3']`, 'toStringFloat'))
            self.getWidget('eArticleTotalPriceIV').set_text(self.getCheckedValue(`dicPrices['ts4']`, 'toStringFloat'))
# search button Parts List
    # Parts-list search triggers: button click and Return key both run searchParts().
    def on_bPLSearch_clicked(self, event):
        self.searchParts()
    def on_ePLFind_key_press_event(self, entry, event):
        print 'Find Parts'
        if self.checkKey(event,'NONE','Return'):
            print 'find parts return event'
            self.searchParts()
    def searchParts(self):
        """Build the parts-list WHERE filter from the PL find fields and refresh.

        The resulting filter is always restricted to the current article
        (articles_parts_list.article_id) and joined against articles.id.
        Sets self.Find so refreshTree() keeps this filter instead of the default.
        """
        sNumber = self.getWidget('ePLFindNumber').get_text()
        sDesignation = self.getWidget('ePLFindDesignation').get_text()
        sID = self.getWidget('ePLFindID').get_text()
        sDescription = self.getWidget('ePLFindDescription').get_text()
        sMaterialGroup = self.getWidget('ePLFindMaterialGroup').get_text()
        print "sID = ", sID
        #self.out('Name and City = ' + sNumber + ', ' + sDesignation, self.ERROR)
        # liSearch alternates field-name, value pairs consumed by getWhere().
        liSearch = []
        if sNumber:
            liSearch.append('articles.number')
            liSearch.append(sNumber)
        if sDescription:
            liSearch.append('articles_parts_list.designation')
            liSearch.append(sDescription)
        if sDesignation:
            liSearch.append('articles.designation')
            liSearch.append(sDesignation)
        if sMaterialGroup:
            liSearch.append('material_group')
            liSearch.append(sMaterialGroup)
        if sID:
            liSearch.append('part_id')
            liSearch.append(sID)
        print 'liSearch = ', liSearch
        self.singleArticleParts.sWhere = self.getWhere(liSearch) + ' and articles_parts_list.article_id = ' + `self.singleArticleID` + ' and articles_parts_list.part_id = articles.id '
        print self.singleArticleParts.sWhere
        #self.out(self.singleArticle.sWhere, self.ERROR)
        self.Find = True
        self.refreshTree()
    def on_bChooseMaterialGroup_clicked(self, event):
        """Open the material-group chooser; the picked id lands in eCategoryNr."""
        print 'materialgroup'
        mag = cuon.Articles.materialgroup.materialgroupwindow(self.allTables)
        mag.setChooseEntry('chooseMaterialgroup', self.getWidget( 'eCategoryNr'))
    def on_eCategoryNr_changed(self, event):
        """Show the material group's name next to the chosen group id."""
        print 'eCategory changed'
        iMaterialGroup = self.getChangedValue('eCategoryNr')
        sGroupName = self.singleMaterialGroup.getNameAndDesignation(iMaterialGroup)
        if len(sGroupName) > 0:
            self.getWidget('eCategory').set_text(sGroupName)
        else:
            # Unknown id: clear the label instead of leaving stale text.
            self.getWidget('eCategory').set_text('')
    def on_bSearchAssociated_clicked(self, event):
        """Open the chooser matching the 'associated with' combo selection.

        Currently only combo index 1 (Botany) opens a chooser; index 0 ("None")
        does nothing.
        """
        print 'search associated'
        articleAssociated = self.getWidget('cbAssociatedWith').get_active()
        print "status at cbAssosiatedWith = ", articleAssociated
        if articleAssociated == 1:
            #if self.singleArticle.firstRecord and self.singleArticle.firstRecord['associated_with'] == 1:
            #botany
            bot = cuon.Garden.botany.botanywindow(self.allTables)
            bot.setChooseEntry('chooseBotany', self.getWidget('eAssociatedNr'))
    def on_eAssociatedNr_changed(self, event):
        """Entry handler: refresh the associated-object display text."""
        print 'eAssocsiatedNr changed'
        #
        #        iMaterialGroup = self.getChangedValue('eCategoryNr')
        #        sGroupName = self.singleMaterialGroup.getNameAndDesignation(iMaterialGroup)
        #        if len(sGroupName) > 0:
        #            self.getWidget('eAssocsiatedText').set_text(sGroupName)
        #        else:
        #            self.getWidget('eAssocsiatedText').set_text('')
        #
        # Delegates to the combo-changed handler, which reads firstRecord.
        self.on_cbAssociatedWith_changed(event)
    def on_bGotoAssociated_clicked(self, event):
        """Open the module window for the associated object (Botany only)."""
        if self.getWidget('cbAssociatedWith').get_active() == 1:
            # Botany
            bot = cuon.Garden.botany.botanywindow(self.allTables)
    def on_cbAssociatedWith_changed(self, event):
        """Show the botany name for the article's associated record, if any.

        Reads associated_with/associated_id from the article's firstRecord;
        anything other than association type 1 (Botany) clears the text.
        """
        print 'goto associated'
        # NOTE(review): articleAssociated (the combo state) is read but never
        # used -- the decision is based on firstRecord instead. Confirm intent.
        articleAssociated = self.getWidget('cbAssociatedWith').get_active()
        print "associated with = ", self.singleArticle.firstRecord
        if self.singleArticle.firstRecord and self.singleArticle.firstRecord['associated_with'] == 1:
            print 'cbAssociatedID read'
            #iBotID = self.singleBotany.getAssociatedID(self.singleArticle.ID)
            #print 'Botany ID = ', iBotID
            #sBotany = self.singleBotany.getBotanyName(iBotID)
            iBotID = self.singleArticle.firstRecord['associated_id']
            print 'Botany ID = ', iBotID
            sBotany = self.singleBotany.getBotanyName(iBotID)
            if sBotany:
                # NOTE(review): getBotanyName is called a second time with the
                # same id -- redundant lookup, presumably harmless.
                sBotany = self.singleBotany.getBotanyName(iBotID)
                self.setText2Widget(sBotany,'eAssocsiatedText')
            else:
                self.setText2Widget('','eAssocsiatedText')
        else:
            self.setText2Widget('','eAssocsiatedText')
    def on_bQuickAppend_clicked(self, event):
        """Button handler: quick-append -- not implemented yet."""
        pass
def on_eTaxVat_changed(self, event):
TaxVat = self.getChangedValue('eTaxVat')
sTaxVat = self.singlePrefsFinanceVat.getNameAndDesignation(iTaxVat)
if sTaxVat:
self.getWidget('eTaxVatTex').set_text(sTaxVat)
else:
self.getWidget('eTaxVatText').set_text('')
#choose Manufactor button
    def on_bChooseManufactor_clicked(self, event):
        """Open the address chooser; the picked id lands in eManufactorNumber."""
        adr = cuon.Addresses.addresses.addresswindow(self.allTables)
        # NOTE(review): the choose-key is wrapped in _() here but is a plain
        # string ('chooseAddress') in on_bChooseVendor_clicked -- translating
        # an internal key looks unintentional; confirm.
        adr.setChooseEntry(_('chooseAddress'), self.getWidget( 'eManufactorNumber'))
# signals from entry eManufactorNumber
    def on_eManufactorNumber_changed(self, event):
        """Show name/city of the manufacturer address whose id was entered."""
        print 'eManufactor changed'
        eAdrField = self.getWidget('eManufactorField1')
        # NOTE(review): unlike on_eAddressNumber_changed, the text is passed
        # to getAddress() without a long() conversion -- confirm getAddress
        # accepts a string id here.
        liAdr = self.singleAddress.getAddress(self.getWidget( 'eManufactorNumber').get_text())
        eAdrField.set_text(liAdr[0] + ', ' + liAdr[4])
    def on_tbDMS_clicked(self, event):
        """Toolbar handler: open the DMS window for the current article."""
        print 'dms clicked'
        # Only meaningful once an article record is loaded.
        if self.singleArticle.ID > 0:
            print 'ModulNumber', self.ModulNumber
            Dms = cuon.DMS.dms.dmswindow(self.allTables, self.ModulNumber, {'1':self.singleArticle.ID})
    def on_bShowDMS_clicked(self, event):
        """Button handler: open the DMS window (same action as on_tbDMS_clicked)."""
        print 'dms clicked'
        if self.singleArticle.ID > 0:
            print 'ModulNumber', self.ModulNumber
            Dms = cuon.DMS.dms.dmswindow(self.allTables, self.ModulNumber, {'1':self.singleArticle.ID})
# sales
    def on_ePicDmsID_changed(self, event):
        """Load the catalogue picture for the entered DMS id into imCataloguePic.

        The picture is fetched via documentTools, scaled to 208x208 and shown.
        """
        try:
            picID = int(self.getWidget("ePicDmsID").get_text())
            dt = cuon.DMS.documentTools.documentTools()
            sFile = dt.load_article_catalogue_pic(self.allTables, picID )
            if sFile:
                print "image found"
                logo = self.getWidget("imCataloguePic")
                #
                #                newIm = Image.fromstring('RGB',[1024, 1024], bz2.decompress( image))
                #                newIm.thumbnail([208,208])
                #                sFile = self.dicUser['prefPath']['tmp'] + 'cuon_mainwindow_logo.png'
                #                save(sFile)
                print 'sFile = ', sFile
                pixbuf = gtk.gdk.pixbuf_new_from_file(sFile)
                scaled_buf = pixbuf.scale_simple(208,208,gtk.gdk.INTERP_BILINEAR)
                logo.set_from_pixbuf(scaled_buf)
                logo.show()
                #scale ok ?
        # NOTE(review): bare except silently swallows every failure (bad int,
        # missing file, pixbuf errors) -- deliberate best-effort display, but
        # consider narrowing to (ValueError, gobject.GError) and logging.
        except:
            pass
    def on_bSearchPic_clicked(self, event):
        """Open the DMS chooser to pick a catalogue picture id into ePicDmsID."""
        # NOTE(review): sep_info uses self.singleAddress.ID, while the other
        # DMS handlers in this window use self.singleArticle.ID -- this looks
        # like a copy-paste slip; confirm which record the DMS filter needs.
        Dms = cuon.DMS.dms.dmswindow(self.allTables, module=self.ModulNumber, sep_info = {'1':self.singleAddress.ID})
        Dms.setChooseEntry('chooseDMS', self.getWidget( 'ePicDmsID'))
    def on_bGotoPic_clicked(self, event):
        """Button handler: jump to picture -- not implemented yet."""
        pass
    def refreshTree(self):
        """Refresh the list/tree belonging to the currently active notebook tab.

        Rebuilds each tab's SQL WHERE filter from the current article id,
        reconnects the tree where the tab has one, and (for Webshop, Stock,
        Barcode) loads the single matching record into the entry fields.
        """
        self.singleArticle.disconnectTree()
        self.singleArticlePurchase.disconnectTree()
        self.singleArticleParts.disconnectTree()
        # NOTE(review): singleArticlePurchase is disconnected twice --
        # singleArticleSales was probably intended here; confirm.
        self.singleArticlePurchase.disconnectTree()
        if self.tabOption == self.tabArticle:
            #self.singleArticle.connectTree()
            #self.singleArticle.refreshTree()
            # Keep the associated-object display in sync with the record.
            self.on_cbAssociatedWith_changed(None)
            self.singleArticle.connectTree()
            self.singleArticle.refreshTree()
            self.singleArticle.setTreeSensitive(True)
        elif self.tabOption == self.tabParts:
            print 'refresh tree at parts'
            # A pending searchParts() already set sWhere; consume the flag
            # instead of overwriting the filter with the default one.
            if self.Find:
                self.Find = False
            else:
                self.singleArticleParts.sWhere ='where article_id = ' + `int(self.singleArticle.ID)` + ' and part_id = articles.id '
            self.singleArticleParts.connectTree()
            self.singleArticleParts.refreshTree()
            self.singleArticleParts.setTreeSensitive(True)
        elif self.tabOption == self.tabPurchase:
            self.singleArticlePurchase.sWhere ='where articles_id = ' + `int(self.singleArticle.ID)`
            self.singleArticlePurchase.connectTree()
            self.singleArticlePurchase.refreshTree()
            self.singleArticlePurchase.setTreeSensitive(True)
        elif self.tabOption == self.tabSales:
            self.singleArticleSales.sWhere ="where articles_number = '" + `int(self.singleArticle.ID)` + "' "
            self.singleArticleSales.connectTree()
            self.singleArticleSales.refreshTree()
        elif self.tabOption == self.tabWebshop:
            # Webshop tab has no tree: load the single record into the entries.
            self.singleArticleWebshop.sWhere ='where articles_number = ' + `int(self.singleArticle.ID)`
            self.singleArticleWebshop.setEmptyEntries()
            self.singleArticleWebshop.getFirstRecord()
            self.singleArticleWebshop.fillEntries(self.singleArticleWebshop.ID)
            print "-----------> end tab Webshop"
        elif self.tabOption == self.tabStock:
            print "-----------> begin tab Stock"
            self.singleArticleStock.sWhere ='where articles_id = ' + `int(self.singleArticle.ID)`
            self.singleArticleStock.setEmptyEntries()
            self.singleArticleStock.getFirstRecord()
            self.singleArticleStock.articlesID = self.singleArticle.ID
            if self.singleArticleStock.ID > 0:
                self.singleArticleStock.fillEntries(self.singleArticleStock.ID)
            else:
                # No stock record yet: pre-fill the link fields only.
                #dicAr = {'articles_number':self.singleArticle.getArticleNumber(self.singleArticle.ID)}
                dicAr = {'articles_id':self.singleArticle.ID}
                self.singleArticleStock.fillOtherEntries(dicAr)
            print "-----------> end tab Stock"
        elif self.tabOption == self.tabBarcode:
            print "-----------> begin tab Barcode tree "
            self.singleArticleBarcode.sWhere ='where article_id = ' + `int(self.singleArticle.ID)`
            self.singleArticleBarcode.setEmptyEntries()
            misc_id = self.rpc.callRP('Article.findBarcodeID', self.singleArticle.ID, self.dicUser)
            print "misc_id by find tree = ", misc_id
            if misc_id > 0:
                #misc_id = self.rpc.callRP('Article.createBarcodeEntry', self.singleArticle.ID, self.dicUser)
                #print 'new misc_id = ', misc_id
                self.singleArticleBarcode.load(misc_id)
                print "new barcode ID = ", self.singleArticleBarcode.ID
                #self.singleArticleBarcode.getFirstRecord()
            self.singleArticleBarcode.articlesID = self.singleArticle.ID
            print "save barcode ", self.singleArticleBarcode.ID, "for ", self.singleArticleBarcode.articlesID
            if self.singleArticleBarcode.ID > 0:
                self.singleArticleBarcode.fillEntries(self.singleArticleBarcode.ID)
            else:
                # NOTE(review): dicAr is built but never used here (unlike the
                # Stock branch, which calls fillOtherEntries) -- confirm intent.
                #dicAr = {'articles_number':self.singleArticle.getArticleNumber(self.singleArticle.ID)}
                dicAr = {'articles_id':self.singleArticle.ID}
            print "-----------> end tab Stock"
    def tabChanged(self):
        """Switch menu state, edit action and status bar for the active tab.

        Each branch disables the generic tab menus, enables the tab-specific
        menu group, records the edit-action key used by the toolbar, and
        remembers the tab in self.lastTab. Ends with a tree refresh.
        """
        print 'tab changed to :' + str(self.tabOption)
        if self.tabOption == self.tabArticle:
            #Article
            self.disableMenuItem('tabs')
            self.enableMenuItem('article')
            print 'Seite 0'
            self.editAction = 'editArticle'
            self.setStatusbarText([''])
            self.lastTab = self.tabArticle
        elif self.tabOption == self.tabParts:
            #Parts
            # Coming from the article tab: remember which article the
            # parts list belongs to.
            if self.lastTab == self.tabArticle:
                self.singleArticleID = self.singleArticle.ID
            self.disableMenuItem('tabs')
            # NOTE(review): 'Parts' is capitalized, unlike the lowercase menu
            # keys used elsewhere ('article', 'purchase', 'sales'); also this
            # branch never updates self.lastTab. Confirm both.
            self.enableMenuItem('Parts')
            self.editAction = 'editArticleParts'
            print 'Seite 1'
            self.setStatusbarText([self.singleArticle.sStatus])
        elif self.tabOption == self.tabPurchase:
            #Purchase
            self.lastTab = self.tabPurchase
            self.disableMenuItem('tabs')
            self.enableMenuItem('purchase')
            self.editAction = 'editArticlePurchase'
            print 'Seite 2'
            self.setStatusbarText([self.singleArticle.sStatus])
        elif self.tabOption == self.tabSales:
            self.lastTab = self.tabSales
            self.disableMenuItem('tabs')
            self.enableMenuItem('sales')
            self.editAction = 'editArticleSales'
            print 'Seite 3'
            self.setStatusbarText([self.singleArticle.sStatus])
        elif self.tabOption == self.tabWebshop:
            self.lastTab = self.tabWebshop
            self.disableMenuItem('tabs')
            # NOTE(review): Webshop and Stock enable the 'sales' menu group --
            # presumably intentional reuse; confirm.
            self.enableMenuItem('sales')
            self.editAction = 'editArticleWebshop'
            self.singleArticleWebshop.setTreeSensitive(False)
            print 'Seite 4'
            self.setStatusbarText([self.singleArticle.sStatus])
        elif self.tabOption == self.tabStock:
            self.lastTab = self.tabStock
            self.disableMenuItem('tabs')
            self.enableMenuItem('sales')
            self.editAction = 'editArticleStock'
            self.setStatusbarText([self.singleArticle.sStatus])
            print 'Seite 5'
        elif self.tabOption == self.tabBarcode:
            self.lastTab = self.tabBarcode
            self.disableMenuItem('tabs')
            self.enableMenuItem('barcode')
            self.editAction = 'editArticleBarcode'
            self.setStatusbarText([self.singleArticle.sStatus])
            # Barcode tab goes straight into edit mode.
            self.activateClick("BarcodeEdit")
            print 'Seite 6'
        # refresh the Tree
        self.refreshTree()
        self.enableMenuItem(self.editAction)
        self.editEntries = False
| CuonDeveloper/cuon | cuon_client/cuon/bin/cuon/Articles/articles.py | Python | gpl-3.0 | 44,421 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from django.conf.urls import url
from . import views
urlpatterns = [
    # All patterns are now explicitly anchored with a trailing "$".
    # Previously only jobs-detail was anchored, so e.g. "new/extra/" also
    # resolved to jobs-new -- an inconsistency and a surprising prefix match.
    url(r"^new/$", views.new_spark_job, name="jobs-new"),
    url(
        r"^identifier-available/$",
        views.check_identifier_available,
        name="jobs-identifier-available",
    ),
    url(r"^(?P<id>\d+)/delete/$", views.delete_spark_job, name="jobs-delete"),
    url(r"^(?P<id>\d+)/download/$", views.download_spark_job, name="jobs-download"),
    url(r"^(?P<id>\d+)/edit/$", views.edit_spark_job, name="jobs-edit"),
    url(r"^(?P<id>\d+)/run/$", views.run_spark_job, name="jobs-run"),
    url(r"^(?P<id>\d+)/$", views.detail_spark_job, name="jobs-detail"),
    url(r"^(?P<id>\d+)/zeppelin/$", views.detail_zeppelin_job, name="jobs-zeppelin"),
]
| mozilla/telemetry-analysis-service | atmo/jobs/urls.py | Python | mpl-2.0 | 924 |
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length
class PostForm(Form):
    """Form for submitting a new (required, non-empty) post."""
    post = StringField('post', validators=[DataRequired()])
class SearchForm(Form):
    """Form holding the (required) full-text search query."""
    search = StringField('search', validators=[DataRequired()])
class LoginForm(Form):
    """OpenID login form with an optional remember-me checkbox."""
    openid = StringField('openid', validators=[DataRequired()])
    remember_me = BooleanField('remember_me', default=False)
class EditForm(Form):
    """Profile edit form that rejects nicknames already taken by another user."""
    nickname = StringField('nickname', validators=[DataRequired()])
    about_me = TextAreaField('about_me', validators=[Length(min=0, max=140)])
    def __init__(self, original_nickname, *args, **kwargs):
        # Remember the user's current nickname so keeping it unchanged
        # always passes validation.
        Form.__init__(self, *args, **kwargs)
        self.original_nickname = original_nickname
    def validate(self):
        """Run the field validators, then enforce nickname uniqueness."""
        if not Form.validate(self):
            return False
        if self.nickname.data == self.original_nickname:
            return True
        # NOTE(review): `User` is not imported in this module -- this line
        # raises NameError at runtime; presumably `from app.models import User`
        # is missing at the top of the file. Confirm against the app package.
        user = User.query.filter_by(nickname=self.nickname.data).first()
        if user != None:
            self.nickname.errors.append('This nickname is already in use. Please choose another one.')
            return False
        return True
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('block', '0002_auto_20141011_2223'),
]
operations = [
migrations.CreateModel(
name='HeaderFooter',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('header', models.CharField(max_length=150)),
('url_twitter', models.URLField(verbose_name='Twitter URL', blank=True)),
('url_linkedin', models.URLField(verbose_name='LinkedIn URL', blank=True)),
],
options={
'verbose_name': 'Header and footer',
'verbose_name_plural': 'Header and footers',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Template',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('template_name', models.CharField(max_length=150, help_text="File name e.g. 'compose/page_article.html'")),
],
options={
'ordering': ('template_name',),
'verbose_name': 'Template',
'verbose_name_plural': 'Templates',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TemplateSection',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('section', models.ForeignKey(to='block.Section')),
('template', models.ForeignKey(to='cms.Template')),
],
options={
'ordering': ('template__template_name', 'section__name'),
'verbose_name': 'Template section',
'verbose_name_plural': 'Template sections',
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='templatesection',
unique_together=set([('template', 'section')]),
),
]
| pkimber/cms | cms/migrations/0001_initial.py | Python | apache-2.0 | 2,513 |
#!/usr/bin/env python
import sys
import math
import time
import os
from cv2 import *
import numpy as np
from src.server import CubeScoutServer
class SightingInfo:
    """Per-person bookkeeping used to throttle greeting notifications."""

    def __init__(self):
        # Start both timers very large so the first ever sighting is
        # treated as "new" and can trigger a notification immediately.
        self.since_sighting, self.since_notify = 9999999, 9999999
        # Consecutive sightings observed since the last notification.
        self.count = 0
def read_csv(filename):
    """Load a face-recognition training set from a ``path;label`` CSV.

    Each valid line names a face image and an integer person id.  The
    person's display name is recovered from the image path's parent
    directory.  Malformed rows are reported to stderr and skipped
    (the original silently dropped them; rows with an empty path also
    crashed the name lookup with an IndexError).

    Returns:
        (images, labels, names): grayscale images, their integer labels,
        and a dict mapping label -> person name.
    """
    images = []
    labels = []
    names = {}  # Maps person ID to name
    with open(filename, "r") as csv_file:
        for line_number, line in enumerate(csv_file, 1):
            columns = line.strip().split(";")
            if len(columns) != 2:
                sys.stderr.write("Skipping malformed line %d in %s\n"
                                 % (line_number, filename))
                continue
            path = columns[0]
            if not path:
                # An empty path cannot be loaded and would crash the
                # name lookup below; skip the row entirely.
                continue
            label = int(columns[1])
            if label not in names:  # New person! :D
                names[label] = path.split("/")[-2]
            images.append(cvtColor(imread(path), COLOR_BGR2GRAY))
            labels.append(label)
    return images, labels, names
def detect(img, cascade):
    """Run the cascade classifier on *img* and return face rectangles.

    The detector yields boxes as (x, y, width, height); these are
    converted in place to corner form (x1, y1, x2, y2).  Returns an
    empty list when nothing is found.
    """
    found = cascade.detectMultiScale(
        img, scaleFactor=1.3, minNeighbors=4, minSize=(40, 40))
    if not len(found):
        return []
    # (x, y, w, h) -> (x1, y1, x2, y2): add the origin onto the size.
    found[:, 2:] += found[:, :2]
    return found
def main():
    """Capture webcam frames, recognize known faces and notify the server.

    Usage: ``cubescout.py device_id [-s]`` where ``-s`` saves cropped
    face samples to ``data/samples``.  Runs until ESC is pressed, then
    replays the recorded annotated frames at a user-chosen framerate.
    """
    # Validate program arguments, print usage if invalid
    if len(sys.argv) < 2:
        print("usage: " + sys.argv[0] + " device_id")
        print("\t device_id -- The webcam device id to grab frames from.")
        exit()
    take_samples = False
    if "-s" in sys.argv:
        take_samples = True
        if not os.path.exists("data/samples"):
            os.makedirs("data/samples")
    # Get the program arguments
    #device_id = int(sys.argv[1])
    device_id = sys.argv[1]
    # Load the csv file
    print("Loading training data...")
    images = None
    labels = None
    names = None
    try:
        images, labels, names = read_csv("data/faces.csv")
    except Exception as error:
        sys.stderr.write("Failed to open 'data/faces.csv'. Reason: "+str(error)+"\n")
        exit()
    # Build sighting info by name
    sighting_info = {}
    for name in names.values():
        sighting_info[name] = SightingInfo()
    # Get image dimensions (all training images share one size)
    im_width, im_height = images[0].shape
    # Create a FaceRecognizer and train it on the given images
    print("Training face recognizer...")
    model = createLBPHFaceRecognizer()
    model.train(images, np.asarray(labels))
    # Create classifier for face detection
    haar_face = CascadeClassifier()
    haar_face.load("data/haarcascade_frontalface_default.xml")
    print("Initializing video capture...")
    # Get a handle to the video device
    cap = VideoCapture(device_id)
    # Make sure we can use the video device
    if not cap.isOpened():
        sys.stderr.write("Failed to open video capture device: "+device_id+"\n")
        exit()
    # Start server
    print("Starting server...")
    server = CubeScoutServer()
    # Count image samples
    image_sample_counter = 0
    # For frame rate calculation stuff
    last_frame_time = time.clock()
    face_frames = []
    print("Detecting faces...")
    while True:
        # Exit on escape key
        key = waitKey(10)
        if key == 27:
            break
        # Server listening
        server.listen()
        # Calculate delta time
        dt = time.clock()-last_frame_time
        last_frame_time = time.clock()
        # Update time since last sightings; a sighting gap of more than
        # half a second resets the consecutive-sighting counter.
        for name in sighting_info:
            sighting_info[name].since_sighting += dt
            sighting_info[name].since_notify += dt
            if sighting_info[name].since_sighting > 0.5:
                sighting_info[name].count = 0
        # frame holds the current frame of the video device
        frame_good, frame = cap.read()
        if not frame_good:
            break
        # Clone the current frame
        # NOTE(review): this is a reference, not a copy -- annotations
        # below are drawn onto the captured frame itself.
        original = frame
        # Convert the current frame into grayscale
        gray = cvtColor(frame, COLOR_BGR2GRAY)
        # Find the faces in the frame
        faces = detect(gray, haar_face)
        # At this point we have the position of the faces in faces.
        # Now, we need to get the faces, make a prediction, and
        # annotate it in the video.
        for face in faces:
            # Unpack rect
            face_x1, face_y1, face_x2, face_y2 = face[0], face[1], face[2], face[3]
            # Crop the face from the image
            face_im = gray[face_y1:face_y2, face_x1:face_x2]
            # Resize face for Eigenfaces and Fisherfaces or whatever
            face_resized = resize(face_im, (im_width, im_height), 1.0, 1.0, INTER_CUBIC)
            # Now perform prediction
            prediction = model.predict(face_resized)
            # Write face image if we want
            if take_samples:
                face_im_color = original[face_y1:face_y2, face_x1:face_x2]
                face_im_color = resize(face_im_color, (130, 130), 1.0, 1.0, INTER_CUBIC)
                print("Writing image...")
                imwrite("data/samples/sample"+str(image_sample_counter)+".jpg", face_im_color)
                image_sample_counter += 1
            ####################
            # Write info to original image
            rectangle(original, (face_x1, face_y1), (face_x2, face_y2), (0, 255, 0), 3)
            # Create the text to annotate the box
            box_text = ""
            person = ""
            face_difference = 0
            if prediction:
                person = names[prediction[0]] # Check which person it is
                face_difference = math.floor(prediction[1])
                box_text = person+":"+str(face_difference)
            # Calculate the position for the annotation text
            text_x = face_x1 - 10
            text_y = face_y1 - 10
            # Put the text into the image
            putText(original, box_text, (text_x, text_y), FONT_HERSHEY_PLAIN, 2.0, (0, 120, 255), 2)
            ####################
            # Handle the sighting!  A difference <= 130 is treated as a
            # confident match; notify at most every 15s after 3 hits.
            if face_difference <= 130:
                sighting_info[person].count += 1
                sighting_info[person].since_sighting = 0
                if sighting_info[person].since_notify > 15 and sighting_info[person].count > 3:
                    server.broadcast(person)
                    sighting_info[person].since_notify = 0
        # Show the result
        imshow("face_recognizer", original)
        face_frames.append(original)
    destroyAllWindows()
    # Replay all recorded annotated frames at the requested framerate.
    # NOTE(review): ``raw_input`` means this module targets Python 2.
    print("Ready for playback")
    fps = raw_input("Enter playback framerate:")
    for frame in face_frames:
        # Exit on escape key
        key = waitKey(10)
        if key == 27:
            break
        # Cap framerate (busy-wait until the frame period has elapsed)
        while time.clock()-last_frame_time < 1.0/float(fps):
            pass
        # Calculate delta time
        dt = time.clock()-last_frame_time
        last_frame_time = time.clock()
        # Display the frame
        imshow("face_recognizer", frame)
| tedsta/cube-scout | cubescout.py | Python | mit | 7,051 |
import websocket
import json
import requests
import urllib
import os
import logging
# Suppress InsecureRequestWarning
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
###VARIABLES THAT YOU NEED TO SET MANUALLY IF NOT ON HEROKU#####
try:
    MESSAGE = os.environ['WELCOME-MESSAGE']
    TOKEN = os.environ['SLACK-TOKEN']
    UNFURL = os.environ['UNFURL-LINKS']
    CHANNELS = os.environ['SLACK-CHANNELS']
# BUG FIX: the original bare ``except:`` also swallowed SystemExit and
# KeyboardInterrupt; only a missing environment variable should trigger
# the manual-configuration fallback.
except KeyError:
    MESSAGE = 'Manually set the Message if youre not running through heroku or have not set vars in ENV'
    TOKEN = 'Manually set the API Token if youre not running through heroku or have not set vars in ENV'
    UNFURL = 'FALSE'
    CHANNELS = 'Manually set the Channels if youre not running through heroku or have not set vars in ENV'
###############################################################
def parse_join(message):
    """Handle one raw RTM event: DM the welcome message to channel joiners.

    *message* is the JSON payload received from the Slack RTM websocket.
    Only ``member_joined_channel`` events for the configured CHANNELS
    trigger a greeting.
    """
    logger = logging.getLogger()
    m = json.loads(message)
    if (m['type'] == "member_joined_channel"):
        logger.info(CHANNELS)
        # NOTE(review): substring containment -- a channel id that is a
        # prefix of another listed id also matches; confirm intended.
        if (m['channel'] in CHANNELS):
            # Open (or fetch) the direct-message channel with the user.
            x = requests.get(
                "https://slack.com/api/im.open?token=" + TOKEN + "&user=" + m["user"])
            x = x.json()
            x = x["channel"]["id"]
            # ``urllib.quote`` implies this module targets Python 2.
            if (UNFURL.lower() == "false"):
                xx = requests.post("https://slack.com/api/chat.postMessage?token=" + TOKEN + "&channel=" +
                                   x + "&text=" + urllib.quote(MESSAGE) + "&parse=full&as_user=true&unfurl_links=false")
            else:
                xx = requests.post("https://slack.com/api/chat.postMessage?token=" + TOKEN + "&channel=" +
                                   x + "&text=" + urllib.quote(MESSAGE) + "&parse=full&as_user=true")
            # DEBUG
            text = "HELLO SENT: " + m["user"]
            logger.info(text)
            #
# Connects to Slacks and initiates socket handshake
def start_rtm():
    """Call Slack's ``rtm.start`` API and return the websocket URL."""
    response = requests.get(
        "https://slack.com/api/rtm.start?token=" + TOKEN, verify=False)
    # Log the raw payload for debugging before parsing it.
    logging.getLogger().info(response.text)
    return response.json()["url"]
def on_message(ws, message):
    # websocket-client callback: every RTM event is handed to parse_join,
    # which decides whether it is a channel join we should greet.
    parse_join(message)
def on_error(ws, error):
    """websocket-client error callback: log the failure.

    BUG FIX: the original concatenated ``error`` (usually an Exception
    instance) onto a str, which raises ``TypeError``; let the logging
    machinery stringify it lazily instead.
    """
    logger = logging.getLogger()
    logger.error("SOME ERROR HAS HAPPENED: %s", error)
def on_close(ws):
    """websocket-client callback fired when the connection drops."""
    logging.getLogger().warn("Connection Closed")
def on_open(ws):
    """websocket-client callback fired once the socket is established."""
    logging.getLogger().info(
        "Connection Started - Auto Greeting new joiners to the network")
if __name__ == "__main__":
    # Verbose logging so connection issues are visible on the console.
    logging.basicConfig()
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    r = start_rtm()
    logger.info("WebSocket URL:" + r)
    # NOTE(review): ``on_open`` is defined above but never registered
    # here -- confirm whether it should be passed to WebSocketApp.
    ws = websocket.WebSocketApp(
        r, on_message=on_message, on_error=on_error, on_close=on_close)
    ws.run_forever()
| dukeluke16/greetingslack | bot.py | Python | mit | 2,880 |
#!/usr/bin/env python3
'''
Class to take care of peerme config
'''
import configparser
import logging
class PeermeConfig():
    """Thin wrapper around ``configparser`` for the peerme config file.

    Falls back to built-in defaults when the file is missing or empty.
    """

    def __init__(self, conf_file):
        self.conf_file = conf_file
        self.config = configparser.ConfigParser()
        self.config.read(conf_file)
        # A missing or unreadable file leaves the parser sectionless.
        if self.config.sections():
            logging.info('Loaded {} config file'.format(conf_file))
        else:
            self._default_load()

    def _default_load(self):
        ''' Return a default config '''
        logging.warning('{} not found - Using default config'.format(
            self.conf_file
        ))
        self.conf_file = 'default'
        # ConfigParser stringifies these values on assignment.
        self.config['peerme'] = {
            'my_asn': 32934,
            'http_timeout': 30,
        }

    def __repr__(self):
        chunks = []
        for section in self.config.sections():
            chunks.append('{}\n'.format(section))
            chunks.extend(
                ' {}\n'.format(pair) for pair in self.config.items(section))
        return ''.join(chunks)
| afenioux/peerme | peerme/config.py | Python | bsd-2-clause | 1,027 |
# Copyright (C) 2010-2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import warnings
from .script_interface import ScriptInterfaceHelper, script_interface_register
from . import utils
@script_interface_register
class SingleReaction(ScriptInterfaceHelper):
    """Script-interface wrapper describing one direction of a reaction.

    Holds reactant/product types, their stoichiometric coefficients and
    the reaction constant ``gamma``.
    """

    _so_name = "ReactionMethods::SingleReaction"
    _so_creation_policy = "LOCAL"

    def __init__(self, **kwargs):
        # ``sip`` marks reconstruction from an existing core object; the
        # keys were already validated when it was first created.
        super().__init__(**kwargs)
        if not 'sip' in kwargs:
            utils.check_valid_keys(self.valid_keys(), kwargs.keys())

    def valid_keys(self):
        """Return the accepted constructor keyword arguments."""
        return ("reactant_types", "reactant_coefficients",
                "product_types", "product_coefficients", "gamma")

    def required_keys(self):
        """Return the mandatory constructor keyword arguments."""
        return ("reactant_types", "reactant_coefficients",
                "product_types", "product_coefficients", "gamma")

    def make_backward_reaction(self):
        """Return the reverse reaction: reactants and products swapped,
        with reaction constant ``1 / gamma``."""
        return SingleReaction(
            gamma=1. / self.gamma, reactant_types=self.product_types,
            reactant_coefficients=self.product_coefficients,
            product_types=self.reactant_types,
            product_coefficients=self.reactant_coefficients)
@script_interface_register
class ReactionAlgorithm(ScriptInterfaceHelper):
"""
This class provides the base class for Reaction Algorithms like
the Reaction Ensemble algorithm and the constant pH method.
Initialize the reaction algorithm by setting the
standard pressure, temperature, and the exclusion radius.
Note: When creating particles the velocities of the new particles are set
according the Maxwell-Boltzmann distribution. In this step the mass of the
new particle is assumed to equal 1.
Parameters
----------
kT : :obj:`float`
Thermal energy of the system in simulation units
exclusion_radius : :obj:`float`
Minimal distance from any particle, within which new particle will not
be inserted. This is useful to avoid integrator failures if particles
are too close and there is a diverging repulsive interaction, or to
prevent two oppositely charged particles from being placed on top of
each other. The Boltzmann factor :math:`\\exp(-\\beta E)` gives these
configurations a small contribution to the partition function,
therefore they can be neglected.
seed : :obj:`int`
Initial counter value (or seed) of the Mersenne Twister RNG.
Methods
-------
remove_constraint()
Remove any previously defined constraint.
Requires setting the volume using :meth:`set_volume`.
set_cylindrical_constraint_in_z_direction()
Constrain the reaction moves within a cylinder aligned on the z-axis.
Requires setting the volume using :meth:`set_volume`.
Parameters
----------
center_x : :obj:`float`
x coordinate of center of the cylinder.
center_y : :obj:`float`
y coordinate of center of the cylinder.
radius_of_cylinder : :obj:`float`
radius of the cylinder
set_wall_constraints_in_z_direction()
Restrict the sampling area to a slab in z-direction. Requires setting
the volume using :meth:`set_volume`. This constraint is necessary when
working with :ref:`Electrostatic Layer Correction (ELC)`.
Parameters
----------
slab_start_z : :obj:`float`
z coordinate of the bottom wall.
slab_end_z : :obj:`float`
z coordinate of the top wall.
Examples
--------
>>> import espressomd
>>> import espressomd.shapes
>>> import espressomd.electrostatics
>>> import espressomd.reaction_ensemble
>>> import numpy as np
>>> # setup a charged system
>>> box_l = 20
>>> elc_gap = 10
>>> system = espressomd.System(box_l=[box_l, box_l, box_l + elc_gap])
>>> system.time_step = 0.001
>>> system.cell_system.skin = 0.4
>>> types = {"HA": 0, "A-": 1, "H+": 2, "wall": 3}
>>> charges = {types["HA"]: 0, types["A-"]: -1, types["H+"]: +1}
>>> for i in range(10):
... system.part.add(pos=np.random.random(3) * box_l, type=types["A-"], q=charges[types["A-"]])
... system.part.add(pos=np.random.random(3) * box_l, type=types["H+"], q=charges[types["H+"]])
>>> for particle_type in charges.keys():
... system.non_bonded_inter[particle_type, types["wall"]].wca.set_params(epsilon=1.0, sigma=1.0)
>>> # add ELC actor
>>> p3m = espressomd.electrostatics.P3M(prefactor=1.0, accuracy=1e-2)
>>> elc = espressomd.electrostatics.ELC(p3m_actor=p3m, maxPWerror=1.0, gap_size=elc_gap)
>>> system.actors.add(elc)
>>> # add constant pH method
>>> RE = espressomd.reaction_ensemble.ConstantpHEnsemble(kT=1, exclusion_radius=1, seed=77)
>>> RE.constant_pH = 2
>>> RE.add_reaction(gamma=0.0088, reactant_types=[types["HA"]],
... product_types=[types["A-"], types["H+"]],
... default_charges=charges)
>>> # add walls for the ELC gap
>>> RE.set_wall_constraints_in_z_direction(0, box_l)
>>> RE.set_volume(box_l**3)
>>> system.constraints.add(shape=espressomd.shapes.Wall(dist=0, normal=[0, 0, 1]),
... particle_type=types["wall"])
>>> system.constraints.add(shape=espressomd.shapes.Wall(dist=-box_l, normal=[0, 0, -1]),
... particle_type=types["wall"])
get_wall_constraints_in_z_direction()
Returns the restrictions of the sampling area in z-direction.
set_volume()
Set the volume to be used in the acceptance probability of the reaction
ensemble. This can be useful when using constraints, if the relevant
volume is different from the box volume. If not used the default volume
which is used, is the box volume.
Parameters
----------
volume : :obj:`float`
Volume of the system in simulation units
get_volume()
Get the volume to be used in the acceptance probability of the reaction
ensemble.
get_acceptance_rate_configurational_moves():
Returns the acceptance rate for the configuration moves.
get_acceptance_rate_reaction()
Returns the acceptance rate for the given reaction.
Parameters
----------
reaction_id : :obj:`int`
Reaction id
set_non_interacting_type()
Sets the particle type for non-interacting particles.
Default value: 100.
This is used to temporarily hide particles during a reaction trial
move, if they are to be deleted after the move is accepted. Please
change this value if you intend to use the type 100 for some other
particle types with interactions, or if you need improved performance,
as the default value of 100 causes some overhead.
Please also note that particles
in the current implementation of the Reaction Ensemble are only
hidden with respect to Lennard-Jones and Coulomb interactions. Hiding
of other interactions, for example a magnetic, needs to be implemented
in the code.
Parameters
----------
type : :obj:`int`
Particle type for the hidden particles
get_non_interacting_type()
Returns the type which is used for hiding particle
reaction()
Performs randomly selected reactions.
Parameters
----------
reaction_steps : :obj:`int`, optional
The number of reactions to be performed at once, defaults to 1.
displacement_mc_move_for_particles_of_type()
Performs a displacement Monte Carlo move for particles of given type.
New positions of the displaced particles are chosen from the whole box
with a uniform probability distribution. If there are multiple types,
that are being moved in a simulation, they should be moved in a random
order to avoid artefacts.
Parameters
----------
type_mc : :obj:`int`
Particle type which should be moved
particle_number_to_be_changed : :obj:`int`
Number of particles to move, defaults to 1.
delete_particle()
Deletes the particle of the given p_id and makes sure that the particle
range has no holes. This function has some restrictions, as e.g. bonds
are not deleted. Therefore only apply this function to simple ions.
Parameters
----------
p_id : :obj:`int`
Id of the particle to be deleted.
change_reaction_constant()
Changes the reaction constant of a given reaction
(for both the forward and backward reactions).
The ``reaction_id`` which is assigned to a reaction
depends on the order in which :meth:`add_reaction` was called.
The 0th reaction has ``reaction_id=0``, the next added
reaction needs to be addressed with ``reaction_id=1``, etc.
Parameters
----------
reaction_id : :obj:`int`
Reaction id
gamma : :obj:`float`
New reaction constant
delete_reaction()
Delete a reaction from the set of used reactions
(the forward and backward reaction).
The ``reaction_id`` which is assigned to a reaction
depends on the order in which :meth:`add_reaction` was called.
The 0th reaction has ``reaction_id=0``, the next added
reaction needs to be addressed with ``reaction_id=1``, etc.
After the deletion of a reaction subsequent reactions
take the ``reaction_id`` of the deleted reaction.
Parameters
----------
reaction_id : :obj:`int`
Reaction id
"""
_so_name = "ReactionMethods::ReactionAlgorithm"
_so_creation_policy = "LOCAL"
_so_bind_methods = ("remove_constraint",
"get_wall_constraints_in_z_direction",
"set_wall_constraints_in_z_direction",
"set_cylindrical_constraint_in_z_direction",
"set_volume",
"get_volume",
"get_acceptance_rate_reaction",
"set_non_interacting_type",
"get_non_interacting_type",
"reaction",
"displacement_mc_move_for_particles_of_type",
"check_reaction_method",
"change_reaction_constant",
"delete_reaction",
"delete_particle",
)
def __init__(self, **kwargs):
    """Forward construction to the script interface and validate keys.

    Validation is skipped when restoring from a serialized instance
    pointer (``sip``), since the keys were checked at first creation.
    """
    super().__init__(**kwargs)
    # Idiomatic membership test (was: ``not 'sip' in kwargs``).
    if 'sip' not in kwargs:
        utils.check_valid_keys(self.valid_keys(), kwargs.keys())
def valid_keys(self):
    """Return the set of keyword arguments accepted by the constructor."""
    return {"kT", "exclusion_radius", "seed"}

def required_keys(self):
    """Return the set of keyword arguments that must be provided."""
    return {"kT", "exclusion_radius", "seed"}
def add_reaction(self, **kwargs):
    """
    Sets up a reaction in the forward and backward direction.

    Parameters
    ----------
    gamma : :obj:`float`
        Equilibrium constant :math:`\\Gamma` of the reaction in simulation
        units (see section :ref:`Reaction Ensemble` for its definition).
    reactant_types : list of :obj:`int`
        List of particle types of reactants in the reaction.
    reactant_coefficients : list of :obj:`int`
        List of stoichiometric coefficients of the reactants in the same
        order as the list of their types.
    product_types : list of :obj:`int`
        List of particle types of products in the reaction.
    product_coefficients : list of :obj:`int`
        List of stoichiometric coefficients of products of the reaction in
        the same order as the list of their types
    default_charges : :obj:`dict`
        A dictionary of default charges for types that occur
        in the provided reaction.
    check_for_electroneutrality : :obj:`bool`
        Check for electroneutrality of the given reaction.
        Default is ``True``.

    """
    default_charges = kwargs.pop("default_charges")
    neutrality_check = kwargs.pop("check_for_electroneutrality", True)
    forward_reaction = SingleReaction(**kwargs)
    # The backward direction is derived automatically (swapped sides,
    # inverse gamma) and registered alongside the forward one.
    backward_reaction = forward_reaction.make_backward_reaction()
    if neutrality_check:
        self._check_charge_neutrality(
            type2charge=default_charges,
            reaction=forward_reaction)

    self.call_method("add_reaction", reaction=forward_reaction)
    self.call_method("add_reaction", reaction=backward_reaction)

    # Register the default charge of every participating type.
    for ptype, charge in default_charges.items():
        self.call_method("set_charge_of_type", type=ptype, charge=charge)
    self.call_method("check_reaction_method")
def _check_charge_neutrality(self, type2charge, reaction):
    """Raise ``ValueError`` unless *reaction* conserves electric charge.

    ``type2charge`` maps particle types to their default charges.  The
    check is skipped entirely when every type is neutral.
    """
    if not isinstance(type2charge, dict):
        raise ValueError(
            "No dictionary for relation between types and default charges provided.")
    charges = np.array(list(type2charge.values()))
    if np.count_nonzero(charges) == 0:
        # All species are neutral; nothing to verify.
        return
    # Net electric charge created by one forward reaction event.
    delta_q = sum(coef * type2charge[ptype] for coef, ptype in
                  zip(reaction.product_coefficients, reaction.product_types))
    delta_q -= sum(coef * type2charge[ptype] for coef, ptype in
                   zip(reaction.reactant_coefficients, reaction.reactant_types))
    # Compare against the smallest non-zero charge to stay unit-agnostic.
    smallest_abs_charge = np.min(np.abs(charges[np.nonzero(charges)[0]]))
    if abs(delta_q) / smallest_abs_charge > 1e-10:
        raise ValueError("Reaction system is not charge neutral")
def get_status(self):
    """
    Returns the status of the reaction ensemble in a dictionary containing
    the used reactions, the used kT and the used exclusion radius.

    """
    self.call_method("check_reaction_method")
    reactions_list = []
    for core_reaction in self.reactions:
        # BUG FIX: the original dict literal listed "reactant_types"
        # twice; the redundant duplicate key has been removed.
        reaction = {"reactant_coefficients": core_reaction.reactant_coefficients,
                    "reactant_types": core_reaction.reactant_types,
                    "product_types": core_reaction.product_types,
                    "product_coefficients": core_reaction.product_coefficients,
                    "gamma": core_reaction.gamma}
        reactions_list.append(reaction)

    return {"reactions": reactions_list, "kT": self.kT,
            "exclusion_radius": self.exclusion_radius}
@script_interface_register
class ReactionEnsemble(ReactionAlgorithm):
    """
    This class implements the Reaction Ensemble.
    """

    # All behavior lives in ReactionAlgorithm; this subclass only binds
    # the core implementation of the reaction-ensemble acceptance rule.
    _so_name = "ReactionMethods::ReactionEnsemble"
    _so_creation_policy = "LOCAL"
@script_interface_register
class ConstantpHEnsemble(ReactionAlgorithm):
    """
    This class implements the constant pH Ensemble.

    When adding an acid-base reaction, the acid and base particle types
    are always assumed to be at index 0 of the lists passed to arguments
    ``reactant_types`` and ``product_types``.

    Attributes
    ----------
    constant_pH : :obj:`float`
        Constant pH value.

    """
    _so_name = "ReactionMethods::ConstantpHEnsemble"
    _so_creation_policy = "LOCAL"

    def valid_keys(self):
        """Return the accepted constructor keyword arguments."""
        return {"kT", "exclusion_radius", "seed", "constant_pH"}

    def required_keys(self):
        """Return the mandatory constructor keyword arguments."""
        return {"kT", "exclusion_radius", "seed", "constant_pH"}

    def add_reaction(self, *args, **kwargs):
        """Add an acid-base reaction (see base class for parameters).

        Stoichiometric coefficients are fixed to 1 in the constant pH
        method; passing them explicitly is deprecated and only checked
        for consistency.
        """
        warn_msg = (
            "arguments 'reactant_coefficients' and 'product_coefficients' "
            "are deprecated and are no longer necessary for the constant pH "
            "ensemble. They are kept for backward compatibility but might "
            "be deleted in future versions.")
        err_msg = ("All product and reactant coefficients must equal one in "
                   "the constant pH method as implemented in ESPResSo.")
        warn_user = False

        if "reactant_coefficients" in kwargs:
            if kwargs["reactant_coefficients"][0] != 1:
                raise ValueError(err_msg)
            else:
                warn_user = True
        else:
            kwargs["reactant_coefficients"] = [1]

        if "product_coefficients" in kwargs:
            if kwargs["product_coefficients"][0] != 1 or kwargs["product_coefficients"][1] != 1:
                raise ValueError(err_msg)
            else:
                warn_user = True
        else:
            kwargs["product_coefficients"] = [1, 1]

        if warn_user:
            warnings.warn(warn_msg, FutureWarning)

        # Idiom fix: drop the C-style parenthesized condition.  The
        # method only supports HA <-> A- + H+ style reactions.
        if len(kwargs["product_types"]) != 2 or len(kwargs["reactant_types"]) != 1:
            raise ValueError(
                "The constant pH method is only implemented for reactions "
                "with two product types and one adduct type.")

        super().add_reaction(*args, **kwargs)
@script_interface_register
class WidomInsertion(ReactionAlgorithm):
    """
    This class implements the Widom insertion method in the canonical ensemble
    for homogeneous systems, where the excess chemical potential is not
    depending on the location.

    """

    _so_name = "ReactionMethods::WidomInsertion"
    _so_creation_policy = "LOCAL"

    def required_keys(self):
        """Return the mandatory constructor keyword arguments."""
        return {"kT", "seed"}

    def valid_keys(self):
        """Return the accepted constructor keyword arguments."""
        return {"kT", "seed"}

    def add_reaction(self, **kwargs):
        # Widom insertion is a virtual move, so the reaction constant is
        # irrelevant; pin it to 1.
        kwargs['gamma'] = 1.
        super().add_reaction(**kwargs)

    def calculate_particle_insertion_potential_energy(self, **kwargs):
        """
        Measures the potential energy when particles are inserted in the
        system following the reaction provided ``reaction_id``. Please
        define the insertion moves first by calling the method
        :meth:`~ReactionAlgorithm.add_reaction` (with only product types
        specified).

        Note that although this function does not provide directly
        the chemical potential, it can be used to calculate it.
        For an example of such an application please check
        :file:`/samples/widom_insertion.py`.
        """
        # make inverse widom scheme (deletion of particles) inaccessible.
        # The deletion reactions are the odd reaction_ids
        return self.call_method(
            "calculate_particle_insertion_potential_energy", **kwargs)

    def calculate_excess_chemical_potential(
            self, **kwargs):
        """
        Given a set of samples of the particle insertion potential energy,
        calculates the excess chemical potential and its statistical error.

        Parameters
        ----------
        particle_insertion_potential_energy_samples : array_like of :obj:`float`
            Samples of the particle insertion potential energy.
        N_blocks : :obj:`int`, optional
            Number of bins for binning analysis.

        Returns
        -------
        mean : :obj:`float`
            Mean excess chemical potential.
        error : :obj:`float`
            Standard error of the mean.
        """

        def do_block_analysis(samples, N_blocks):
            """
            Performs a binning analysis of samples.
            Divides the samples in ``N_blocks`` equispaced blocks
            and returns the mean and its uncertainty
            """
            size_block = int(len(samples) / N_blocks)
            block_list = []
            for block in range(N_blocks):
                block_list.append(
                    np.mean(samples[block * size_block:(block + 1) * size_block]))

            sample_mean = np.mean(block_list)
            sample_std = np.std(block_list, ddof=1)
            sample_uncertainty = sample_std / np.sqrt(N_blocks)

            return sample_mean, sample_uncertainty

        kT = self.kT

        # Boltzmann factors exp(-beta * E_insert) of the samples.
        gamma_samples = np.exp(-1.0 * np.array(
            kwargs["particle_insertion_potential_energy_samples"]) / kT)

        gamma_mean, gamma_std = do_block_analysis(
            samples=gamma_samples, N_blocks=kwargs.get("N_blocks", 16))

        # mu_ex = -kT * ln(<exp(-beta E)>)
        mu_ex_mean = -kT * np.log(gamma_mean)

        # full propagation of error
        mu_ex_Delta = 0.5 * kT * abs(-np.log(gamma_mean + gamma_std) -
                                     (-np.log(gamma_mean - gamma_std)))

        return mu_ex_mean, mu_ex_Delta
| pkreissl/espresso | src/python/espressomd/reaction_ensemble.py | Python | gpl-3.0 | 21,499 |
# -*- encoding: utf-8 -*-
# Pilas engine - A video game framework.
#
# Copyright 2010 - Hugo Ruscitti
# License: LGPLv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# Website - http://www.pilas-engine.com.ar
import pilas
from pilas.actores import Texto
class Opcion(Texto):
    """A selectable text entry of a menu.

    Wraps a ``Texto`` actor with a callback to run when the option is
    chosen, plus a helper to highlight the focused entry.
    """

    def __init__(self, texto, x=0, y=0, funcion_a_invocar=None):
        Texto.__init__(self, texto, x=x, y=y)
        self.magnitud = 20
        self.funcion_a_invocar = funcion_a_invocar
        # Unselected options are drawn gray; see ``resaltar``.
        self.color = pilas.colores.gris
        self.z = -300
        self.centro = ("centro", "centro")

    def resaltar(self, estado=True):
        "Paints the current option in a lighter color (gray when not focused)."
        if estado:
            self.color = pilas.colores.blanco
        else:
            self.color = pilas.colores.gris

    def seleccionar(self):
        "Invokes the associated callback, warning if none was configured."
        if self.funcion_a_invocar:
            self.funcion_a_invocar()
        else:
            print "Cuidado, la opcion", self, "no tiene funcion asociada."
| cristian99garcia/pilas-activity | pilas/actores/opcion.py | Python | gpl-3.0 | 1,044 |
# Copyright 2022 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding=utf-8
"""Functionality related to the re-alignment of step fields."""
import enum
from typing import List
from rlds import rlds_types
from rlds.transformations import batched_helpers
from rlds.transformations import flexible_batch
import tensorflow.compat.v2 as tf
def shift_keys(
    steps: tf.data.Dataset,
    keys: List[str],
    shift: int,
    batch_size: int = flexible_batch.BATCH_AUTO_TUNE) -> tf.data.Dataset:
  """Shifts `key` from every element in `keys` with `shift` positions.

  If step_i has keys A_i and B_i and we shift A -1 positions, step_i will
  contain A_{i-1} and B_i. B_0 will be discarded, as well as A_n (where n
  is the last step)

  Args:
    steps: `tf.data.Dataset` of steps.
    keys: list of step keys that have to be shifted.
    shift: number of positions to shift. Note that the shift is expressed as
      an index. So, in step_i, the indicated keys will be now correspond
      to the values of step_{i+shift}.
    batch_size: batch size to use when applying the transformation. It should be
      larger than `shift`. The default value (BATCH_AUTO_TUNE) makes batch size
      selection automatic.

  Returns:
    `tf.data.Dataset` of steps where the elements with key in `keys` are
    shifted. The resulting dataset is of size `steps.cardinality() - shift`
  """
  if batch_size == flexible_batch.BATCH_AUTO_TUNE:
    # NOTE(review): for a negative shift, 4*shift is negative and does not
    # enlarge the automatic batch size -- confirm this is intended.
    batch_size = max(4*shift, flexible_batch.get_batch_size(steps, batch_size))

  def shift_field(field, shift):
    # Positive shift drops elements from the front, negative from the back.
    if shift > 0:
      return tf.nest.map_structure(lambda x: x[shift:], field)
    else:
      return tf.nest.map_structure(lambda x: x[:shift], field)

  def realign_batch(batch):
    # Shifted keys move forward by `shift`; all other keys move backward
    # so that the two groups line up inside the batch.
    result = {}
    for k in batch:
      if k not in keys:
        result[k] = shift_field(batch[k], -shift)
      else:
        result[k] = shift_field(batch[k], shift)
    return result

  if abs(shift) >= batch_size:
    # BUG FIX: the second string fragment was missing its ``f`` prefix, so
    # the literal text "{batch_size}" appeared in the error message.
    raise ValueError(f'Trying to shift {shift} positions using a batch size '
                     f'of {batch_size} (abs(shift) should be smaller than the '
                     'batch size).')
  batch_shift = batch_size - abs(shift)
  return batched_helpers.batched_map(
      steps, map_fn=realign_batch, size=batch_size, shift=batch_shift)
@enum.unique
class AlignmentType(enum.Enum):
  """Possible orderings of state/action/reward within an RLDS step."""
  # Explicit values matching what enum.auto() assigned in declaration order.
  UNKNOWN = 1
  SAR = 2
  ARS = 3
  RSA = 4
def add_alignment_to_step(step: rlds_types.Step, alignment: AlignmentType):
  """Tag *step* in place with the numeric alignment type and return it."""
  step.update({rlds_types.ALIGNMENT: alignment.value})
  return step
| google-research/rlds | rlds/transformations/alignment.py | Python | apache-2.0 | 3,077 |
# Author: Jayden Navarro
# Date: 10/14/2015
class HashTable():
    """Chained hash table for integers using multiplicative hashing.

    Buckets are plain lists; the table doubles its capacity whenever the
    load factor exceeds ``LOAD_THRESHOLD``.  Duplicate insertions are
    kept (each ``insert`` adds one entry).
    """

    LOAD_THRESHOLD = 0.3     # resize once size/capacity exceeds this
    GOLDEN_VALUE = 0.618034  # (sqrt(5)-1)/2, Knuth's multiplicative constant
    LOW_CAP = 10             # minimum initial capacity

    def __init__(self, capacity=-1):
        self.size = 0
        self.capacity = max(self.LOW_CAP, capacity)
        self.table = [None] * self.capacity

    def loadFactor(self):
        """Return the current load factor size / capacity."""
        return self.size / self.capacity

    def resize(self):
        """Double the capacity and re-hash every stored element."""
        oldTable = self.table
        self.capacity *= 2
        self.table = [None] * self.capacity
        for bucket in oldTable:
            if bucket is not None:
                for element in bucket:
                    self._insert(element)

    def insert(self, element):
        """Insert *element*, growing the table first if it is too full."""
        self.size += 1
        if self.loadFactor() > self.LOAD_THRESHOLD:
            self.resize()
        self._insert(element)

    def _insert(self, element):
        # Place element into its bucket without touching self.size.
        hashNum = self._hash(element)
        if self.table[hashNum] is None:
            self.table[hashNum] = []
        self.table[hashNum].append(element)

    def find(self, element):
        """Return True if *element* is present."""
        bucket = self.table[self._hash(element)]
        return bucket is not None and element in bucket

    def remove(self, element):
        """Remove one occurrence of *element* if present.

        BUG FIX: the original decremented ``size`` unconditionally, so
        removing an absent element corrupted the count; the counter now
        changes only when something is actually removed.
        """
        bucket = self.table[self._hash(element)]
        if bucket is not None and element in bucket:
            bucket.remove(element)
            self.size -= 1

    def _hash(self, element):
        # Multiplicative hashing: fractional part of element * phi,
        # scaled up to the current table capacity.
        return int(self.capacity * (element * self.GOLDEN_VALUE -
                                    int(element * self.GOLDEN_VALUE)))
# --- Smoke test: exercise insert/find/remove, then print table stats ---
HT = HashTable()
for i in range(32):
    HT.insert(i)
assert HT.find(23) == True
assert HT.find(32) == False
HT.remove(23)
assert HT.find(23) == False
print("Hash Table Capacity: %d" % HT.capacity)
print("Hash Table Entries: %d" % HT.size)
print("Hash Table Load Factor: %0.4f" % HT.loadFactor())
| JDNdeveloper/Interview-Practice-Python | src/HashTable.py | Python | mit | 1,734 |
import sys
import unittest
sys.path.insert(1, '.')
import General_Maths
class test_stuff(unittest.TestCase):
    """Unit tests for the General_Maths module: prime sieve, prime
    factorisation, Fibonacci and integer reversal, including the
    NotIntegerError / OutOfRangeError error paths."""

    # --- Sieve_Of_Eratosthenes ---
    def test_Sieve_upto_20(self):
        expected = [2, 3, 5, 7, 11, 13, 17, 19]
        actual = General_Maths.Sieve_Of_Eratosthenes(20)
        self.assertEqual(expected, actual)

    def test_sieve_with_prime(self):
        # The limit is inclusive: a prime limit appears in its own result.
        expected = [2, 3, 5, 7, 11, 13, 17]
        actual = General_Maths.Sieve_Of_Eratosthenes(17)
        self.assertEqual(expected, actual)

    def test_sieve_with_non_int(self):
        self.assertRaises(General_Maths.NotIntegerError,
                          General_Maths.Sieve_Of_Eratosthenes, 'a')

    def test_sieve_with_limit_less_than_two(self):
        self.assertRaises(General_Maths.OutOfRangeError,
                          General_Maths.Sieve_Of_Eratosthenes, 1)

    # --- find_prime_factors ---
    def test_find_prime_factors_of_twenty(self):
        # Result maps prime -> multiplicity: 20 = 2^2 * 5.
        expected = { 2: 2, 5: 1}
        actual = General_Maths.find_prime_factors(20)
        self.assertEqual(expected, actual)

    def test_find_prime_factor_prime_returns_prime(self):
        expected = { 17: 1}
        actual = General_Maths.find_prime_factors(17)
        self.assertEqual(expected, actual)

    def test_find_prime_factors_with_char(self):
        self.assertRaises(General_Maths.NotIntegerError,
                          General_Maths.find_prime_factors, 'a')

    def test_find_prime_factors_with_less_than_two(self):
        self.assertRaises(General_Maths.OutOfRangeError,
                          General_Maths.find_prime_factors, -1)

    # --- Fibonacci ---
    def test_fibonacci_upto_ten(self):
        expected = [1, 1, 2, 3, 5, 8]
        actual = General_Maths.Fibonacci(10)
        self.assertEqual(expected, actual)

    def test_fibonacci_with_limit_less_than_one(self):
        self.assertRaises(General_Maths.OutOfRangeError,
                          General_Maths.Fibonacci, -1)

    def test_fibonacci_with_char(self):
        self.assertRaises(General_Maths.NotIntegerError,
                          General_Maths.Fibonacci, 'a')

    def test_fibonacci_with_non_int(self):
        # Floats are rejected, not truncated.
        self.assertRaises(General_Maths.NotIntegerError,
                          General_Maths.Fibonacci, 0.5)

    # --- Reverse_Int ---
    def test_reverse_number_with_123(self):
        self.assertEqual(General_Maths.Reverse_Int(123), 321)

    def test_reverse_number_with_non_integer(self):
        self.assertRaises(General_Maths.NotIntegerError,
                          General_Maths.Reverse_Int, 'a')

    def test_reverse_number_with_less_than_one(self):
        self.assertRaises(General_Maths.OutOfRangeError,
                          General_Maths.Reverse_Int, -1)
if __name__ == '__main__':
    unittest.main()  # discover and run every test in this module
| jonathan0wilson/general_scripts | tests/General_Maths_Tests.py | Python | mit | 2,669 |
"""Imports:
TrackData: for containing the extracted information
"""
import TrackData
# Valid Frame IDs for each of the different versions
# Valid Frame IDs for each of the different versions.
# See the ID3v2.2 / v2.3 / v2.4 informal standards for the ID definitions.
_V22_FRAME_IDS = [
    "BUF", "CNT", "COM", "CRA", "CRM", "ETC", "EQU", "GEO", "IPL", "LNK", "MCI",
    "MLL", "PIC", "POP", "REV", "RVA", "SLT", "STC", "TAL", "TBP", "TCM", "TCO",
    "TCR", "TDA", "TDY", "TEN", "TFT", "TIM", "TKE", "TLA", "TLE", "TMT", "TOA",
    "TOF", "TOL", "TOR", "TOT", "TP1", "TP2", "TP3", "TP4", "TPA", "TPB", "TRC",
    "TRD", "TRK", "TSI", "TSS", "TT1", "TT2", "TT3", "TXT", "TXX", "TYE", "UFI",
    "ULT", "WAF", "WAR", "WAS", "WCM", "WCP", "WPB", "WXX"]
_V23_FRAME_IDS = [
    "AENC", "APIC", "COMM", "COMR", "ENCR", "EQUA", "ETCO", "GEOB", "GRID",
    "IPLS", "LINK", "MCDI", "MLLT", "OWNE", "PRIV", "PCNT", "POPM", "POSS",
    "RBUF", "RVAD", "RVRB", "SYLT", "SYTC", "TALB", "TBPM", "TCOM", "TCON",
    "TCOP", "TDAT", "TDLY", "TENC", "TEXT", "TFLT", "TIME", "TIT1", "TIT2",
    "TIT3", "TKEY", "TLAN", "TLEN", "TMED", "TOAL", "TOFN", "TOLY", "TOPE",
    "TORY", "TOWN", "TPE1", "TPE2", "TPE3", "TPE4", "TPOS", "TPUB", "TRCK",
    "TRDA", "TRSN", "TRSO", "TSIZ", "TSRC", "TSSE", "TYER", "TXXX", "UFID",
    "USER", "USLT", "WCOM", "WCOP", "WOAF", "WOAR", "WOAS", "WORS", "WPAY",
    "WPUB", "WXXX"]
# v2.4 keeps every v2.3 ID and adds these.
_V24_FRAME_IDS = _V23_FRAME_IDS + [
    "ASPI", "EQU2", "RVA2", "SEEK", "SIGN", "TDEN", "TDOR", "TDRC", "TDRL",
    "TDTG", "TIPL", "TMCL", "TMOO", "TPRO", "TSOA", "TSOP", "TSOT", "TSST"]
# Mapping from each v2.2 three-character ID to its v2.3 four-character
# equivalent.
_V22_V23_FRAME_ID_MAPPINGS = {
    "BUF": "RBUF", "CNT": "PCNT", "COM": "COMM", "CRA": "AENC", "CRM": "ENCR",
    "ETC": "ETCO", "EQU": "EQUA", "GEO": "GEOB", "IPL": "IPLS", "LNK": "LINK",
    "MCI": "MCDI", "MLL": "MLLT", "PIC": "APIC", "POP": "POPM", "REV": "RVRB",
    "RVA": "RVAD", "SLT": "SYLT", "STC": "SYTC", "TAL": "TALB", "TBP": "TBPM",
    "TCM": "TCOM", "TCO": "TCON", "TCR": "TCOP", "TDA": "TDAT", "TDY": "TDLY",
    "TEN": "TENC", "TFT": "TFLT", "TIM": "TIME", "TKE": "TKEY", "TLA": "TLAN",
    "TLE": "TLEN", "TMT": "TMED", "TOA": "TOPE", "TOF": "TOFN", "TOL": "TOLY",
    "TOR": "TORY", "TOT": "TOAL", "TP1": "TPE1", "TP2": "TPE2", "TP3": "TPE3",
    "TP4": "TPE4", "TPA": "TPOS", "TPB": "TPUB", "TRC": "TSRC", "TRD": "TRDA",
    "TRK": "TRCK", "TSI": "TSIZ", "TSS": "TSSE", "TT1": "TIT1", "TT2": "TIT2",
    "TT3": "TIT3", "TXT": "TEXT", "TXX": "TXXX", "TYE": "TYER", "UFI": "UFID",
    "ULT": "USLT", "WAF": "WOAF", "WAR": "WOAR", "WAS": "WOAS", "WCM": "WCOM",
    "WCP": "WCOP", "WPB": "WPUB", "WXX": "WXXX"}
# The reverse mapping is derived rather than hand-maintained so the two
# directions can never fall out of sync (the mapping is one-to-one).
_V23_V22_FRAME_ID_MAPPINGS = {
    v23: v22 for v22, v23 in _V22_V23_FRAME_ID_MAPPINGS.items()}
# IDs beginning with these characters are reserved for experimental use.
_EXPERIMENTAL_FRAME_ID_PREFIXS = ["X", "Y", "Z"]
def _read_32bit_syncsafe(byte_data):
    """Decodes four syncsafe bytes into a 28-bit unsigned integer.

    Each byte carries 7 useful bits; the most significant bit of every byte
    must be zero (that is what makes the value "syncsafe").

    Args:
        byte_data: character array of bytes. Must be 4 bytes long

    Returns:
        unsigned int representation of the byte data
    """
    octets = [ord(byte_data[i]) for i in range(4)]
    if any(octet & 0x80 for octet in octets):
        raise Exception("Attempt to read an invalid 32-bit syncsafe integer")
    result = 0
    for octet in octets:
        result = (result << 7) | octet
    return result
def _read_32bit_nonsyncsafe(byte_data):
    """Decodes four bytes as a big-endian 32-bit unsigned integer.

    Args:
        byte_data: character array of bytes. Must be 4 bytes long

    Returns:
        unsigned int representation of the byte data
    """
    result = 0
    for index in range(4):
        result = (result << 8) | ord(byte_data[index])
    return result
def _read_24bit_nonsyncsafe(byte_data):
    """Decodes three bytes as a big-endian 24-bit unsigned integer.

    Args:
        byte_data: character array of bytes. Must be 3 bytes long

    Returns:
        unsigned int representation of the byte data
    """
    result = 0
    for index in range(3):
        result = (result << 8) | ord(byte_data[index])
    return result
def _read_16bit_nonsyncsafe(byte_data):
    """Decodes two bytes as a big-endian 16-bit unsigned integer.

    Args:
        byte_data: character array of bytes. Must be 2 bytes long

    Returns:
        unsigned int representation of the byte data
    """
    high, low = ord(byte_data[0]), ord(byte_data[1])
    return (high << 8) | low
class _TagHeader(object):
    """ID3v2 tag header

    Attributes:
        version: int major version number (e.g. 4 for ID3v2.4.0)
        version_minor: int minor version number (e.g. 0 for ID3v2.4.0)
        header_size: int byte size of header data
        body_size: int byte size of all non-header data in the tag
        flags: dictionary mapping strings to flag values. Values are the raw
            masked ints rather than bools; only truthiness matters.
        extended_header_size: int byte size of this version's extended header,
            if it has one. This attribute does not describe whether or not an
            extended header exists, only what size it would be if it did.
        frame_header_size: int byte size of this version's frame header size.
    """
    def __init__(self, header_data):
        """Interprets byte data as a tag header to build object

        Args:
            header_data: character array of bytes representing the tag header
                (at least 10 bytes)
        """
        # Check we're actually looking at an ID3v2 tag
        if header_data[:3] != "ID3":
            raise Exception("Given data does not contain an ID3v2 tag header")
        # Extract version number and assert it's supported (v2.2 - v2.4;
        # 0xFF is forbidden as a version byte by the spec)
        self.version = ord(header_data[3])
        self.version_minor = ord(header_data[4])
        if self.version < 2 or self.version > 4 or self.version_minor == 0xFF:
            raise Exception("Unknown version 'ID3v2.%d.%d'" % (self.version, \
                self.version_minor))
        # Extract flags depending on version; each version defines a
        # different set of bits in the single flag byte.
        flag_int = ord(header_data[5])
        self.flags = {}
        if self.version == 2:
            self.extended_header_size = 0
            self.frame_header_size = 6
            self.flags['unsynchronisation'] = flag_int & 0x80
            self.flags['compression'] = flag_int & 0x40
            unknown_flags = flag_int & ~0xC0
        elif self.version == 3:
            self.extended_header_size = 10
            self.frame_header_size = 10
            self.flags['unsynchronisation'] = flag_int & 0x80
            self.flags['extended_header'] = flag_int & 0x40
            self.flags['experimental'] = flag_int & 0x20
            unknown_flags = flag_int & ~0xE0
        elif self.version == 4:
            self.extended_header_size = 6
            self.frame_header_size = 10
            self.flags['unsynchronisation'] = flag_int & 0x80
            self.flags['extended_header'] = flag_int & 0x40
            self.flags['experimental'] = flag_int & 0x20
            self.flags['footer_present'] = flag_int & 0x10
            unknown_flags = flag_int & ~0xF0
        # TODO: Handle unsupported flags
        # Ensure flags are valid (no undefined bits set)
        if unknown_flags != 0:
            raise Exception("Unknown flags '0x%02X' (should be 0)" % (unknown_flags))
        # Extract size; the tag size field is always a 32-bit syncsafe int
        self.header_size = 10
        self.body_size = _read_32bit_syncsafe(header_data[6:10])
        # Assert frame size is valid
        if self.body_size == 0:
            raise Exception("Invalid Tag Size '0'")

    def __str__(self):
        """Override string printing method"""
        set_flags = [flag for flag in self.flags if self.flags[flag]]
        return "ID3v2.%d.%d Size=%d %s" % (self.version, self.version_minor,
            self.body_size, ','.join(set_flags))

    def has_extended_header(self):
        """Whether or not this the tag has an extended header

        Returns:
            bool, True if the tag has an extended header, False otherwise
            (v2.2 has no extended-header flag, so this is always False there)
        """
        if 'extended_header' in self.flags:
            return self.flags['extended_header'] != 0
        return False
class _TagExtendedHeader(object):
    """ID3v2 extended tag header

    Attributes:
        version: int version of the tag this extended header relates to
        header_size: int byte size of header data
        body_size: int byte size of all non-header data in this section
        flags: dictionary mapping strings to flag values (raw masked ints;
            only truthiness matters)
    """
    def __init__(self, version, xheader_data):
        """Interprets byte data as an extended tag header to build object

        Args:
            version: int version of the tag this frame was read from
            xheader_data: character array of bytes representing the extended tag
                header
        """
        self.flags = {}
        self.version = version
        if version == 3:
            # v2.3: 4-byte non-syncsafe size (which excludes the header
            # bytes) followed by 2 flag bytes.
            self.header_size = 10
            self.body_size = _read_32bit_nonsyncsafe(xheader_data[0:4])
            flag_int = _read_16bit_nonsyncsafe(xheader_data[4:6])
            self.flags['crc_data_present'] = flag_int & 0x8000
            unknown_flags = flag_int & ~0x8000
        elif version == 4:
            # v2.4: syncsafe size that INCLUDES the 6 header bytes, hence
            # the subtraction to leave only the body size.
            self.header_size = 6
            self.body_size = _read_32bit_syncsafe(xheader_data[0:4]) - 6
            flag_int = ord(xheader_data[5])
            self.flags['tag_is_an_update'] = flag_int & 0x40
            self.flags['crc_data_present'] = flag_int & 0x20
            self.flags['tag_restrictions'] = flag_int & 0x10
            unknown_flags = flag_int & ~0x70
        else:
            # v2.2 defines no extended header at all.
            raise Exception("Extended header on invalid version 'ID3v2.%d'" % (version))
        # TODO: Handle unsupported flags
        # Ensure flags are valid (no undefined bits set)
        if unknown_flags != 0:
            raise Exception("Unknown flags '0x%02X' (should be 0)" % (unknown_flags))

    def __str__(self):
        """Override string printing method"""
        set_flags = [flag for flag in self.flags if self.flags[flag]]
        return "ExtendedHeader Size=%d %s" % (self.body_size, ','.join(set_flags))
class _FrameHeader(object):
    """ID3v2 tag frame

    Attributes:
        id: string id of the frame
        version: int version of the tag this frame was read from
        body_offset: the absolute file offset of this frame's body
        header_size: int byte size of header data
        body_size: int byte size of all non-header data in the frame
        flags: dictionary mapping strings to flag values (raw masked ints;
            only truthiness matters)
    """
    def __init__(self, version, header_data, offset):
        """Interprets byte data as a frame header to build object

        Args:
            version: int version of the tag this frame was read from
            header_data: character array of bytes representing the tag header
            offset: int absolute file offset of this frame's body
        """
        self.flags = {}
        self.version = version
        self.body_offset = offset
        # Extract header information depending on version
        if version == 2:
            # v2.2: 3-char IDs, 24-bit sizes, no flag bytes.
            self.id = header_data[:3]
            self.header_size = 6
            self.body_size = _read_24bit_nonsyncsafe(header_data[3:6])
            unknown_flags = 0
            valid_ids = _V22_FRAME_IDS
        elif version == 3:
            # v2.3: 4-char IDs, plain (non-syncsafe) 32-bit sizes, 2 flag bytes.
            self.id = header_data[:4]
            self.header_size = 10
            self.body_size = _read_32bit_nonsyncsafe(header_data[4:8])
            flag_int = _read_16bit_nonsyncsafe(header_data[8:10])
            self.flags['tag_alter_preservation'] = flag_int & 0x8000
            self.flags['file_alter_preservation'] = flag_int & 0x4000
            self.flags['read_only'] = flag_int & 0x2000
            self.flags['compression'] = flag_int & 0x0080
            self.flags['encryption'] = flag_int & 0x0040
            self.flags['grouping_identity'] = flag_int & 0x0020
            unknown_flags = flag_int & ~0xE0E0
            valid_ids = _V23_FRAME_IDS
        elif version == 4:
            # v2.4: 4-char IDs, syncsafe 32-bit sizes; the flag bits moved
            # relative to v2.3 and two new ones were added.
            self.id = header_data[:4]
            self.header_size = 10
            self.body_size = _read_32bit_syncsafe(header_data[4:8])
            flag_int = _read_16bit_nonsyncsafe(header_data[8:10])
            self.flags['tag_alter_preservation'] = flag_int & 0x4000
            self.flags['file_alter_preservation'] = flag_int & 0x2000
            self.flags['read_only'] = flag_int & 0x1000
            self.flags['grouping_identity'] = flag_int & 0x0040
            self.flags['compression'] = flag_int & 0x0008
            self.flags['encryption'] = flag_int & 0x0004
            self.flags['unsynchronisation'] = flag_int & 0x0002
            self.flags['data_length_indicator'] = flag_int & 0x0001
            unknown_flags = flag_int & ~0x704F
            valid_ids = _V24_FRAME_IDS
        else:
            raise Exception("Unknown tag version 'ID3v2.%d'" % (version))
        # Assert frame id is known (X/Y/Z prefixes are experimental and
        # always allowed)
        if not (self.id[0] in _EXPERIMENTAL_FRAME_ID_PREFIXS or self.id in valid_ids):
            if version == 3 and self.id[:3] in _V22_FRAME_IDS:
                # Some archaic players (iTunes 6.0 in particular) write out
                # v2.3 tags but using v2.2 Frame IDs. Completely against the
                # standard but this affects enough files it's worth addressing...
                self.id = _V22_V23_FRAME_ID_MAPPINGS[self.id[:3]]
                # TODO: Warning?
            else:
                raise Exception("Unknown ID3v2.%d Frame ID '%s'" % (self.version, self.id))
        # Assert frame size is valid
        # TODO: Handle this properly
        if self.body_size == 0:
            #raise Exception("Invalid ID3v2.%d Frame Size '0'" % (self.version))
            # BUGFIX: this was a Python 2 print *statement*; the
            # single-argument call form below behaves identically on
            # Python 2 and Python 3.
            print("WARNING: Empty frame found. Technically illegal")
        # TODO: Handle unsupported flags
        # Ensure flags are valid (no undefined bits set)
        if unknown_flags != 0:
            raise Exception("Unknown ID3v2.%d Flags '0x%04X' (should be 0)" % \
                (self.version, unknown_flags))

    def __str__(self):
        """Override string printing method"""
        set_flags = [flag for flag in self.flags if self.flags[flag]]
        return "%s Size=%d %s" % (self.id, self.body_size, ','.join(set_flags))

    def read_body(self, file_handle):
        """Reads this frame's body content from the file

        Args:
            file_handle: file handle to read the data from. Must be opened at
                least as 'rb'.

        Returns:
            character byte array of data from the frame's body
        """
        file_handle.seek(self.body_offset, 0)
        return file_handle.read(self.body_size)
class _Tag(object):
    """ID3v2 tag

    Attributes:
        header: _TagHeader ID3v2 tag header
        extended_header: _TagExtendedHeader ID3v2 tag extended header, or None
        frames: dict mapping string frame ID to its _FrameHeader (the
            docstring previously said "list"; it has always been a dict)
    """
    def __init__(self, file_handle):
        """Reads an ID3v2 tag from a file. File must contain a tag.

        Args:
            file_handle: file handle open in at least 'rb' mode to read tag from
        """
        file_handle.seek(0, 0)
        # Read header
        header_data = file_handle.read(10)
        self.header = _TagHeader(header_data)
        total_size = self.header.header_size + self.header.body_size
        # Read extended header (if applicable); its payload is skipped as
        # nothing in it is used here.
        if self.header.has_extended_header():
            xheader_data = file_handle.read(self.header.extended_header_size)
            self.extended_header = _TagExtendedHeader(self.header.version, xheader_data)
            file_handle.seek(self.extended_header.body_size, 1)
        else:
            self.extended_header = None
        # Read frames
        self.frames = {}
        while file_handle.tell() < total_size:
            fheader_data = file_handle.read(self.header.frame_header_size)
            if fheader_data[0] == '\0':
                # If we have read a null byte we have reached the end of the
                # tag. It turns out the majority of ID3 tags are heavily padded
                # and are actually significantly longer than necessary so
                # editors can modify without having to rewrite the entire MP3
                # file. This is poorly documented.
                # The ID3 tags I have tested are typically between 500 and 1000
                # bytes while actual allocation is around 4200 bytes per tag.
                break
            frame = _FrameHeader(self.header.version, fheader_data, file_handle.tell())
            self.__add_frame(frame)
            file_handle.seek(frame.body_size, 1)

    def __str__(self):
        """Override string printing method"""
        # BUGFIX: previously iterated the dict itself, printing only the
        # frame IDs (keys); iterate the _FrameHeader values so each frame's
        # own __str__ (ID, size, flags) is shown as intended.
        frames_str = '\n  '.join([str(frame) for frame in self.frames.values()])
        if self.extended_header:
            return "%s\n  %s\n  %s" % (str(self.header), str(self.extended_header), \
                frames_str)
        else:
            return "%s\n  %s" % (self.header, frames_str)

    def __add_frame(self, frame):
        """Adds the frame header to this tag

        Args:
            frame: _FrameHeader frame header to add

        Returns:
            None
        """
        self.frames[frame.id] = frame

    def __get_frame(self, frame_id):
        """Retrieves the frame header with the given ID

        Args:
            frame_id: ID of the frame header to retrieve

        Returns:
            _FrameHeader frame header, or None if this tag has no such frame
        """
        try:
            return self.frames[frame_id]
        except KeyError:
            return None

    def __frame_text(self, file_handle, v22_id, v23_id):
        """Reads a text frame's decoded body, selecting the frame ID by the
        tag's version (v2.3 and v2.4 share the four-character IDs).

        Args:
            file_handle: open file handle to read the frame body from
            v22_id: string frame ID used by ID3v2.2 tags
            v23_id: string frame ID used by ID3v2.3/2.4 tags

        Returns:
            string frame text or None if the frame is absent (or the
            version is unsupported)
        """
        version = self.header.version
        if version == 2:
            frame_id = v22_id
        elif version == 3 or version == 4:
            frame_id = v23_id
        else:
            return None
        frame = self.__get_frame(frame_id)
        if frame:
            return _read_frame_text(frame.read_body(file_handle))
        return None

    def get_artist(self, file_handle):
        """Retrieves the track artist data from this tag

        Args:
            file_handle: open file handle to read the frame body from

        Returns:
            string track artist or None if this tag doesn't contain it
        """
        return self.__frame_text(file_handle, "TP1", "TPE1")

    def get_album(self, file_handle):
        """Retrieves the track album data from this tag

        Args:
            file_handle: open file handle to read the frame body from

        Returns:
            string track album or None if this tag doesn't contain it
        """
        return self.__frame_text(file_handle, "TAL", "TALB")

    def get_title(self, file_handle):
        """Retrieves the track title data from this tag

        Args:
            file_handle: open file handle to read the frame body from

        Returns:
            string track title or None if this tag doesn't contain it
        """
        return self.__frame_text(file_handle, "TT2", "TIT2")

    def get_track(self, file_handle):
        """Retrieves the track number from this tag

        Args:
            file_handle: open file handle to read the frame body from

        Returns:
            int track number or None if this tag doesn't contain it
        """
        body_data = self.__frame_text(file_handle, "TRK", "TRCK")
        if body_data is not None:
            # Track frames may be of the form "track/total"; keep the track.
            return TrackData.mint(body_data.split('/')[0])
        return None

    def get_year(self, file_handle):
        """Retrieves the track year from this tag

        Args:
            file_handle: open file handle to read the frame body from

        Returns:
            int track year or None if this tag doesn't contain it
        """
        body_data = self.__frame_text(file_handle, "TYE", "TYER")
        if body_data is not None:
            return TrackData.mint(body_data[0:4])
        return None

    def get_data(self, file_handle):
        """Extracts TrackData from this tag

        Args:
            file_handle: open file handle to read the relevant frames from

        Returns:
            TrackData with this tag's raw data
        """
        data = TrackData.TrackData()
        data.artist = self.get_artist(file_handle)
        data.album = self.get_album(file_handle)
        data.title = self.get_title(file_handle)
        data.track = self.get_track(file_handle)
        data.year = self.get_year(file_handle)
        return data
def _read_frame_text(body_data):
    """Parses a textual frame body as a python string

    The first byte of a text frame is an encoding marker; the remainder is
    the text itself in that encoding.

    Args:
        body_data: character array of bytes read from the frame body

    Returns:
        python string, decoded according to its character encoding (the raw
        body is returned untouched for unknown encoding markers)
    """
    # TODO: Deal with unicode properly (not using encode('ascii', 'replace'))
    codec_by_marker = {
        0: 'iso-8859-1',  # ISO-8859-1
        1: 'utf_16',      # UTF-16 (with BOM)
        2: 'utf_16_be',   # UTF-16BE
        3: 'utf_8',       # UTF-8
    }
    codec = codec_by_marker.get(ord(body_data[0]))
    if codec is None:
        return body_data
    return body_data[1:].decode(codec).encode('ascii', 'replace')
def calculate_tag_size(file_handle):
    """Calculates the size of an ID3v2.x tag

    Args:
        file_handle: a file handle opened in a readable binary mode

    Returns:
        int number of bytes in the tag, or 0 if the file does not have one
    """
    # Peek at the first ten bytes without disturbing the caller's position.
    saved_pos = file_handle.tell()
    file_handle.seek(0, 0)
    header_bytes = file_handle.read(10)
    file_handle.seek(saved_pos, 0)
    # No magic marker means no tag at all.
    if header_bytes[:3] != "ID3":
        return 0
    header = _TagHeader(header_bytes)
    return header.header_size + header.body_size
def read_tag_data(file_path):
    """Reads the ID3v2 tag data from a file (if present).

    ID3 v2.2.x, 2.3.x and 2.4.x tags are all supported.

    Args:
        file_path: String path to the file to read the tag from.

    Returns:
        A TrackData with the fields initialised to the data read from the tag.
        Non-present fields will be initialised to None. If no valid tag exists
        None will be returned.
    """
    with open(file_path, "rb", 0) as f:
        # _Tag.__init__ seeks back to the start itself, so this peek is safe.
        has_tag = f.read(3) == "ID3"
        # If we don't have a tag, drop out
        if not has_tag:
            return None
        # Parse the tag
        tag = _Tag(f)
        data = tag.get_data(f)
        # clean the strings generated
        data.clean(False)
        return data
    # NOTE: an unreachable trailing "return None" after the with-block was
    # removed; every path above already returns.
def create_tag_string(data, file_path):
    """Converts the given TrackData into a ID3v2.3.0 tag.

    Args:
        data: A TrackData object whose data will be put into the tag.
        file_path: A string file path to the MP3 file this TrackData is
            originally from. This is required as we read all other ID3v2 frames
            from it so we may preserve them. This is a bit of a hack.

    Returns:
        A string of the correct byte length representing the ID3v2.3.0 tag.
    """
    # FIXME: No need to take file_path and read file again. Should tag all
    # frames to the TrackData originally returned.
    def create_id3v2_frame_string(frame_id, frame_content):
        """Constructs an id3v2.3 text content frame.

        v2.3 frame sizes are plain (non-syncsafe) 32-bit big-endian ints.

        Args:
            frame_id: A string frame ID (four character identifier).
            frame_content: A string to place in the frame.

        Returns:
            A string representing this text frame
        """
        size = len(frame_content) + 2 # encoding mark + content + null byte
        size_b1 = (size >> 24) % 256
        size_b2 = (size >> 16) % 256
        size_b3 = (size >> 8) % 256
        size_b4 = size % 256
        size_string = chr(size_b1) + chr(size_b2) + chr(size_b3) + chr(size_b4)
        flag_string = chr(0) + chr(0)
        frame = frame_id
        frame += size_string
        frame += flag_string
        frame += '\00' # encoding mark (ISO-8859-1)
        frame += frame_content
        frame += '\00'
        return frame

    # read the entire input file in.
    with open(file_path, "rb") as f:
        track_data = f.read()
    # check what existing id3v2 tag we have (if any). if we have one, separate
    # the track from it.
    had_id3v2_tag = False
    if track_data[:3] == "ID3":
        had_id3v2_tag = True
        # Parse the tag header.
        tag = _TagHeader(track_data)
        total_tag_size = tag.header_size + tag.body_size
        tag_data = track_data[:total_tag_size]
    # create a new tag and add our data to it
    # write the frames to it (we do this before we write the header so we can
    # calculate the size)
    new_frames = create_id3v2_frame_string("TIT2", data.title)
    new_frames += create_id3v2_frame_string("TALB", data.album)
    new_frames += create_id3v2_frame_string("TPE1", data.artist)
    if data.track > 0:
        new_frames += create_id3v2_frame_string("TRCK", str(data.track))
    if data.year > 0:
        new_frames += create_id3v2_frame_string("TYER", str(data.year))
    # if we had an id3v2 tag before, copy the frames that we are not going to
    # replace over from it. This leaves frames we aren't updating unchanged - an
    # important consideration as some programs (i.e. windows media player) store
    # their own data in them and in some cases the frames will store user data
    # which will have taken some time to generate/collect, e.g. the POPM tag
    # (though this is far from a standard itself).
    if had_id3v2_tag:
        # TODO: This is going to screw up with id3v2.2 tags
        total_read_size = 10
        while total_read_size < total_tag_size:
            if tag_data[total_read_size] == '\00':
                break
            frame_data = tag_data[total_read_size:total_read_size+10]
            frame_id = frame_data[0:4]
            # BUGFIX: frame sizes are syncsafe only in ID3v2.4; v2.3 frames
            # use plain 32-bit sizes (as _FrameHeader above already does).
            # Reading a v2.3 size as syncsafe mis-sized any frame of 128
            # bytes or more, corrupting every frame copied after it.
            if tag.version == 4:
                total_frame_size = _read_32bit_syncsafe(frame_data[4:8]) + 10
            else:
                total_frame_size = _read_32bit_nonsyncsafe(frame_data[4:8]) + 10
            # TODO: This if statement could be extended to include other frames
            # to be left out, or even replaced with just PRIV frames (UFID and
            # POPM should probably also be kept as they contain information
            # which will have been generated by other media players and is not
            # easily reproducible). For now I have chosen to err on the side of
            # caution and leave all other frames intact, but for a completely
            # clean and identically tagged music collection this is an option.
            if frame_id != "TALB" and \
               frame_id != "TIT2" and \
               frame_id != "TPE1" and \
               frame_id != "TRCK" and \
               frame_id != "TYER":
                new_frames += tag_data[total_read_size:total_read_size+total_frame_size]
            total_read_size += total_frame_size
    # calculate the size and add padding (I don't really like this approach, but
    # I guess there's a reason all the tracks I tested include large amounts of
    # padding so I will re-pad). Doing it at this stage leaves the option to
    # have the amount of padding added dependent on the tag size. For now simply
    # add 500 bytes of padding.
    new_frames += '\00' * 500
    size = len(new_frames)
    # produce the size string (the overall tag size is a 28-bit syncsafe
    # integer: 7 useful bits per byte)
    size_b1 = (size >> 21) % 128
    size_b2 = (size >> 14) % 128
    size_b3 = (size >> 7) % 128
    size_b4 = size % 128
    size_string = chr(size_b1) + chr(size_b2) + chr(size_b3) + chr(size_b4)
    # write the header
    new_header = "ID3" # tag identifier
    new_header += chr(3) + chr(0) # tag version number (v2.3.0)
    new_header += chr(0) # flags
    new_header += size_string
    return new_header + new_frames
| jonsim/music_tagger | ID3v2.py | Python | gpl-3.0 | 29,580 |
import itertools
from sqlalchemy import Boolean
from sqlalchemy import delete
from sqlalchemy import exc as sa_exc
from sqlalchemy import func
from sqlalchemy import insert
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import select
from sqlalchemy import Sequence
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import type_coerce
from sqlalchemy import update
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import AssertsExecutionResults
from sqlalchemy.testing import engines
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.types import TypeDecorator
class ReturnCombinationTests(fixtures.TestBase, AssertsCompiledSQL):
    """Compile-only tests (PostgreSQL dialect) for combining .returning()
    with each DML construct, and for the mutual exclusion between
    returning() and return_defaults()."""

    __dialect__ = "postgresql"

    @testing.fixture
    def table_fixture(self):
        # Simple four-column table; "q" carries a server-side default.
        return Table(
            "foo",
            MetaData(),
            Column("id", Integer, primary_key=True),
            Column("q", Integer, server_default="5"),
            Column("x", Integer),
            Column("y", Integer),
        )

    @testing.combinations(
        (
            insert,
            "INSERT INTO foo (id, q, x, y) "
            "VALUES (%(id)s, %(q)s, %(x)s, %(y)s)",
        ),
        (update, "UPDATE foo SET id=%(id)s, q=%(q)s, x=%(x)s, y=%(y)s"),
        (delete, "DELETE FROM foo"),
        argnames="dml_fn, sql_frag",
        id_="na",
    )
    def test_return_combinations(self, table_fixture, dml_fn, sql_frag):
        # Successive .returning() calls accumulate columns in order.
        t = table_fixture
        stmt = dml_fn(t)
        stmt = stmt.returning(t.c.x)
        stmt = stmt.returning(t.c.y)
        self.assert_compile(
            stmt,
            "%s RETURNING foo.x, foo.y" % (sql_frag),
        )

    def test_return_no_return_defaults(self, table_fixture):
        # return_defaults() after returning() must raise.
        t = table_fixture
        stmt = t.insert()
        stmt = stmt.returning(t.c.x)
        assert_raises_message(
            sa_exc.InvalidRequestError,
            "RETURNING is already configured on this statement",
            stmt.return_defaults,
        )

    def test_return_defaults_no_returning(self, table_fixture):
        # returning() after return_defaults() must raise.
        t = table_fixture
        stmt = t.insert()
        stmt = stmt.return_defaults()
        assert_raises_message(
            sa_exc.InvalidRequestError,
            r"return_defaults\(\) is already configured on this statement",
            stmt.returning,
            t.c.x,
        )
class ReturningTest(fixtures.TablesTest, AssertsExecutionResults):
__requires__ = ("returning",)
__backend__ = True
run_create_tables = "each"
    @classmethod
    def define_tables(cls, metadata):
        """Creates the "tables" test table, plus GoofyType: a TypeDecorator
        that prepends "FOO" on bind and appends "BAR" on result fetch, used
        to verify that RETURNING runs result-type processing."""
        class GoofyType(TypeDecorator):
            impl = String
            cache_ok = True

            def process_bind_param(self, value, dialect):
                # None passes through untouched in both directions.
                if value is None:
                    return None
                return "FOO" + value

            def process_result_value(self, value, dialect):
                if value is None:
                    return None
                return value + "BAR"

        cls.GoofyType = GoofyType
        Table(
            "tables",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("persons", Integer),
            Column("full", Boolean),
            Column("goofy", GoofyType(50)),
            Column("strval", String(50)),
        )
    def test_column_targeting(self, connection):
        """RETURNING rows are addressable both by Column object and by
        string key, with type processing applied (goofy gets FOO...BAR)."""
        table = self.tables.tables
        result = connection.execute(
            table.insert().returning(table.c.id, table.c.full),
            {"persons": 1, "full": False},
        )
        row = result.first()._mapping
        assert row[table.c.id] == row["id"] == 1
        assert row[table.c.full] == row["full"]
        assert row["full"] is False
        result = connection.execute(
            table.insert()
            .values(persons=5, full=True, goofy="somegoofy")
            .returning(table.c.persons, table.c.full, table.c.goofy)
        )
        row = result.first()._mapping
        assert row[table.c.persons] == row["persons"] == 5
        assert row[table.c.full] == row["full"]
        eq_(row[table.c.goofy], row["goofy"])
        eq_(row["goofy"], "FOOsomegoofyBAR")
    def test_labeling(self, connection):
        """A .label() applied to a returned column becomes the row's key."""
        table = self.tables.tables
        result = connection.execute(
            table.insert()
            .values(persons=6)
            .returning(table.c.persons.label("lala"))
        )
        row = result.first()._mapping
        assert row["lala"] == 6
    def test_anon_expressions(self, connection):
        """Anonymous SQL expressions can be returned; an explicit type_
        carries custom result processing (GoofyType appends BAR), while a
        plain arithmetic expression returns its computed value."""
        table = self.tables.tables
        GoofyType = self.GoofyType
        result = connection.execute(
            table.insert()
            .values(goofy="someOTHERgoofy")
            .returning(func.lower(table.c.goofy, type_=GoofyType))
        )
        row = result.first()
        eq_(row[0], "foosomeothergoofyBAR")
        result = connection.execute(
            table.insert().values(persons=12).returning(table.c.persons + 18)
        )
        row = result.first()
        eq_(row[0], 30)
    def test_update_returning(self, connection):
        """UPDATE ... RETURNING yields only rows matched by the WHERE
        clause; the other row is left unmodified."""
        table = self.tables.tables
        connection.execute(
            table.insert(),
            [{"persons": 5, "full": False}, {"persons": 3, "full": False}],
        )
        result = connection.execute(
            table.update()
            .values(dict(full=True))
            .where(table.c.persons > 4)
            .returning(table.c.id)
        )
        eq_(result.fetchall(), [(1,)])
        result2 = connection.execute(
            select(table.c.id, table.c.full).order_by(table.c.id)
        )
        eq_(result2.fetchall(), [(1, True), (2, False)])
    @testing.fails_on(
        "mssql",
        "driver has unknown issue with string concatenation "
        "in INSERT RETURNING",
    )
    def test_insert_returning_w_expression_one(self, connection):
        """A string-concatenation expression in INSERT ... RETURNING is
        evaluated server-side; the stored column value is unchanged."""
        table = self.tables.tables
        result = connection.execute(
            table.insert().returning(table.c.strval + "hi"),
            {"persons": 5, "full": False, "strval": "str1"},
        )
        eq_(result.fetchall(), [("str1hi",)])
        result2 = connection.execute(
            select(table.c.id, table.c.strval).order_by(table.c.id)
        )
        eq_(result2.fetchall(), [(1, "str1")])
    def test_insert_returning_w_type_coerce_expression(self, connection):
        """type_coerce(col, String) in RETURNING skips GoofyType's
        result-side processing: the value shows the bind-side "FOO" prefix
        but no "BAR" suffix, while a normal SELECT applies both."""
        table = self.tables.tables
        result = connection.execute(
            table.insert().returning(type_coerce(table.c.goofy, String)),
            {"persons": 5, "goofy": "somegoofy"},
        )
        eq_(result.fetchall(), [("FOOsomegoofy",)])
        result2 = connection.execute(
            select(table.c.id, table.c.goofy).order_by(table.c.id)
        )
        eq_(result2.fetchall(), [(1, "FOOsomegoofyBAR")])
def test_update_returning_w_expression_one(self, connection):
    """UPDATE..RETURNING with a string-concatenation expression."""
    table = self.tables.tables
    connection.execute(
        table.insert(),
        [
            {"persons": 5, "full": False, "strval": "str1"},
            {"persons": 3, "full": False, "strval": "str2"},
        ],
    )
    # only the matched row (persons > 4) is returned
    result = connection.execute(
        table.update()
        .where(table.c.persons > 4)
        .values(full=True)
        .returning(table.c.strval + "hi")
    )
    eq_(result.fetchall(), [("str1hi",)])

    # stored values are unchanged by the RETURNING expression
    result2 = connection.execute(
        select(table.c.id, table.c.strval).order_by(table.c.id)
    )
    eq_(result2.fetchall(), [(1, "str1"), (2, "str2")])
def test_update_returning_w_type_coerce_expression(self, connection):
    """UPDATE..RETURNING with type_coerce(); returned value gets only
    the coerced type's processing, stored value keeps the column
    type's full round trip."""
    table = self.tables.tables
    connection.execute(
        table.insert(),
        [
            {"persons": 5, "goofy": "somegoofy1"},
            {"persons": 3, "goofy": "somegoofy2"},
        ],
    )
    result = connection.execute(
        table.update()
        .where(table.c.persons > 4)
        .values(goofy="newgoofy")
        .returning(type_coerce(table.c.goofy, String))
    )
    eq_(result.fetchall(), [("FOOnewgoofy",)])

    result2 = connection.execute(
        select(table.c.id, table.c.goofy).order_by(table.c.id)
    )
    eq_(
        result2.fetchall(),
        [(1, "FOOnewgoofyBAR"), (2, "FOOsomegoofy2BAR")],
    )
@testing.requires.full_returning
def test_update_full_returning(self, connection):
    """Multi-row UPDATE..RETURNING (requires "full returning" support)."""
    table = self.tables.tables
    connection.execute(
        table.insert(),
        [{"persons": 5, "full": False}, {"persons": 3, "full": False}],
    )
    result = connection.execute(
        table.update()
        .where(table.c.persons > 2)
        .values(full=True)
        .returning(table.c.id, table.c.full)
    )
    # both rows match, both are returned
    eq_(result.fetchall(), [(1, True), (2, True)])
@testing.requires.full_returning
def test_delete_full_returning(self, connection):
    """Multi-row DELETE..RETURNING (requires "full returning" support)."""
    table = self.tables.tables
    connection.execute(
        table.insert(),
        [{"persons": 5, "full": False}, {"persons": 3, "full": False}],
    )
    # unfiltered DELETE returns every deleted row
    result = connection.execute(
        table.delete().returning(table.c.id, table.c.full)
    )
    eq_(result.fetchall(), [(1, False), (2, False)])
def test_insert_returning(self, connection):
    """Basic single-row INSERT..RETURNING of the primary key."""
    table = self.tables.tables
    result = connection.execute(
        table.insert().returning(table.c.id), {"persons": 1, "full": False}
    )
    eq_(result.fetchall(), [(1,)])
@testing.requires.multivalues_inserts
def test_multirow_returning(self, connection):
    """Multi-VALUES INSERT..RETURNING returns one row per values entry."""
    table = self.tables.tables
    ins = (
        table.insert()
        .returning(table.c.id, table.c.persons)
        .values(
            [
                {"persons": 1, "full": False},
                {"persons": 2, "full": True},
                {"persons": 3, "full": False},
            ]
        )
    )
    result = connection.execute(ins)
    eq_(result.fetchall(), [(1, 1), (2, 2), (3, 3)])
def test_no_ipk_on_returning(self, connection):
    """inserted_primary_key is unavailable when returning() is used
    explicitly; accessing it must raise InvalidRequestError."""
    table = self.tables.tables
    result = connection.execute(
        table.insert().returning(table.c.id), {"persons": 1, "full": False}
    )
    assert_raises_message(
        sa_exc.InvalidRequestError,
        r"Can't call inserted_primary_key when returning\(\) is used.",
        getattr,
        result,
        "inserted_primary_key",
    )
@testing.fails_on_everything_except("postgresql")
def test_literal_returning(self, connection):
    """Raw-SQL INSERT..RETURNING via exec_driver_sql (PostgreSQL only)."""
    # else-branch is kept for symmetry; the decorator restricts this
    # test to postgresql in practice
    if testing.against("postgresql"):
        literal_true = "true"
    else:
        literal_true = "1"

    result4 = connection.exec_driver_sql(
        'insert into tables (id, persons, "full") '
        "values (5, 10, %s) returning persons" % literal_true
    )
    eq_([dict(row._mapping) for row in result4], [{"persons": 10}])
def test_delete_returning(self, connection):
    """DELETE..RETURNING yields ids of the deleted rows only."""
    table = self.tables.tables
    connection.execute(
        table.insert(),
        [{"persons": 5, "full": False}, {"persons": 3, "full": False}],
    )
    result = connection.execute(
        table.delete().where(table.c.persons > 4).returning(table.c.id)
    )
    eq_(result.fetchall(), [(1,)])

    # the unmatched row is still present
    result2 = connection.execute(
        select(table.c.id, table.c.full).order_by(table.c.id)
    )
    eq_(result2.fetchall(), [(2, False)])
class CompositeStatementTest(fixtures.TestBase):
    """Ensure an embedded SELECT does not leak its result processing
    into the enclosing statement's RETURNING results."""

    __requires__ = ("returning",)
    __backend__ = True

    @testing.provide_metadata
    def test_select_doesnt_pollute_result(self, connection):
        class MyType(TypeDecorator):
            impl = Integer
            cache_ok = True

            def process_result_value(self, value, dialect):
                # would fire if the scalar subquery's type were applied
                # to the INSERT's RETURNING row
                raise Exception("I have not been selected")

        t1 = Table("t1", self.metadata, Column("x", MyType()))
        t2 = Table("t2", self.metadata, Column("x", Integer))

        self.metadata.create_all(connection)
        connection.execute(t1.insert().values(x=5))

        stmt = (
            t2.insert()
            .values(x=select(t1.c.x).scalar_subquery())
            .returning(t2.c.x)
        )

        result = connection.execute(stmt)
        eq_(result.scalar(), 5)
class SequenceReturningTest(fixtures.TablesTest):
    """RETURNING of a sequence-generated primary key."""

    __requires__ = "returning", "sequences"
    __backend__ = True

    @classmethod
    def define_tables(cls, metadata):
        seq = Sequence("tid_seq")
        Table(
            "tables",
            metadata,
            Column(
                "id",
                Integer,
                seq,
                primary_key=True,
            ),
            Column("data", String(50)),
        )
        # keep a handle on the sequence so tests can fire it directly
        cls.sequences.tid_seq = seq

    def test_insert(self, connection):
        table = self.tables.tables
        r = connection.execute(
            table.insert().values(data="hi").returning(table.c.id)
        )
        eq_(r.first(), tuple([testing.db.dialect.default_sequence_base]))
        # the next sequence value follows the one consumed by the insert
        eq_(
            connection.execute(self.sequences.tid_seq),
            testing.db.dialect.default_sequence_base + 1,
        )
class KeyReturningTest(fixtures.TablesTest, AssertsExecutionResults):
    """test returning() works with columns that define 'key'."""

    __requires__ = ("returning",)
    __backend__ = True

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "tables",
            metadata,
            Column(
                "id",
                Integer,
                primary_key=True,
                key="foo_id",
                test_needs_autoincrement=True,
            ),
            Column("data", String(20)),
        )

    @testing.exclude("postgresql", "<", (8, 2), "8.2+ feature")
    def test_insert(self, connection):
        table = self.tables.tables
        result = connection.execute(
            table.insert().returning(table.c.foo_id), dict(data="somedata")
        )
        # the RETURNING row is addressable both via the Column object
        # (which carries key="foo_id") and the database-side name "id"
        row = result.first()._mapping
        assert row[table.c.foo_id] == row["id"] == 1

        # re-select the row and assert against the *new* mapping; the
        # previous version re-tested the stale ``row`` here, leaving
        # the SELECT result entirely unverified
        row2 = connection.execute(table.select()).first()._mapping
        assert row2[table.c.foo_id] == row2["id"] == 1
class ReturnDefaultsTest(fixtures.TablesTest):
    """Tests for ``return_defaults()`` on insert/update constructs."""

    __requires__ = ("returning",)
    run_define_tables = "each"
    __backend__ = True

    @classmethod
    def define_tables(cls, metadata):
        from sqlalchemy.sql import ColumnElement
        from sqlalchemy.ext.compiler import compiles

        counter = itertools.count()

        class IncDefault(ColumnElement):
            pass

        @compiles(IncDefault)
        def compile_(element, compiler, **kw):
            # each compilation of the default emits the next integer,
            # making server-side "defaults" predictable per statement
            return str(next(counter))

        Table(
            "t1",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("data", String(50)),
            Column("insdef", Integer, default=IncDefault()),
            Column("upddef", Integer, onupdate=IncDefault()),
        )

    def test_chained_insert_pk(self, connection):
        t1 = self.tables.t1
        result = connection.execute(
            t1.insert().values(upddef=1).return_defaults(t1.c.insdef)
        )
        eq_(
            [
                result.returned_defaults._mapping[k]
                for k in (t1.c.id, t1.c.insdef)
            ],
            [1, 0],
        )

    def test_arg_insert_pk(self, connection):
        # same as test_chained_insert_pk but with return_defaults()
        # placed before values() in the chain
        t1 = self.tables.t1
        result = connection.execute(
            t1.insert().return_defaults(t1.c.insdef).values(upddef=1)
        )
        eq_(
            [
                result.returned_defaults._mapping[k]
                for k in (t1.c.id, t1.c.insdef)
            ],
            [1, 0],
        )

    def test_chained_update_pk(self, connection):
        t1 = self.tables.t1
        connection.execute(t1.insert().values(upddef=1))
        result = connection.execute(
            t1.update().values(data="d1").return_defaults(t1.c.upddef)
        )
        eq_(
            [result.returned_defaults._mapping[k] for k in (t1.c.upddef,)], [1]
        )

    def test_arg_update_pk(self, connection):
        t1 = self.tables.t1
        connection.execute(t1.insert().values(upddef=1))
        result = connection.execute(
            t1.update().return_defaults(t1.c.upddef).values(data="d1")
        )
        eq_(
            [result.returned_defaults._mapping[k] for k in (t1.c.upddef,)], [1]
        )

    def test_insert_non_default(self, connection):
        """test that a column not marked at all as a
        default works with this feature."""

        t1 = self.tables.t1
        result = connection.execute(
            t1.insert().values(upddef=1).return_defaults(t1.c.data)
        )
        eq_(
            [
                result.returned_defaults._mapping[k]
                for k in (t1.c.id, t1.c.data)
            ],
            [1, None],
        )

    def test_update_non_default(self, connection):
        """test that a column not marked at all as a
        default works with this feature."""

        t1 = self.tables.t1
        connection.execute(t1.insert().values(upddef=1))
        result = connection.execute(
            t1.update().values(upddef=2).return_defaults(t1.c.data)
        )
        eq_(
            [result.returned_defaults._mapping[k] for k in (t1.c.data,)],
            [None],
        )

    def test_insert_sql_expr(self, connection):
        from sqlalchemy import literal

        t1 = self.tables.t1
        result = connection.execute(
            t1.insert().return_defaults().values(insdef=literal(10) + 5)
        )
        # a SQL expression for insdef is also reported back
        eq_(
            result.returned_defaults._mapping,
            {"id": 1, "data": None, "insdef": 15, "upddef": None},
        )

    def test_update_sql_expr(self, connection):
        from sqlalchemy import literal

        t1 = self.tables.t1
        connection.execute(t1.insert().values(upddef=1))
        result = connection.execute(
            t1.update().values(upddef=literal(10) + 5).return_defaults()
        )
        eq_(result.returned_defaults._mapping, {"upddef": 15})

    def test_insert_non_default_plus_default(self, connection):
        t1 = self.tables.t1
        result = connection.execute(
            t1.insert()
            .values(upddef=1)
            .return_defaults(t1.c.data, t1.c.insdef)
        )
        eq_(
            dict(result.returned_defaults._mapping),
            {"id": 1, "data": None, "insdef": 0},
        )
        # inserted_primary_key remains available with return_defaults()
        eq_(result.inserted_primary_key, (1,))

    def test_update_non_default_plus_default(self, connection):
        t1 = self.tables.t1
        connection.execute(t1.insert().values(upddef=1))
        result = connection.execute(
            t1.update()
            .values(insdef=2)
            .return_defaults(t1.c.data, t1.c.upddef)
        )
        eq_(
            dict(result.returned_defaults._mapping),
            {"data": None, "upddef": 1},
        )

    def test_insert_all(self, connection):
        t1 = self.tables.t1
        result = connection.execute(
            t1.insert().values(upddef=1).return_defaults()
        )
        eq_(
            dict(result.returned_defaults._mapping),
            {"id": 1, "data": None, "insdef": 0},
        )
        eq_(result.inserted_primary_key, (1,))

    def test_update_all(self, connection):
        t1 = self.tables.t1
        connection.execute(t1.insert().values(upddef=1))
        result = connection.execute(
            t1.update().values(insdef=2).return_defaults()
        )
        eq_(dict(result.returned_defaults._mapping), {"upddef": 1})

    @testing.requires.insert_executemany_returning
    def test_insert_executemany_no_defaults_passed(self, connection):
        t1 = self.tables.t1
        result = connection.execute(
            t1.insert().return_defaults(),
            [
                {"data": "d1"},
                {"data": "d2"},
                {"data": "d3"},
                {"data": "d4"},
                {"data": "d5"},
                {"data": "d6"},
            ],
        )
        # executemany exposes per-row data via the *_rows accessors
        eq_(
            [row._mapping for row in result.returned_defaults_rows],
            [
                {"id": 1, "insdef": 0, "upddef": None},
                {"id": 2, "insdef": 0, "upddef": None},
                {"id": 3, "insdef": 0, "upddef": None},
                {"id": 4, "insdef": 0, "upddef": None},
                {"id": 5, "insdef": 0, "upddef": None},
                {"id": 6, "insdef": 0, "upddef": None},
            ],
        )
        eq_(
            result.inserted_primary_key_rows,
            [(1,), (2,), (3,), (4,), (5,), (6,)],
        )
        # ...while the scalar accessors must refuse to answer
        assert_raises_message(
            sa_exc.InvalidRequestError,
            "This statement was an executemany call; "
            "if return defaults is supported",
            lambda: result.returned_defaults,
        )
        assert_raises_message(
            sa_exc.InvalidRequestError,
            "This statement was an executemany call; "
            "if primary key returning is supported",
            lambda: result.inserted_primary_key,
        )

    @testing.requires.insert_executemany_returning
    def test_insert_executemany_insdefault_passed(self, connection):
        t1 = self.tables.t1
        result = connection.execute(
            t1.insert().return_defaults(),
            [
                {"data": "d1", "insdef": 11},
                {"data": "d2", "insdef": 12},
                {"data": "d3", "insdef": 13},
                {"data": "d4", "insdef": 14},
                {"data": "d5", "insdef": 15},
                {"data": "d6", "insdef": 16},
            ],
        )
        # insdef was supplied explicitly, so it is not reported back
        eq_(
            [row._mapping for row in result.returned_defaults_rows],
            [
                {"id": 1, "upddef": None},
                {"id": 2, "upddef": None},
                {"id": 3, "upddef": None},
                {"id": 4, "upddef": None},
                {"id": 5, "upddef": None},
                {"id": 6, "upddef": None},
            ],
        )
        eq_(
            result.inserted_primary_key_rows,
            [(1,), (2,), (3,), (4,), (5,), (6,)],
        )
        assert_raises_message(
            sa_exc.InvalidRequestError,
            "This statement was an executemany call; "
            "if return defaults is supported",
            lambda: result.returned_defaults,
        )
        assert_raises_message(
            sa_exc.InvalidRequestError,
            "This statement was an executemany call; "
            "if primary key returning is supported",
            lambda: result.inserted_primary_key,
        )

    @testing.requires.insert_executemany_returning
    def test_insert_executemany_only_pk_passed(self, connection):
        t1 = self.tables.t1
        result = connection.execute(
            t1.insert().return_defaults(),
            [
                {"id": 10, "data": "d1"},
                {"id": 11, "data": "d2"},
                {"id": 12, "data": "d3"},
                {"id": 13, "data": "d4"},
                {"id": 14, "data": "d5"},
                {"id": 15, "data": "d6"},
            ],
        )
        # pk supplied explicitly: only the true server defaults return
        eq_(
            [row._mapping for row in result.returned_defaults_rows],
            [
                {"insdef": 0, "upddef": None},
                {"insdef": 0, "upddef": None},
                {"insdef": 0, "upddef": None},
                {"insdef": 0, "upddef": None},
                {"insdef": 0, "upddef": None},
                {"insdef": 0, "upddef": None},
            ],
        )
        eq_(
            result.inserted_primary_key_rows,
            [(10,), (11,), (12,), (13,), (14,), (15,)],
        )
class ImplicitReturningFlag(fixtures.TestBase):
    """Behavior of the engine-level ``implicit_returning`` flag."""

    __backend__ = True

    def test_flag_turned_off(self):
        e = engines.testing_engine(options={"implicit_returning": False})
        assert e.dialect.implicit_returning is False
        # the flag must survive the first connect
        c = e.connect()
        c.close()
        assert e.dialect.implicit_returning is False

    def test_flag_turned_on(self):
        e = engines.testing_engine(options={"implicit_returning": True})
        assert e.dialect.implicit_returning is True
        c = e.connect()
        c.close()
        assert e.dialect.implicit_returning is True

    def test_flag_turned_default(self):
        # capture whether the current backend supports RETURNING by
        # running the requirement predicate against a probe function
        supports = [False]

        def go():
            supports[0] = True

        testing.requires.returning(go)()

        e = engines.testing_engine()

        # version detection on connect sets it
        c = e.connect()
        c.close()
        assert e.dialect.implicit_returning is supports[0]
| zzzeek/sqlalchemy | test/sql/test_returning.py | Python | mit | 25,076 |
"""
WSGI config for dndsite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dndsite.settings")
application = get_wsgi_application()
| TheIncredibleK/dndtoolbox | dndsite/wsgi.py | Python | mit | 392 |
##
## This file is part of the sigrok project.
##
## Copyright (C) 2012 Uwe Hermann <uwe@hermann-uwe.de>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
##
'''
Epson RTC-8564 JE/NB protocol decoder.
Details:
TODO
'''
from .rtc8564 import *
| robacklin/sigrok | decoders/rtc8564/__init__.py | Python | gpl-3.0 | 912 |
"""
.. UIExample:: 100
from flexx import app, ui
# A red widget
class Example(ui.Widget):
CSS = ".flx-example {background:#f00; min-width: 20px; min-height:20px}"
"""
import json
import threading
from .. import react
from ..app import Pair, get_instance_by_id
from ..app.serialize import serializer
def _check_two_scalars(name, v):
    """ Validate that *v* is a list/tuple of exactly two scalars.

    Returns the value as a 2-tuple of floats. Raises ValueError with
    *name* in the message otherwise.

    The previous version did not check the length: a 3+ element tuple
    was silently truncated to its first two items, and a 0/1 element
    tuple raised IndexError instead of the documented ValueError.
    """
    if not (isinstance(v, (list, tuple)) and len(v) == 2 and
            isinstance(v[0], (int, float)) and
            isinstance(v[1], (int, float))):
        raise ValueError('%s must be a tuple of two scalars.' % name)
    return float(v[0]), float(v[1])
# Keep track of stack of default parents when using widgets
# as context managers. Have one list for each thread.
_default_parents_per_thread = {}  # dict of thread-ident -> list


def _get_default_parents():
    """ Get list that represents the stack of default parents.
    Each thread has its own stack.
    """
    # threading.get_ident() gives the per-thread key directly; the old
    # hasattr-based fallback to ``currentThread`` was only needed for
    # Python versions predating ``current_thread`` (< 2.6) and is dead
    # code on any interpreter this package supports.
    return _default_parents_per_thread.setdefault(threading.get_ident(), [])
class Widget(Pair):
    """ Base widget class.

    In HTML-speak, this represents a plain div-element. Not very useful
    on itself, except perhaps to fill up space. Subclass to create
    something interesting.

    When *subclassing* a Widget to create a compound widget (a widget
    that serves as a container for other widgets), use the ``init()``
    method to initialize child widgets. This method is called while
    the widget is the current widget.

    When subclassing to create a custom widget use the ``_init()``
    method both for the Python and JS version of the class.
    """

    def __init__(self, **kwargs):
        # todo: -> parent is widget or ref to div element
        parent = kwargs.pop('parent', None)
        # Apply default parent?  (set by a Widget used as a context manager)
        if parent is None:
            default_parents = _get_default_parents()
            if default_parents:
                parent = default_parents[-1]

        # Use parent proxy unless proxy was given
        if parent is not None and not kwargs.get('_proxy', None):
            kwargs['proxy'] = parent.proxy

        # Provide css class name to JS.  The slice drops the entries
        # for Widget's own bases (Pair, ..., object), keeping the class
        # names from the concrete class down to 'flx-widget'.
        classes = ['flx-' + c.__name__.lower() for c in self.__class__.mro()]
        classname = ' '.join(classes[:1-len(Widget.mro())])

        # Pass properties via kwargs
        kwargs['_css_class_name'] = classname
        kwargs['parent'] = parent
        Pair.__init__(self, **kwargs)

        # Run subclass initialization with this widget as default parent
        with self:
            self.init()

        # Signal dependencies may have been added during init(), also in JS
        self.connect_signals(False)
        cmd = 'flexx.instances.%s.connect_signals(false);' % self._id
        self._proxy._exec(cmd)

    def _repr_html_(self):
        """ This is to get the widget shown inline in the notebook.
        """
        if self.container_id():
            return "<i>This widget is already shown in this notebook</i>"

        container_id = self.id + '_container'

        # NOTE(review): "cointainer" is a typo, but the name is local-only.
        def set_cointainer_id():
            self.container_id._set(container_id)
        # Set container id, this gets applied in the next event loop
        # iteration, so by the time it gets called in JS, the div that
        # we define below will have been created.
        from ..app import call_later
        call_later(0.1, set_cointainer_id)  # todo: always do calls in next iter

        return "<div class='flx-container' id=%s />" % container_id

    def init(self):
        """ Overload this to initialize a custom widget. Inside, this
        widget is the current parent.
        """
        pass

    def disconnect_signals(self, *args):
        """ Overloaded version of disconnect_signals() that will also
        disconnect the signals of any child widgets.
        """
        # snapshot children first; disconnecting may mutate relations
        children = self.children()
        Pair.disconnect_signals(self, *args)
        for child in children:
            child.disconnect_signals(*args)

    def __enter__(self):
        # Note that __exit__ is guaranteed to be called, so there is
        # no need to use weak refs for items stored in default_parents
        default_parents = _get_default_parents()
        default_parents.append(self)
        return self

    def __exit__(self, type, value, traceback):
        default_parents = _get_default_parents()
        assert self is default_parents.pop(-1)
        #if value is None:
        #    self.update()

    @react.source
    def container_id(v=''):
        """ The id of the DOM element that contains this widget if
        parent is None.
        """
        return str(v)

    @react.input
    def parent(v=None):
        """ The parent widget, or None if it has no parent.
        """
        if v is None or isinstance(v, Widget):
            return v
        else:
            raise ValueError('parent must be a widget or None')

    # Note that both the Py and JS have their own children signal
    # todo: prevent unnecessary updates
    @react.source
    def children(v=()):
        """ The child widgets of this widget.
        """
        assert all([isinstance(w, Widget) for w in v])
        return tuple(v)

    @react.input
    def flex(v=0):
        """ How much space this widget takes when contained in a layout.
        A flex of 0 means to take the minimum size.
        """
        return float(v)

    @react.input
    def pos(v=(0, 0)):
        """ The position of the widget when it is in a layout that allows
        positioning.
        """
        return _check_two_scalars('pos', v)

    @react.input
    def size(v=(0, 0)):
        """ The size of the widget when it is in a layout that allows
        positioning.
        """
        return _check_two_scalars('size', v)

    @react.input
    def min_size(v=(0, 0)):
        """ The minimum size of the widget.
        """
        return _check_two_scalars('min_size', v)

    @react.input
    def bgcolor(v=''):
        """ Background color of the widget. In general it is better to do
        styling via CSS.
        """
        return str(v)

    # todo: can we calculate this in JS somehow?
    @react.input
    def _css_class_name(self, v=''):
        v = str(v)
        if getattr(self, '_IS_APP', False):  # set when a widget is made into an app
            v = 'flx-main-widget ' + v
        return v

    @react.connect('parent')
    def _parent_changed_py(self, new_parent):
        # keep the Python-side children signals of both the old and the
        # new parent in sync with this widget's parent
        old_parent = self.parent.last_value
        if old_parent is not None:
            children = list(old_parent.children()[:])
            while self in children:
                children.remove(self)
            old_parent.children._set(children)
        if new_parent is not None:
            children = list(new_parent.children()[:])
            children.append(self)
            new_parent.children._set(children)

    CSS = """
    .flx-container {
        min-height: 10px; /* splitter sets its own minsize if contained */
    }
    .flx-widget {
        box-sizing: border-box;
        white-space: nowrap;
        overflow: hidden;
    }
    .flx-main-widget {
        width: 100%;
        height: 100%;
    }
    """

    class JS:
        # This class is transpiled and runs client-side; names such as
        # ``this``, ``window``, ``document``, ``undefined`` and
        # ``setTimeout`` resolve in the JavaScript environment.

        def _init(self):
            self._create_node()
            flexx.get('body').appendChild(this.node)
            # todo: allow setting a placeholder DOM element, or any widget parent

            # Create closure to check for size changes
            self._stored_size = 0, 0
            self._checking_size = False
            that = this

            def _check_resize():
                # Re-raise in next event loop iteration; the flag
                # debounces multiple triggers into one check
                if not that._checking_size:
                    setTimeout(_check_resize_now, 0.001)
                    that._checking_size = True

            def _check_resize_now():
                that._checking_size = False
                node = that.node
                widthChanged = (that._stored_size[0] != node.offsetWidth)
                heightChanged = (that._stored_size[1] != node.offsetHeight)
                if widthChanged or heightChanged:
                    that.actual_size._set([node.offsetWidth, node.offsetHeight])

            self._check_resize = _check_resize
            self._check_resize()
            super()._init()

        # @react.source
        # def children(v=()):
        #     """ The child widgets of this widget.
        #     """
        #     for w in v:
        #         if not isinstance(w, flexx.classes.Widget):
        #             raise ValueError('Children should be Widget objects.')
        #     return v

        @react.source
        def actual_size(v=(0, 0)):
            """ The real (actual) size of the widget.
            """
            return v[0], v[1]

        def _create_node(self):
            this.node = document.createElement('div')

        @react.connect('_css_class_name')
        def _css_class_name_changed(self, v):
            this.node.className = v

        def _add_child(self, widget):
            """ Add the DOM element. Called right after the child widget is added. """
            # May be overloaded in layout widgets
            self.node.appendChild(widget.node)

        def _remove_child(self, widget):
            """ Remove the DOM element. Called right after the child widget is removed. """
            self.node.removeChild(widget.node)

        @react.connect('parent')
        def _parent_changed(self, new_parent):
            # JS-side mirror of _parent_changed_py: sync children
            # signals *and* move the DOM node between parents
            old_parent = self.parent.last_value
            if old_parent is not None and old_parent is not undefined:
                children = old_parent.children()[:]
                while self in children:
                    children.remove(self)
                old_parent.children._set(children)  # we set it directly
                old_parent._remove_child(self)
            if new_parent is not None:
                children = new_parent.children()[:]
                children.append(self)
                new_parent.children._set(children)
                new_parent._add_child(self)

        @react.connect('parent.actual_size')
        def _keep_size_up_to_date1(self, size):
            #print(self._id, 'resize 1', size)
            self._check_resize()

        # NOTE(review): parameter ``id`` shadows the builtin; harmless in JS.
        @react.connect('parent', 'container_id')
        def _keep_size_up_to_date2(self, parent, id):
            #print(self._id, 'resize2 ', parent, id)
            # only a top-level widget (no parent) listens to window resize
            if parent is None:
                window.addEventListener('resize', self._check_resize, False)
            else:
                window.removeEventListener('resize', self._check_resize, False)
            self._check_resize()

        @react.connect('pos')
        def _pos_changed(self, pos):
            # values > 1 are pixels; fractions are percentages
            self.node.style.left = pos[0] + "px" if (pos[0] > 1) else pos[0] * 100 + "%"
            self.node.style.top = pos[1] + "px" if (pos[1] > 1) else pos[1] * 100 + "%"

        @react.connect('size')
        def _size_changed(self, size):
            size = size[:]
            for i in range(2):
                if size[i] == 0 or size is None or size is undefined:
                    size[i] = ''  # Use size defined by CSS
                elif size[i] > 1:
                    size[i] = size[i] + 'px'
                else:
                    # NOTE(review): multiplies by 100 but appends 'px';
                    # _pos_changed uses '%' for the fractional case —
                    # this looks like it should be '%' too. Confirm.
                    size[i] = size[i] * 100 + 'px'
            self.node.style.width = size[0]
            self.node.style.height = size[1]

        @react.connect('bgcolor')
        def _bgcolor_changed(self, color):
            self.node.style['background-color'] = color

        @react.connect('container_id')
        def _container_id_changed(self, id):
            #if self._parent:
            #    return
            if id:
                el = document.getElementById(id)
                el.appendChild(this.node)

        ## Children and parent

        # @property
        # def children(self):
        #     return self._children
        #
        # def _add_child(self, widget):
        #     pass  # special hook to introduce a child inside this widget
        #
        # def _remove_child(self, widget):
        #     pass  # special hook to remove a child out from this widget
        #
        #
        # @react.connect('parent')
        # def _parent_changed(self, new_parent):
        #     old_parent = self.parent.previous_value
        #     if old_parent is not None:
        #         children = list(old_parent.children())
        #         while self in children:
        #             children.remove(self)
        #         #old_parent._set_prop('children', children)  # bypass readonly
        #         old_parent.children._set(children)
        #         old_parent._remove_child(self)
        #     if new_parent is not None:
        #         children = list(new_parent.children())
        #         children.append(self)
        #         #new_parent._set_prop('children', children)
        #         new_parent.children._set(children)
        #         new_parent._add_child(self)
| synmnstr/flexx | flexx/ui/_widget.py | Python | bsd-2-clause | 13,195 |
import numbers
import inspect
import numpy as np
import tensorflow as tf
from neupy import init
from neupy.utils import as_tuple
from neupy.core.docs import SharedDocs
# Public API of this module.
__all__ = (
    'BaseProperty', 'Property', 'ArrayProperty', 'BoundedProperty',
    'ProperFractionProperty', 'NumberProperty', 'IntProperty',
    'TypedListProperty', 'ChoiceProperty', 'WithdrawProperty',
    'ParameterProperty', 'FunctionWithOptionsProperty',
)

# Scalar types accepted by the numeric properties: builtin int/float
# plus the corresponding numpy scalar types.
number_type = (int, float, np.floating, np.integer)
class BaseProperty(SharedDocs):
    """
    Base class for properties.

    Parameters
    ----------
    default : object
        Default property value. Defaults to ``None``.

    required : bool
        If parameter equal to ``True`` and value is not defined
        after initialization then exception will be triggered.
        Defaults to ``False``.

    allow_none : bool
        When value is equal to ``True`` than ``None`` is a valid
        value for the parameter. Defaults to ``False``.

    Attributes
    ----------
    name : str or None
        Name of the property. ``None`` in case if name
        wasn't specified.

    expected_type : tuple or object
        Expected data types of the property.
    """
    expected_type = object

    def __init__(self, default=None, required=False, allow_none=False):
        # ``name`` is presumably assigned later by the class that owns
        # this descriptor (not visible in this module) — TODO confirm.
        self.name = None
        self.default = default
        self.required = required
        self.allow_none = allow_none

        if allow_none:
            # widen the *instance-level* expected_type so the class
            # attribute stays untouched for other property instances
            self.expected_type = as_tuple(self.expected_type, type(None))

    def __set__(self, instance, value):
        # ``None`` bypasses validation when allow_none is enabled
        if not self.allow_none or value is not None:
            self.validate(value)
        instance.__dict__[self.name] = value

    def __get__(self, instance, owner):
        if instance is None:
            # accessed on the class itself
            return

        if self.default is not None and self.name not in instance.__dict__:
            # lazily materialize (and validate) the default on first
            # access; note a mutable default would then be shared by
            # all instances that never set the property explicitly
            self.__set__(instance, self.default)

        return instance.__dict__.get(self.name, None)

    def validate(self, value):
        """
        Validate properties value

        Parameters
        ----------
        value : object

        Raises
        ------
        TypeError
            When ``value`` is not an instance of ``expected_type``.
        """
        if not isinstance(value, self.expected_type):
            availabe_types = as_tuple(self.expected_type)
            availabe_types = ', '.join(t.__name__ for t in availabe_types)
            dtype = value.__class__.__name__

            raise TypeError(
                "Invalid data type `{0}` for `{1}` property. "
                "Expected types: {2}".format(dtype, self.name, availabe_types))

    def __repr__(self):
        classname = self.__class__.__name__

        if self.name is None:
            return '{}()'.format(classname)

        return '{}(name="{}")'.format(classname, self.name)
class WithdrawProperty(object):
    """
    Defines inherited property that needs to be withdrawn.

    Attributes
    ----------
    name : str or None
        Name of the property. ``None`` in case if name
        wasn't specified.
    """
    def __get__(self, instance, owner):
        # Remove itself, to make sure that instance doesn't
        # have reference to this property. Instead user should
        # be able to see default value from the parent classes,
        # but not allowed to assign different value in __init__
        # method.
        #
        # Other part of functionality defined in the
        # ``ConfigMeta`` class.
        del self
class Property(BaseProperty):
    """
    Simple and flexible class that helps identity properties with
    specified type.

    Parameters
    ----------
    expected_type : object
        Valid data types.

    {BaseProperty.Parameters}
    """
    def __init__(self, expected_type=object, *args, **kwargs):
        # expected_type must be assigned *before* the super call:
        # BaseProperty.__init__ reads it when allow_none=True in order
        # to extend it with NoneType.
        self.expected_type = expected_type
        super(Property, self).__init__(*args, **kwargs)
class ArrayProperty(BaseProperty):
    """
    Property that accepts numpy arrays and numpy matrices only.

    Parameters
    ----------
    {BaseProperty.Parameters}
    """
    expected_type = (np.ndarray, np.matrix)
class TypedListProperty(BaseProperty):
    """
    List property that contains specified element types.

    Parameters
    ----------
    n_elements : int
        Number of elements in the list. The ``None``
        value mean that list can contains any number of
        elements. Defaults to ``None``.

    element_type : object or tuple
        Type of the elements within the list.

    {BaseProperty.Parameters}
    """
    expected_type = (list, tuple)

    def __init__(self, n_elements=None, element_type=int, *args, **kwargs):
        self.n_elements = n_elements
        self.element_type = element_type
        super(TypedListProperty, self).__init__(*args, **kwargs)

    def validate(self, value):
        """
        Validate the container type (via the parent class), the number
        of elements, and the type of every element.

        Raises
        ------
        ValueError
            When the list has the wrong number of elements.
        TypeError
            When the container or any element has a wrong type.
        """
        super(TypedListProperty, self).validate(value)

        if self.n_elements is not None and len(value) != self.n_elements:
            raise ValueError(
                "Expected list with {} variables".format(self.n_elements))

        if not all(isinstance(v, self.element_type) for v in value):
            element_type = as_tuple(self.element_type)
            type_names = (type_.__name__ for type_ in element_type)
            element_type_name = ', '.join(type_names)

            raise TypeError(
                "The `{}` parameter received invalid element types "
                "in list/tuple. Expected element types: {}, Value: {}"
                "".format(self.name, element_type_name, value))
class ChoiceProperty(BaseProperty):
    """
    Property that can have discrete number of properties.

    Parameters
    ----------
    choices : list, tuple or dict
        Stores all possible choices. Defines list of possible
        choices. If value specified as a dictionary than key
        would be just an alias to the expected value.

    {BaseProperty.Parameters}
    """
    choices = {}

    def __init__(self, choices, *args, **kwargs):
        super(ChoiceProperty, self).__init__(*args, **kwargs)
        self.choices = choices

        # normalize sequences into an identity mapping so __get__ can
        # always resolve a stored key through the dict
        if isinstance(choices, (list, tuple, set)):
            self.choices = dict(zip(choices, choices))

        if not isinstance(self.choices, dict):
            class_name = self.choices.__class__.__name__
            raise ValueError(
                "Choice properties can be only a `dict`, got "
                "`{0}`".format(class_name))

        if not self.choices:
            raise ValueError(
                "Must be at least one choice in property "
                "`{0}`".format(self.name))

    def __set__(self, instance, value):
        if value in self.choices:
            return super(ChoiceProperty, self).__set__(instance, value)

        possible_choices = ", ".join(self.choices.keys())
        raise ValueError(
            "Wrong value `{0}` for property `{1}`. Available values: "
            "{2}".format(value, self.name, possible_choices)
        )

    def __get__(self, instance, owner):
        if instance is None:
            return
        # the alias/key is what gets stored; resolve it to the real value
        choice_key = super(ChoiceProperty, self).__get__(instance, owner)
        return self.choices[choice_key]
class BoundedProperty(BaseProperty):
    """
    Number property that have specified numerical bounds.

    Parameters
    ----------
    minval : float
        Minimum possible value for the property.

    maxval : float
        Maximum possible value for the property.

    {BaseProperty.Parameters}
    """
    def __init__(self, minval=-np.inf, maxval=np.inf, *args, **kwargs):
        self.minval = minval
        self.maxval = maxval
        super(BoundedProperty, self).__init__(*args, **kwargs)

    def validate(self, value):
        """
        Validate the value's type (parent class) and that it falls
        within the inclusive ``[minval, maxval]`` range.

        Raises
        ------
        ValueError
            When the value is out of bounds.
        """
        super(BoundedProperty, self).validate(value)

        if not (self.minval <= value <= self.maxval):
            raise ValueError(
                "Value `{}` should be between {} and {}"
                "".format(self.name, self.minval, self.maxval))
class ProperFractionProperty(BoundedProperty):
    """
    Proper fraction property. Identify all possible numbers
    between zero and one.

    Parameters
    ----------
    {BaseProperty.Parameters}
    """
    expected_type = (float, int)

    def __init__(self, *args, **kwargs):
        # pin the bounds; remaining options are forwarded to
        # BoundedProperty (callers are expected to use keywords,
        # since positional args would collide with minval/maxval)
        super(ProperFractionProperty, self).__init__(
            minval=0, maxval=1, *args, **kwargs)
class NumberProperty(BoundedProperty):
    """
    Property for real-valued numbers, accepting both builtin
    int/float and the equivalent numpy scalar types.

    Parameters
    ----------
    {BoundedProperty.Parameters}
    """
    expected_type = number_type
class IntProperty(BoundedProperty):
    """
    Integer property.

    Parameters
    ----------
    {BoundedProperty.Parameters}
    """
    expected_type = (numbers.Integral, np.integer)

    def __set__(self, instance, value):
        # Coerce floats that carry an integral value (e.g. ``3.0 -> 3``).
        # The previous check handled only builtin ``float``, so a numpy
        # float such as ``np.float64(3.0)`` slipped through and was then
        # rejected by ``validate`` even though the equivalent builtin
        # float was accepted.  ``float(value)`` keeps the check working
        # on numpy scalars regardless of numpy version; NaN/inf are not
        # integral and fall through to normal validation.
        if isinstance(value, (float, np.floating)) and float(value).is_integer():
            value = int(value)
        super(IntProperty, self).__set__(instance, value)
class ParameterProperty(ArrayProperty):
    """
    In addition to Numpy arrays and matrix property support also
    Tensorfow variables and NeuPy Initializers.
    Parameters
    ----------
    {ArrayProperty.Parameters}
    """
    # Accepted inputs: numpy arrays, plain numbers, NeuPy initializer
    # objects, and raw TensorFlow variables/tensors.
    expected_type = as_tuple(
        np.ndarray,
        number_type,
        init.Initializer,
        tf.Variable,
        tf.Tensor,
    )
    def __set__(self, instance, value):
        # Plain numbers are wrapped into a constant initializer so that
        # downstream code only ever deals with initializer objects.
        if isinstance(value, number_type):
            value = init.Constant(value)
        super(ParameterProperty, self).__set__(instance, value)
class FunctionWithOptionsProperty(ChoiceProperty):
    """
    Property that helps select error function from
    available or define a new one.
    Parameters
    ----------
    {ChoiceProperty.Parameters}
    """
    def __set__(self, instance, value):
        # Custom callables skip the choice validation entirely and are
        # stored directly via the grand-parent descriptor.
        if inspect.isfunction(value):
            return super(ChoiceProperty, self).__set__(instance, value)
        return super(FunctionWithOptionsProperty, self).__set__(
            instance, value)
    def __get__(self, instance, value):
        # NOTE(review): the ``value`` parameter plays the role of the
        # descriptor protocol's ``owner`` argument; consider renaming.
        founded_value = super(ChoiceProperty, self).__get__(instance, value)
        if inspect.isfunction(founded_value):
            # A custom function was stored: return it as-is.
            return founded_value
        return super(FunctionWithOptionsProperty, self).__get__(
            instance, founded_value)
class ScalarVariableProperty(BaseProperty):
    """
    Property that holds a scalar, either as a TensorFlow variable
    or as a plain number.
    """
    expected_type = as_tuple(tf.Variable, number_type)
| itdxer/neupy | neupy/core/properties.py | Python | mit | 10,272 |
'''
Created on Jul 5, 2013
Updated on Jun 6, 2016 - Adding Python 3, sqlite, dotEnv, dryrun option, argparse, and reviewing code
@author: kinow
'''
from dotenv import load_dotenv
import configparser
import sqlite3
import argparse
from datetime import datetime
from pytz import timezone
import logging
import os
import re
import sys
from markmail.markmail import MarkMail
import tweepy
# Process exit codes used throughout the script.
ERROR_EXIT_CODE=1
SUCCESS_EXIT_CODE=0
# Timestamped log format shared by the whole application.
FORMAT = '%(levelname)s %(asctime)-15s %(message)s'
logging.basicConfig(format=FORMAT)
logger = logging.getLogger('markmail')
logger.setLevel(logging.DEBUG)
# Command line options; --dry-run skips the Twitter and database updates.
parser = argparse.ArgumentParser(description='markmail_consumer Twitter bot aggregator')
parser.add_argument('--dry-run', dest='dryrun', action='store_true',
                    help='Dry run for experimenting with the script, without updating the Twitter account')
args = parser.parse_args()
# args.dryrun
def set_last_execution_time_and_subject(subject, tweet_counter, dryrun, conn):
    """Record this run in the ``executions`` table: a UTC timestamp, the
    last subject tweeted and how many tweets were sent. In dry-run mode
    nothing is written; a debug message is logged instead."""
    performing_write = (dryrun == False)
    if not performing_write:
        logger.debug('DRY RUN not updating last execution time and subject')
        return
    now = datetime.utcnow()
    cursor = conn.cursor()
    cursor.execute(
        "INSERT INTO executions(last_execution, subject, count) VALUES(?, ?, ?)",
        (now, subject, tweet_counter))
def tweet(tweet_message, tweet_url, tweet_tags, url_length, twitter):
    """Send a tweet to Twitter if dry-run is false, otherwise just logs
    what it would have done.

    The message is truncated with an ellipsis so that the final body
    ``"<message> <url> <tags>"`` fits Twitter's 140-character limit;
    ``url_length`` is the length Twitter counts for the (shortened) URL.
    """
    if twitter is not None:
        logger.info('Tweeting new release: ' + tweet_message)
    else:
        logger.info('DRY-RUN Tweeting new release: ' + tweet_message)
    # Reserve room for the URL, the tags and the two separating spaces.
    # Fix: the previous expression subtracted 2 inside the parentheses
    # (i.e. reserved 2 characters *less*), so the assembled tweet could
    # exceed the 140-character limit by 4.
    remaining_length = 140 - (url_length + len(tweet_tags) + 2)
    if len(tweet_message) > remaining_length:
        tweet_message = tweet_message[:(remaining_length - 3)] + '...'
    tweet_body = '%s %s %s' % (tweet_message, tweet_url, tweet_tags)
    if twitter is not None:
        twitter.update_status(tweet_body)
def get_last_execution_time_and_subject(conn):
    """Get the last execution time and message subject. Defaults to returning the
    current time, and an empty string, unless it succeeds to read the values from
    database, or in case of an error, then an exception will be thrown."""
    last_execution = None
    subject = None
    try:
        # Fallbacks used when the table is empty: "now" and no subject.
        last_execution = datetime.utcnow()
        subject = ''
        c = conn.cursor()
        # The ["timestamp"] column alias makes sqlite convert the stored
        # value back into a datetime (PARSE_COLNAMES connection flag).
        c.execute('SELECT last_execution AS ["timestamp"], subject, count FROM executions ORDER BY last_execution DESC LIMIT 1')
        row = c.fetchone()
        if row is not None:
            last_execution = row[0]
            subject = row[1].rstrip()
    except Exception as e:
        # Database errors are fatal: close the connection and stop.
        if conn != None:
            conn.close()
        logger.fatal('Error getting last execution time and subject')
        logger.exception(e)
        sys.exit(ERROR_EXIT_CODE)
    # Stamp the value as UTC so it can be compared against the
    # timezone-aware dates parsed from MarkMail results.
    last_execution = last_execution.replace(tzinfo=timezone('UTC'))
    logger.debug('Last execution: ' + str(last_execution))
    logger.debug('Last subject: ' + str(subject))
    return (last_execution, subject)
def initialise_database():
    """Initialise the sqlite database. If non-existent, a new database and the
    tables will be created.

    Returns the open connection; callers are responsible for closing it.
    """
    # detect_types lets sqlite round-trip datetime objects transparently.
    conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'database.sqlite'), detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
    c = conn.cursor()
    c.execute('''CREATE TABLE executions
             (last_execution timestamp, subject TEXT, count INTEGER)''')
    return conn
def get_dotenv():
    """Load the ``.env`` file that sits next to this script into the
    process environment, exiting the program if it cannot be read."""
    try:
        env_file = os.path.join(os.path.dirname(__file__), '.env')
        load_dotenv(env_file)
    except Exception as err:
        logger.fatal('Failed to read dotEnv file')
        logger.exception(err)
        sys.exit(ERROR_EXIT_CODE)
def get_config():
    """Load configuration INI file aggregator.cfg

    Returns
    -------
    configparser.ConfigParser
        The parsed configuration. Exits the process on any read error.
    """
    config = None
    try:
        config = configparser.ConfigParser()
        config_file_path = os.path.join(os.path.dirname(__file__), 'aggregator.cfg')
        with open(config_file_path) as f:
            # read_file() replaces readfp(), which was deprecated since
            # Python 3.2 and removed entirely in Python 3.12.
            config.read_file(f)
    except Exception as e:
        logger.fatal('Failed to read configuration file')
        logger.exception(e)
        sys.exit(ERROR_EXIT_CODE)
    return config
def main():
    """Application entry point.

    Reads configuration, finds [ANN*] subjects on the MarkMail
    org.apache.announce list that are newer than the last recorded run,
    tweets each one, and finally records the new high-water mark in the
    sqlite database. Always terminates the process via sys.exit().
    """
    logger.info('MarkMail consumer Twitter bot')
    # config
    logger.info('Reading configuration file')
    config = get_config()
    logger.info('Reading dotEnv file')
    get_dotenv()
    logger.info('Reading sqlite database')
    conn = None
    # Create the database (and schema) on first run, otherwise reopen it.
    if False == os.path.exists(os.path.join(os.path.dirname(__file__), 'database.sqlite')):
        conn = initialise_database()
    else:
        conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'database.sqlite'), detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
    # last execution
    logger.info('Reading last execution')
    (last_execution, last_subject_used) = get_last_execution_time_and_subject(conn)
    # compile pattern used for finding announcement subjects; group(2)
    # captures the text after the [ANN*] marker (the project name/title)
    p = re.compile('.*(\[ANN\]|\[ANNOUNCE\]|\[ANNOUNCEMENT\])(.*)\<.*', re.IGNORECASE)
    logger.info('Creating MarkMail API')
    # create markmail API
    markmail = MarkMail()
    twitter = None
    # Only authenticate against Twitter when we actually intend to post.
    if args.dryrun == False:
        logger.info('Creating Twitter API')
        # create twitter API; credentials come from the environment (.env)
        consumer_key = os.environ.get('TWITTER_CONSUMER_KEY')
        consumer_secret = os.environ.get('TWITTER_CONSUMER_SECRET')
        access_key = os.environ.get('TWITTER_ACCESS_KEY')
        access_token = os.environ.get('TWITTER_ACCESS_TOKEN')
        auth = tweepy.auth.OAuthHandler(consumer_key, consumer_secret)
        auth.set_access_token(access_key, access_token)
        twitter = tweepy.API(auth)
    max_pages = int(config.get('markmail', 'max_pages'))
    url_length = int(config.get('twitter', 'tweet_url_length'))
    tweet_counter = 0
    for i in range(1, max_pages+1):
        logger.info("Search MarkMail. Page: " + str(i))
        try:
            r = markmail.search('list%3Aorg.apache.announce+order%3Adate-backward', i)
            numpages = int(r['search']['numpages'])
            # NOTE(review): numpages can never be None here (int() would
            # have raised); only the < comparison is effective.
            if numpages is None or numpages < (max_pages+1):
                break
            results = r['search']['results']['result']
            # Oldest first, so the last subject stored is the newest one.
            for result in reversed(results):
                subject = result['subject']
                # use a regex to find [ANNOUNCE] or [ANN] and extract component/project name
                m = p.match(subject)
                if m:
                    logger.debug('New/old message found: ' + subject)
                    if subject == last_subject_used:
                        logger.debug('Skipping message. Reason: Duplicate subject found: ' + subject)
                        continue
                    try:
                        post_date = markmail.parse_date(result['date'])
                        logger.debug('New/old message date: ' + post_date.strftime("%Y-%m-%d %H:%M:%S"))
                    except Exception as e:
                        # NOTE(review): e.message is a Python 2 idiom and
                        # raises AttributeError on Python 3 — confirm.
                        logger.fatal('Failed to parse result date: ' + str(result['date']) + '. Reason: ' + e.message)
                        continue
                    # Skip anything at or before the last recorded run.
                    if last_execution.timestamp() > post_date.timestamp():
                        logger.debug('Skipping message. Reason: too old. Date: ' + str(post_date))
                        continue
                    last_subject_used = subject
                    # extract tweet body
                    tweet_message = m.group(2).strip()
                    tweet_url = markmail.base + result['url']
                    tweet_tags = '#asf #opensource #announce'
                    logger.debug('Composing new tweet for [' + tweet_message + ']')
                    tweet(tweet_message, tweet_url, tweet_tags, url_length, twitter)
                    tweet_counter+=1
        except Exception as e:
            # A failed page is logged and skipped; the loop continues.
            logger.exception(e)
    logger.debug('Updating execution time')
    try:
        set_last_execution_time_and_subject(last_subject_used, tweet_counter, args.dryrun, conn)
    except Exception as e:
        logger.fatal('Error setting last execution time and subject')
        logger.exception(e)
        sys.exit(ERROR_EXIT_CODE)
    logger.info('Found ' + (str(tweet_counter)) + ' new releases')
    conn.commit()
    conn.close()
    sys.exit(SUCCESS_EXIT_CODE)
# Standard script entry point.
if __name__ == '__main__':
    main()
| tupilabs/asf_releases_aggregator | markmail_consumer.py | Python | apache-2.0 | 8,635 |
# *****************************************************************************
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# See NOTICE file for details.
#
# *****************************************************************************
import _jpype
from ._jinit import *
from ._jpackage import *
from ._jproxy import *
from ._core import *
from ._gui import *
from ._classpath import *
from ._jclass import *
from ._jobject import *
# There is a bug in lgtm with __init__ imports. It will be fixed next month.
from . import _jarray # lgtm [py/import-own-module]
from . import _jexception # lgtm [py/import-own-module]
from .types import *
from ._jcustomizer import *
from . import nio # lgtm [py/import-own-module]
from . import types # lgtm [py/import-own-module]
from ._jcustomizer import *
# Import all the class customizers
# Customizers are applied in the order that they are defined currently.
from . import _jmethod # lgtm [py/import-own-module]
from . import _jcollection # lgtm [py/import-own-module]
from . import _jio # lgtm [py/import-own-module]
from . import protocol # lgtm [py/import-own-module]
from . import _jthread # lgtm [py/import-own-module]
# Public API: start with the java/javax pseudo-packages, then re-export
# everything the submodules declare in their own __all__ lists.
__all__ = ['java', 'javax']
__all__.extend(_jinit.__all__)
__all__.extend(_core.__all__)
__all__.extend(_classpath.__all__)
__all__.extend(types.__all__)
__all__.extend(_jproxy.__all__)
__all__.extend(_jpackage.__all__)
__all__.extend(_jclass.__all__)
__all__.extend(_jcustomizer.__all__)
__all__.extend(_gui.__all__)
__version__ = "1.2.1_dev0"
__version_info__ = __version__.split('.')
# FIXME these should be deprecated. The old JPackage system is only for
# python2 series and generates lots of deceptive classes. At some point
# these two are going to have to go away.
java = JPackage("java", strict=True)
javax = JPackage("javax", strict=True)
# Convenience aliases for the native method/field wrapper types.
JMethod = _jpype._JMethod
JField = _jpype._JField
# Finish native-module bootstrap (when present) and register the Python
# resources with the C extension.
if hasattr(_jpype, 'bootstrap'):
    _jpype.bootstrap()
_core.initializeResources()
| Thrameos/jpype | jpype/__init__.py | Python | apache-2.0 | 2,537 |
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import os
import sys
if sys.version_info >= (3,):
from urllib.parse import urlparse
else:
from urlparse import urlparse
from ._constants import (
SERVICE_HOST_BASE,
DEFAULT_PROTOCOL,
DEV_ACCOUNT_NAME,
DEV_ACCOUNT_KEY,
DEV_BLOB_HOST,
DEV_QUEUE_HOST,
DEV_TABLE_HOST
)
from ._error import (
_ERROR_STORAGE_MISSING_INFO,
)
# Development-storage (emulator) host per service; the file service has
# no emulator support, hence the empty string.
_EMULATOR_ENDPOINTS = {
    'blob': DEV_BLOB_HOST,
    'queue': DEV_QUEUE_HOST,
    'table': DEV_TABLE_HOST,
    'file': '',
}
# Connection-string key that carries a custom endpoint for each service.
# NOTE(review): the name is misspelled (ENDPONTS) but is referenced
# as-is below, so renaming it would be a wider refactor.
_CONNECTION_ENDPONTS = {
    'blob': 'BlobEndpoint',
    'queue': 'QueueEndpoint',
    'table': 'TableEndpoint',
    'file': 'FileEndpoint',
}
class _ServiceParameters(object):
    """
    Resolves the account, credential and endpoint information a storage
    service client needs, from explicit parameters, the development
    emulator, or a connection string.
    """

    def __init__(self, service, account_name=None, account_key=None, sas_token=None,
                 is_emulated=False, protocol=DEFAULT_PROTOCOL, endpoint_suffix=SERVICE_HOST_BASE,
                 custom_domain=None):
        self.account_name = account_name
        self.account_key = account_key
        self.sas_token = sas_token
        self.protocol = protocol or DEFAULT_PROTOCOL

        if is_emulated:
            # The storage emulator uses fixed, well-known credentials.
            self.account_name = DEV_ACCOUNT_NAME
            self.protocol = 'http'

            # Only set the account key if a sas_token is not present to allow sas to be used with the emulator
            self.account_key = DEV_ACCOUNT_KEY if not self.sas_token else None

            self.primary_endpoint = '{}/{}'.format(_EMULATOR_ENDPOINTS[service], self.account_name)
            self.secondary_endpoint = '{}/{}-secondary'.format(_EMULATOR_ENDPOINTS[service], self.account_name)
        else:
            # Strip whitespace from the key
            if self.account_key:
                self.account_key = self.account_key.strip()

            endpoint_suffix = endpoint_suffix or SERVICE_HOST_BASE

            # Setup the primary endpoint
            if custom_domain:
                parsed_url = urlparse(custom_domain)
                self.primary_endpoint = parsed_url.netloc + parsed_url.path
                # Fix: the previous code used `parsed_url.scheme is ''`,
                # an identity comparison with a string literal that relies
                # on CPython interning and raises a SyntaxWarning on
                # modern interpreters; test emptiness by truthiness.
                self.protocol = self.protocol if not parsed_url.scheme else parsed_url.scheme
            else:
                if not self.account_name:
                    raise ValueError(_ERROR_STORAGE_MISSING_INFO)
                self.primary_endpoint = '{}.{}.{}'.format(self.account_name, service, endpoint_suffix)

            # Setup the secondary endpoint
            if self.account_name:
                self.secondary_endpoint = '{}-secondary.{}.{}'.format(self.account_name, service, endpoint_suffix)
            else:
                self.secondary_endpoint = None

    @staticmethod
    def get_service_parameters(service, account_name=None, account_key=None, sas_token=None, is_emulated=None,
                               protocol=None, endpoint_suffix=None, custom_domain=None, request_session=None,
                               connection_string=None):
        """Build a _ServiceParameters from whichever source is provided,
        in priority order: connection string, emulator flag, explicit
        account settings.

        Raises
        ------
        ValueError
            When none of the three sources is supplied.
        """
        if connection_string:
            params = _ServiceParameters._from_connection_string(connection_string, service)
        elif is_emulated:
            params = _ServiceParameters(service, is_emulated=True)
        elif account_name:
            params = _ServiceParameters(service,
                                        account_name=account_name,
                                        account_key=account_key,
                                        sas_token=sas_token,
                                        is_emulated=is_emulated,
                                        protocol=protocol,
                                        endpoint_suffix=endpoint_suffix,
                                        custom_domain=custom_domain)
        else:
            raise ValueError(_ERROR_STORAGE_MISSING_INFO)

        params.request_session = request_session
        return params

    @staticmethod
    def _from_connection_string(connection_string, service):
        # Split into key=value pairs removing empties, then split the pairs into a dict
        config = dict(s.split('=', 1) for s in connection_string.split(';') if s)

        # Authentication
        account_name = config.get('AccountName')
        account_key = config.get('AccountKey')
        sas_token = config.get('SharedAccessSignature')

        # Emulator
        is_emulated = config.get('UseDevelopmentStorage')

        # Basic URL Configuration
        protocol = config.get('DefaultEndpointsProtocol')
        endpoint_suffix = config.get('EndpointSuffix')

        # Custom URLs
        endpoint = config.get(_CONNECTION_ENDPONTS[service])

        return _ServiceParameters(service,
                                  account_name=account_name,
                                  account_key=account_key,
                                  sas_token=sas_token,
                                  is_emulated=is_emulated,
                                  protocol=protocol,
                                  endpoint_suffix=endpoint_suffix,
                                  custom_domain=endpoint)
| jehine-MSFT/azure-storage-python | azure/storage/_connection.py | Python | apache-2.0 | 5,691 |
import datetime
import urllib
from django.http import HttpRequest, HttpResponse
from apps.canvas_auth.models import AnonymousUser
from canvas import bgwork
from canvas.middleware import (RequestSetupMiddleware, DeferredWorkMiddleware, ExceptionLogger, ResponseGuard,
safe_middleware)
from canvas.models import Metrics
from canvas.tests.tests_helpers import CanvasTestCase, FakeRequest, create_user, create_staff, CB
from services import Services, FakeTimeProvider, with_override_service
class MockUrlopen(object):
    """Stand-in for the object returned by ``urllib.urlopen`` that
    always yields the canned upstream payload."""

    def read(self):
        # Fixed body the ping middleware forwards (as 'pong') to clients.
        return 'twisted pong'
class TestPingMiddleware(CanvasTestCase):
    """/ping must answer 'pong' for anonymous and logged-in users alike."""
    def setUp(self):
        super(TestPingMiddleware, self).setUp()
        # Patch urllib so the middleware's upstream check never hits the
        # network; the original is restored in tearDown.
        self.urlopen = urllib.urlopen
        urllib.urlopen = lambda path: MockUrlopen()

    def tearDown(self):
        super(TestPingMiddleware, self).tearDown()
        urllib.urlopen = self.urlopen

    def test_logged_out(self):
        path = '/ping'
        resp = self.get(path, user=AnonymousUser())
        self.assertStatus(200, path)
        self.assertEqual(resp.content, 'pong')

    def test_logged_in(self):
        path = '/ping'
        resp = self.get(path)
        self.assertStatus(200, path)
        self.assertEqual(resp.content, 'pong')
@safe_middleware
class RecordingMiddleware(object):
    """Middleware stub that records which hooks ran, used to observe the
    pairing guarantees that @safe_middleware enforces."""
    def __init__(self):
        self.calls = []

    def process_request(self, request):
        self.calls.append("process_request")

    def process_response(self, request, response):
        self.calls.append("process_response")
class TestSafeMiddleware(CanvasTestCase):
    """@safe_middleware must only run process_response when the matching
    process_request completed successfully."""
    def test_prevents_process_response_without_corresponding_process_request(self):
        rm = RecordingMiddleware()
        # process_response with no prior process_request must be a no-op.
        rm.process_response(HttpRequest(), HttpResponse())
        self.assertEqual(rm.calls, [])

    def test_allows_process_response_if_corresponding_process_request(self):
        rm = RecordingMiddleware()
        request = HttpRequest()
        rm.process_request(request)
        rm.process_response(request, HttpResponse())
        self.assertEqual(rm.calls, ['process_request', 'process_response'])

    def test_prevents_process_response_if_process_request_raises_exception(self):
        calls = []
        class ExpectedException(Exception): pass

        @safe_middleware
        class NaughtyMiddleware(object):
            def process_request(self, request):
                raise ExpectedException()

            def process_response(self, request, response):
                calls.append("process_response")

        nm = NaughtyMiddleware()
        request = HttpRequest()
        with self.assertRaises(ExpectedException):
            nm.process_request(request)
        # A failed process_request must suppress the response hook too.
        nm.process_response(request, HttpResponse())
        self.assertEqual(calls, [])
class TestResponseGuard(CanvasTestCase):
    """ResponseGuard must reject view return values that are not real
    HttpResponse objects."""
    def test_raise_TypeError_if_response_is_not_http_response(self):
        response = []
        mw = ResponseGuard()
        self.assertRaises(TypeError, lambda: mw.process_response(FakeRequest(None), response))

    def test_through_full_django_stack(self):
        # /staff/noop deliberately returns a non-response value, so the
        # guard should surface a TypeError through the whole stack.
        self.assertRaises(TypeError, lambda: self.post("/staff/noop", user=create_staff()))
class TestRequestSetupMiddleware(CanvasTestCase):
    """Verifies which request paths are counted in the view metric."""
    @with_override_service('time', FakeTimeProvider)
    def assertViewCount(self, request, response, count):
        # Run the middleware pair, flush deferred background work, and
        # check the daily view-metric delta equals `count`.
        now_dt = datetime.datetime.fromtimestamp(Services.time.time())
        bgwork.clear()
        RequestSetupMiddleware().process_request(request)
        RequestSetupMiddleware().process_response(request, response)
        view_previous = Metrics.view.daily_count(now_dt)
        bgwork.perform()
        view_current = Metrics.view.daily_count(now_dt)
        self.assertEqual(view_current - view_previous, count)

    def test_pageview_records_view_metric(self):
        self.assertViewCount(FakeRequest(create_user(), path="/user/foobar"), HttpResponse(status=200), 1)

    def test_api_does_not_record_view_metric(self):
        # API endpoints must not inflate the page-view metric.
        self.assertViewCount(FakeRequest(create_user(), path="/api/do_stuff"), HttpResponse(status=200), 0)
class TestDeferredWorkMiddleware(CanvasTestCase):
    """on_success work must run only after a successful (2xx) response,
    and only once background work is performed."""
    def test_deferred_method_called_on_success(self):
        dwm = DeferredWorkMiddleware()
        request = HttpRequest()
        cb = CB()
        dwm.process_request(request)
        request.on_success.defer(cb)
        dwm.process_response(request, HttpResponse(status=200))
        # The callback is queued for background work, not run inline.
        self.assertEqual(cb.called, 0)
        bgwork.perform()
        self.assertEqual(cb.called, 1)

    def test_deferred_method_not_called_on_failure(self):
        dwm = DeferredWorkMiddleware()
        request = HttpRequest()
        cb = CB()
        dwm.process_request(request)
        request.on_success.defer(cb)
        # A 500 response must drop the deferred callback entirely.
        dwm.process_response(request, HttpResponse(status=500))
        bgwork.perform()
        self.assertEqual(cb.called, 0)
| canvasnetworks/canvas | website/canvas/tests/test_middleware.py | Python | bsd-3-clause | 4,965 |
from mock import MagicMock, patch
from nose.plugins.attrib import attr
from ion.agents.data.handlers.handler_utils import _get_type, list_file_info, \
list_file_info_http, list_file_info_ftp, list_file_info_fs, \
get_time_from_filename, calculate_iteration_count, get_sbuffer
from pyon.util.unit_test import PyonTestCase
import requests
from ftplib import FTP
from StringIO import StringIO
@attr('UNIT', group='eoi')
class TestHandlerUtils(PyonTestCase):
    """Unit tests for the handler_utils helpers; every external resource
    (HTTP, FTP, filesystem, regex, time) is mocked out."""
    def test__get_type_http(self):
        self.assertEqual(_get_type('http://'), 'http')

    def test__get_type_ftp(self):
        self.assertEqual(_get_type('ftp://'), 'ftp')

    def test__get_type_fs(self):
        # Anything without a recognised scheme falls back to filesystem.
        self.assertEqual(_get_type(''), 'fs')

    @patch('ion.agents.data.handlers.handler_utils._get_type')
    @patch('ion.agents.data.handlers.handler_utils.list_file_info_http')
    def test_list_file_info_by_http(self, list_file_info_http_mock, _get_type_mock):
        # list_file_info should dispatch to the http implementation.
        _get_type_mock.return_value = 'http'
        list_file_info_http_mock.return_value = ['file1', 'file2']
        self.assertEqual(list_file_info('http', 'pattern'), ['file1', 'file2'])

    @patch('ion.agents.data.handlers.handler_utils._get_type')
    @patch('ion.agents.data.handlers.handler_utils.list_file_info_ftp')
    def test_list_file_info_by_ftp(self, list_file_info_ftp_mock, _get_type_mock):
        # ...and to the ftp implementation.
        _get_type_mock.return_value = 'ftp'
        list_file_info_ftp_mock.return_value = ['file1', 'file2']
        self.assertEqual(list_file_info('ftp', 'pattern'), ['file1', 'file2'])

    @patch('ion.agents.data.handlers.handler_utils._get_type')
    @patch('ion.agents.data.handlers.handler_utils.list_file_info_fs')
    def test_list_file_info_by_fs(self, list_file_info_fs_mock, _get_type_mock):
        # ...and to the filesystem implementation.
        _get_type_mock.return_value = 'fs'
        list_file_info_fs_mock.return_value = ['file1', 'file2']
        self.assertEqual(list_file_info('fs', 'pattern'), ['file1', 'file2'])

    @patch('ion.agents.data.handlers.handler_utils.re.findall')
    @patch('ion.agents.data.handlers.handler_utils.requests.get')
    def test_list_file_info_http(self, requests_mock, re_mock):
        # Fake the directory-listing page and the regex extraction, then
        # check the matched names are joined onto the base URL.
        retval = MagicMock(spec=requests.models.Response)
        retval.url = 'http://marine.rutgers.edu/cool/maracoos/codar/ooi/radials/BELM/'
        retval.content = '<http><body>' \
                         '<a href="RDLm_BELM_2012_08_14_1200.ruv">RDLm_BELM_2012_08_14_1200.ruv</a> ' \
                         '14-Aug-2012 08:42 88K \n<img src="/icons/unknown.gif" alt="[ ]"> ' \
                         '<a href="RDLm_BELM_2012_08_14_1300.ruv">RDLm_BELM_2012_08_14_1300.ruv</a> ' \
                         '14-Aug-2012 09:41 90K \n</body></html>'
        requests_mock.return_value = retval
        re_mock.return_value = ['RDLm_BELM_2012_08_14_1200.ruv', 'RDLm_BELM_2012_08_14_1300.ruv']
        lst = [('http://marine.rutgers.edu/cool/maracoos/codar/ooi/radials/BELM/RDLm_BELM_2012_08_14_1200.ruv',),
               ('http://marine.rutgers.edu/cool/maracoos/codar/ooi/radials/BELM/RDLm_BELM_2012_08_14_1300.ruv',)]
        self.assertEqual(list_file_info_http(base='http://marine.rutgers.edu/cool/maracoos/codar/ooi/radials/BELM/', pattern='*.ruv'), lst)

    @patch('ion.agents.data.handlers.handler_utils.FTP')
    def test_list_file_info_ftp(self, ftp_mock):
        # The FTP listing is taken straight from FTP.nlst().
        retval = MagicMock(spec=FTP)
        retval.nlst.return_value = ['RDLm_BELM_2012_08_14_1200.ruv', 'RDLm_BELM_2012_08_14_1300.ruv']
        ftp_mock.return_value = retval
        lst = ['RDLm_BELM_2012_08_14_1200.ruv',
               'RDLm_BELM_2012_08_14_1300.ruv']
        self.assertEqual(list_file_info_ftp(base='ftp://marine.rutgers.edu/cool/maracoos/codar/ooi/radials/BELM/', pattern='*.ruv'), lst)

    @patch('ion.agents.data.handlers.handler_utils.glob.glob')
    @patch('ion.agents.data.handlers.handler_utils.os.path.getmtime')
    @patch('ion.agents.data.handlers.handler_utils.os.path.getsize')
    @patch('ion.agents.data.handlers.handler_utils.os.path.isdir')
    @patch('ion.agents.data.handlers.handler_utils.os.path.exists')
    def test_list_file_info_fs(self, exists_mock, isdir_mock, getsize_mock, getmtime_mock, glob_mock):
        # Each filesystem entry becomes a (name, mtime, size, 0) tuple.
        exists_mock.return_value = True
        isdir_mock.return_value = True
        getsize_mock.return_value = 100
        getmtime_mock.return_value = 1313712000
        lst1 = ['RDLm_BELM_2012_08_14_1200.ruv',
                'RDLm_BELM_2012_08_14_1300.ruv']
        glob_mock.return_value = lst1
        lst2 = [('RDLm_BELM_2012_08_14_1200.ruv', 1313712000, 100, 0),
                ('RDLm_BELM_2012_08_14_1300.ruv', 1313712000, 100, 0)]
        self.assertEqual(list_file_info_fs(base='test_data/ruv', pattern='*.ruv'), lst2)

    @patch('ion.agents.data.handlers.handler_utils.os.path.exists')
    def test_list_file_info_fs_exists_false(self, exists_mock):
        # A missing base directory must raise with a descriptive message.
        exists_mock.return_value = False
        with self.assertRaises(StandardError) as cm:
            list_file_info_fs(base='test_data/ruv', pattern='*.ruv')
        ex = cm.exception
        self.assertEqual(ex.message, 'base \'test_data/ruv\' does not exist')

    @patch('ion.agents.data.handlers.handler_utils.os.path.isdir')
    @patch('ion.agents.data.handlers.handler_utils.os.path.exists')
    def test_list_file_info_fs_isdir_false(self, exists_mock, isdir_mock):
        # A base that exists but is not a directory must also raise.
        exists_mock.return_value = True
        isdir_mock.return_value = False
        with self.assertRaises(StandardError) as cm:
            list_file_info_fs(base='test_data/ruv', pattern='*.ruv')
        ex = cm.exception
        self.assertEqual(ex.message, 'base \'test_data/ruv\' is not a directory')

    @patch('ion.agents.data.handlers.handler_utils.time.mktime')
    @patch('ion.agents.data.handlers.handler_utils.re.match')
    @patch('ion.agents.data.handlers.handler_utils.os.path.basename')
    def test_get_time_from_filename(self, basename_mock, re_mock, mktime_mock):
        # The date groups extracted from the filename are converted to a
        # unix timestamp via time.mktime (mocked here).
        basename_mock.return_value = 'test_data/ruv'
        retval = MagicMock()
        retval.groups.return_value = ('2012', '06', '06', '12', '00')
        re_mock.return_value = retval
        mktime_mock.return_value = 1338998400.0
        self.assertEqual(get_time_from_filename(file_name='test_data/ruv/RDLm_SEAB_2012_06_06_1200.ruv',
                                                date_extraction_pattern='RDLm_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv',
                                                date_pattern='%Y %m %d %H %M'), 1338998400.0)

    def test_calculate_iteration_count(self):
        total_recs = 100
        max_rec = 10
        self.assertEqual(calculate_iteration_count(total_recs=total_recs, max_rec=max_rec), 10)

    def test_calculate_iteration_count_not_even(self):
        # A partial final batch still requires one extra iteration.
        total_recs = 101
        max_rec = 10
        self.assertEqual(calculate_iteration_count(total_recs=total_recs, max_rec=max_rec), 11)

    @patch('ion.agents.data.handlers.handler_utils.StringIO')
    @patch('ion.agents.data.handlers.handler_utils._get_type')
    @patch('ion.agents.data.handlers.handler_utils.requests.get')
    def test_get_sbuffer_http(self, requests_mock, get_type_mock, StringIO_mock):
        # An http URL should be fetched and wrapped into a StringIO buffer.
        retval = MagicMock(spec=requests.models.Response)
        retval.url = 'http://marine.rutgers.edu/cool/maracoos/codar/ooi/radials/BELM/'
        retval.content = '<http><body>'\
                         '<a href="RDLm_BELM_2012_08_14_1200.ruv">RDLm_BELM_2012_08_14_1200.ruv</a> '\
                         '14-Aug-2012 08:42 88K \n<img src="/icons/unknown.gif" alt="[ ]"> '\
                         '<a href="RDLm_BELM_2012_08_14_1300.ruv">RDLm_BELM_2012_08_14_1300.ruv</a> '\
                         '14-Aug-2012 09:41 90K \n</body></html>'
        requests_mock.return_value = retval
        get_type_mock.return_value = 'http'
        StringIO_mock.return_value = MagicMock(spec=StringIO)
        self.assertTrue(isinstance(get_sbuffer(url=retval.url), StringIO))

    def test_get_sbuffer_ftp(self):
        # FTP buffering is not implemented yet and must say so.
        with self.assertRaises(NotImplementedError):
            get_sbuffer(url='http://marine.rutgers.edu/cool/maracoos/codar/ooi/radials/BELM/', type='ftp')
| ooici/coi-services | ion/agents/data/handlers/test/test_handler_utils.py | Python | bsd-2-clause | 8,108 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.