| code (stringlengths 2-1.05M) | repo_name (stringlengths 5-104) | path (stringlengths 4-251) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int32, 2-1.05M) |
|---|---|---|---|---|---|
"""
WSGI config for api project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
from django.core.wsgi import get_wsgi_application
# This allows easy placement of apps within the interior
# api directory.
app_path = os.path.dirname(os.path.abspath(__file__)).replace('/config', '')
sys.path.append(os.path.join(app_path, 'api'))
if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production':
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production':
application = Sentry(application)
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
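# A minimal middleware sketch (illustrative only, not part of this project):
# a wrapper that adds a response header before delegating to the Django app.
#
#     class HeaderMiddleware(object):
#         def __init__(self, app):
#             self.app = app
#
#         def __call__(self, environ, start_response):
#             def _start(status, headers, exc_info=None):
#                 headers.append(('X-Powered-By', 'api'))
#                 return start_response(status, headers, exc_info)
#             return self.app(environ, _start)
#
#     application = HeaderMiddleware(application)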
|
urfonline/api
|
config/wsgi.py
|
Python
|
mit
| 1,914
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-12-09 01:29
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("hordak", "0007_auto_20161209_0111")]
operations = [
migrations.RenameField("Account", "has_statements", "is_bank_account")
]
|
adamcharnock/django-hordak
|
hordak/migrations/0008_auto_20161209_0129.py
|
Python
|
mit
| 353
|
# -*- coding: utf-8; -*-
#
# @file sequences
# @brief collgate
# @author Frédéric SCHERMA (INRA UMR1095)
# @date 2018-01-09
# @copyright Copyright (c) 2018 INRA/CIRAD
# @license MIT (see LICENSE file)
# @details
def fixture(fixture_manager, factory_manager):
acc_seq = "CREATE SEQUENCE IF NOT EXISTS accession_naming_seq START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1;"
bat_seq = "CREATE SEQUENCE IF NOT EXISTS batch_naming_seq START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1;"
from django.db import connection
with connection.cursor() as cursor:
cursor.execute(acc_seq)
cursor.execute(bat_seq)
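# Usage sketch (an assumption, not part of the original fixture): names can
# later be drawn from the sequences created above, e.g.:
#
#     with connection.cursor() as cursor:
#         cursor.execute("SELECT nextval('accession_naming_seq')")
#         next_accession_number = cursor.fetchone()[0]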
|
coll-gate/collgate
|
server/accession/fixtures/sequences.py
|
Python
|
mit
| 661
|
"""
Django settings for lark project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from .base import *
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ')i4@2vfr##+zd3cn8ckw#!lebya1mk2sg@yq9boog+=ofi@hf9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Database
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'filters': {
},
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
# 'filters': ['special']
}
},
'loggers': {
'django': {
'handlers': ['null'],
'propagate': True,
'level': 'INFO',
},
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': False,
},
'music': {
'handlers': ['console'],
'level': 'DEBUG',
# 'filters': ['special']
},
}
}
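# Usage sketch (illustrative): the 'music' logger above writes DEBUG and up
# to the console with the 'simple' formatter, e.g.:
#
#     import logging
#     logging.getLogger('music').debug('now playing')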
|
mozillazg/lark
|
lark/lark/settings_dev.py
|
Python
|
mit
| 2,143
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('fastapp', '0010_auto_20150910_2010'),
]
operations = [
migrations.AddField(
model_name='thread',
name='updated',
field=models.DateTimeField(auto_now=True, null=True),
preserve_default=True,
),
]
|
sahlinet/fastapp
|
fastapp/migrations/0011_thread_updated.py
|
Python
|
mit
| 451
|
class ClopureSyntaxError(Exception):
def __init__(self, *args, pos=0, **kwargs):
super().__init__(*args, **kwargs)
self.pos = pos
class ClopureRuntimeError(Exception):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
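# Usage sketch (illustrative): ClopureSyntaxError carries the source position
# of the offending token, so a caller can report where parsing failed.
#
#     try:
#         raise ClopureSyntaxError('unexpected token', pos=17)
#     except ClopureSyntaxError as e:
#         print('syntax error at position %d' % e.pos)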
|
vbkaisetsu/clopure
|
clopure/exceptions.py
|
Python
|
mit
| 273
|
from datetime import datetime, timedelta
import json
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from django_dynamic_fixture import G, N
from freezegun import freeze_time
from mock import patch
from issue.models import (
Assertion, ExtendedEnum, Issue, IssueAction, IssueStatus, ModelAssertion, ModelIssue, Responder, ResponderAction,
load_function,
)
from issue.tests.models import TestModel
def function_to_load(self, *args, **kwargs):
pass
def is_even_number(record):
return ((int(record.name) % 2) == 0, {})
class LoadFunctionTests(TestCase):
def test_load_class_instance(self):
func = load_function('issue.tests.model_tests.function_to_load')
self.assertEqual(func, function_to_load)
class ExtendedEnumTests(TestCase):
class TestEnum(ExtendedEnum):
red = 1
blue = 3
green = 2
def test_name_to_value(self):
self.assertEqual(2, ExtendedEnumTests.TestEnum.name_to_value('green'))
def test_choices(self):
self.assertEqual(
set([(1, 'red'), (2, 'green'), (3, 'blue')]), set(ExtendedEnumTests.TestEnum.choices()))
class IssueManagerTests(TestCase):
def test_get_open_issues(self):
i = G(Issue)
G(Issue, status=IssueStatus.Resolved.value)
i3 = G(Issue)
self.assertEqual(set(Issue.objects.get_open_issues()), set([i, i3]))
def test_reopen_issue(self):
mi = G(Issue, status=IssueStatus.Resolved.value)
Issue.objects.reopen_issue(name=mi.name)
self.assertEqual(IssueStatus.Open.value, Issue.objects.get(pk=mi.pk).status)
def test_is_wont_fix(self):
mi = G(Issue, status=IssueStatus.Wont_fix.value)
self.assertTrue(Issue.objects.is_wont_fix(name=mi.name))
def test_maybe_open_issue_when_none_exists(self):
"""
Verify that maybe_open_issue will create a new Issue when none like it exists.
"""
(issue, created) = Issue.objects.maybe_open_issue(name='falafel')
self.assertTrue(created)
self.assertEqual(IssueStatus.Open.value, Issue.objects.get(name=issue.name).status)
def test_maybe_open_issue_when_it_is_marked_as_wont_fix(self):
"""
Verify that maybe_open_issue will not create or return an Issue when it exists and
is marked as WONT_FIX.
"""
issue = G(Issue, status=IssueStatus.Wont_fix.value)
self.assertEqual((None, False), Issue.objects.maybe_open_issue(name=issue.name))
self.assertEqual(IssueStatus.Wont_fix.value, Issue.objects.get(pk=issue.pk).status)
self.assertEqual(1, Issue.objects.filter(name=issue.name).count())
def test_maybe_open_issue_returns_already_open_issue(self):
"""
Verify that maybe_open_issue will return the extant Issue of the provided name
when it is open.
"""
issue = G(Issue, status=IssueStatus.Open.value)
(issue2, created) = Issue.objects.maybe_open_issue(name=issue.name)
self.assertFalse(created)
self.assertEqual(IssueStatus.Open.value, Issue.objects.get(pk=issue.pk).status)
self.assertEqual(1, Issue.objects.filter(name=issue.name).count())
def test_maybe_open_issue_when_it_is_marked_as_resolved(self):
"""
Verify that maybe_open_issue will create a new issue when a Resolved one
exists with the same name.
"""
issue = G(Issue, status=IssueStatus.Resolved.value)
(issue2, created) = Issue.objects.maybe_open_issue(name=issue.name)
self.assertTrue(created)
self.assertEqual(IssueStatus.Open.value, Issue.objects.get(pk=issue2.pk).status)
self.assertEqual(2, Issue.objects.filter(name=issue2.name).count())
def test_resolve_open_issue(self):
a = G(Assertion)
issue = G(Issue, name=a.name, status=IssueStatus.Open.value)
a._resolve_open_issue()
self.assertEqual(IssueStatus.Resolved.value, Issue.objects.get(pk=issue.pk).status)
class ModelIssueManagerTests(TestCase):
def test_replace_record_with_content_type(self):
record = N(TestModel)
kwargs = {
'record': record,
}
expected_kwargs = {
'record_id': record.id,
'record_type': ContentType.objects.get_for_model(record),
}
self.assertEqual(
expected_kwargs, ModelIssue.objects._replace_record_with_content_type(kwargs))
def test_replace_record_with_content_type_with_no_record(self):
self.assertEqual({}, ModelIssue.objects._replace_record_with_content_type({}))
def test_reopen_issue(self):
record = G(TestModel)
mi = G(
ModelIssue, record_id=record.id, record_type=ContentType.objects.get_for_model(record),
status=IssueStatus.Resolved.value)
ModelIssue.objects.reopen_issue(name=mi.name, record=mi.record)
self.assertEqual(IssueStatus.Open.value, ModelIssue.objects.get(pk=mi.pk).status)
def test_is_wont_fix(self):
record = G(TestModel)
mi = G(
ModelIssue, record_id=record.id, record_type=ContentType.objects.get_for_model(record),
status=IssueStatus.Wont_fix.value)
self.assertTrue(ModelIssue.objects.is_wont_fix(name=mi.name, record=mi.record))
class IssueTests(TestCase):
def test__str__(self):
i = Issue(name='an-issue', status=IssueStatus.Resolved.value)
self.assertEqual('Issue: an-issue - IssueStatus.Resolved', str(i))
def test__is_open(self):
i = N(Issue, status=IssueStatus.Open.value)
self.assertTrue(i.is_open)
self.assertFalse(i.is_resolved)
self.assertFalse(i.is_wont_fix)
def test__is_resolved(self):
i = N(Issue, status=IssueStatus.Resolved.value)
self.assertTrue(i.is_resolved)
self.assertFalse(i.is_open)
self.assertFalse(i.is_wont_fix)
def test__is_wont_fix(self):
i = N(Issue, status=IssueStatus.Wont_fix.value)
self.assertTrue(i.is_wont_fix)
self.assertFalse(i.is_resolved)
self.assertFalse(i.is_open)
class IssueActionTests(TestCase):
def test__str__(self):
ia = N(IssueAction)
self.assertEqual(
'IssueResponse: {self.issue.name} - {self.responder_action} - '
'{self.success} at {self.execution_time}'.format(self=ia),
str(ia)
)
class ResponderTests(TestCase):
def test__str__(self):
self.assertEqual(
'Responder: error-.*',
str(Responder(watch_pattern='error-.*'))
)
@patch('issue.models.load_function', spec_set=True)
def test_respond(self, load_function):
# Setup the scenario
target_function = 'do'
issue = G(Issue, name='error-42')
responder = G(Responder, issue=issue, watch_pattern='error-\d+')
G(ResponderAction, responder=responder, target_function=target_function, delay_sec=0)
# Run the code
r = responder.respond(issue)
# Verify expectations
self.assertTrue(r)
load_function.assert_called_with(target_function)
@patch('issue.models.load_function', spec_set=True)
def test_respond_ignores_non_watching_pattern(self, load_function):
# Setup the scenario
issue = G(Issue, name='success')
responder = G(Responder, issue=issue, watch_pattern='error-\d+')
G(ResponderAction, responder=responder, target_function='do')
# Run the code
r = responder.respond(issue)
# Verify expectations
self.assertFalse(r)
self.assertFalse(load_function.called)
def test__match(self):
r = Responder(watch_pattern='error-.*')
self.assertTrue(r._match('error-42'))
self.assertFalse(r._match('success'))
def test__get_pending_actions_for_issue(self):
# Setup the scenario
now = datetime(2014, 8, 11, 15, 0, 0)
delta = timedelta(minutes=30)
r = G(Responder)
ra = G(ResponderAction, responder=r, delay_sec=delta.total_seconds())
issue = G(Issue, creation_time=now - (delta * 2))
# Run the code and verify expectation
self.assertEqual(ra, r._get_pending_actions_for_issue(issue).get())
def test__get_pending_actions_for_issue_ignores_executed_actions(self):
# Setup the scenario
now = datetime(2014, 8, 11, 15, 0, 0)
delta = timedelta(minutes=30)
r = G(Responder)
ra = G(ResponderAction, responder=r, delay_sec=delta.total_seconds())
issue = G(Issue, creation_time=now - (delta * 2))
G(IssueAction, issue=issue, responder_action=ra)
# Run the code and verify expectation
self.assertFalse(r._get_pending_actions_for_issue(issue).exists())
@patch('issue.models.load_function', spec_set=True)
def test__execute_all_success(self, load_function):
# Setup the scenario
issue = G(Issue)
responder = G(Responder, issue=issue)
# Note: we don't care what the target_function path is since we patch the load_function function
ra = G(ResponderAction, responder=responder, delay_sec=0)
ra2 = G(ResponderAction, responder=responder, delay_sec=0)
ra3 = G(ResponderAction, responder=responder, delay_sec=0)
self.do_call_time = None
self.do_2_call_time = None
self.do_3_call_time = None
def do_1(*args, **kwargs):
self.do_call_time = datetime.utcnow()
return True
def do_2(*args, **kwargs):
self.do_2_call_time = datetime.utcnow()
return True
def do_3(*args, **kwargs):
self.do_3_call_time = datetime.utcnow()
return True
load_function.side_effect = [do_1, do_2, do_3]
# Run the code
responder._execute(issue)
# Verify expectations
self.assertTrue(self.do_call_time < self.do_2_call_time)
self.assertTrue(self.do_2_call_time < self.do_3_call_time)
self.assertTrue(IssueAction.objects.filter(issue=issue, responder_action=ra).exists())
self.assertTrue(IssueAction.objects.filter(issue=issue, responder_action=ra2).exists())
self.assertTrue(IssueAction.objects.filter(issue=issue, responder_action=ra3).exists())
@patch('issue.models.load_function', spec_set=True)
def test__execute_stops_when_some_actions_are_not_yet_executable(self, load_function):
# Setup the scenario
delta = timedelta(seconds=30)
issue = G(Issue, creation_time=datetime.utcnow() - (2 * delta))
responder = G(Responder, issue=issue)
ra = G(ResponderAction, responder=responder, delay_sec=0, target_function='do_1')
ra2 = G(ResponderAction, responder=responder, delay_sec=0, target_function='do_2')
ra3 = G(ResponderAction, responder=responder, delay_sec=30, target_function='do_3')
self.do_call_time = None
self.do_2_call_time = None
self.do_3_call_time = None
def do_1(*args, **kwargs):
self.do_call_time = datetime.utcnow()
return True
def do_2(*args, **kwargs):
self.do_2_call_time = datetime.utcnow()
return True
def do_3(*args, **kwargs):
self.do_3_call_time = datetime.utcnow()
return True
load_function.side_effect = lambda tf: {'do_1': do_1, 'do_2': do_2}[tf]
# Run the code
responder._execute(issue)
# Verify expectations
self.assertTrue(self.do_call_time < self.do_2_call_time)
self.assertIsNone(self.do_3_call_time)
self.assertTrue(IssueAction.objects.filter(issue=issue, responder_action=ra).exists())
self.assertTrue(IssueAction.objects.filter(issue=issue, responder_action=ra2).exists())
self.assertTrue(IssueAction.objects.filter(issue=issue, responder_action=ra3).exists())
@freeze_time(datetime(2014, 8, 13, 12))
@patch('issue.models.load_function', spec_set=True)
def test__execute_resumes_after_sufficient_time(self, load_function):
# Setup the scenario
delta = timedelta(seconds=30)
issue = G(Issue, creation_time=datetime.utcnow() - (2 * delta))
responder = G(Responder, issue=issue)
ra = G(ResponderAction, responder=responder, delay_sec=0, target_function='do_1')
ra2 = G(ResponderAction, responder=responder, delay_sec=delta.total_seconds(), target_function='do_2')
G(IssueAction, issue=issue, responder_action=ra)
self.do_called = False
self.do_2_called = False
def do_1(*args, **kwargs):
self.do_called = True
return True
def do_2(*args, **kwargs):
self.do_2_called = True
return True
load_function.side_effect = lambda tf: {'do_1': do_1, 'do_2': do_2}[tf]
# Run the code
responder._execute(issue)
# Verify expectations
self.assertFalse(self.do_called)
self.assertTrue(self.do_2_called)
self.assertTrue(IssueAction.objects.filter(issue=issue, responder_action=ra).exists())
self.assertTrue(IssueAction.objects.filter(issue=issue, responder_action=ra2).exists())
@patch('issue.models.load_function', spec_set=True)
def test__execute_failure_does_not_stop_other_actions(self, load_function):
# Setup the scenario
delta = timedelta(seconds=30)
issue = G(Issue, creation_time=datetime.utcnow() - (2 * delta))
responder = G(Responder, issue=issue)
# Note: we don't care what the target_function path is since we patch the load_function function
ra = G(ResponderAction, responder=responder, delay_sec=0)
ra2 = G(ResponderAction, responder=responder, delay_sec=0)
ra3 = G(ResponderAction, responder=responder, delay_sec=30)
self.do_call_time = None
self.do_2_call_time = None
self.do_3_call_time = None
def do_1(*args, **kwargs):
self.do_call_time = datetime.utcnow()
return None
def do_2(*args, **kwargs):
self.do_2_call_time = datetime.utcnow()
raise Exception('what-an-exceptional-message')
def do_3(*args, **kwargs):
self.do_3_call_time = datetime.utcnow()
return None
load_function.side_effect = [do_1, do_2, do_3]
# Run the code
responder._execute(issue)
# Verify expectations
self.assertTrue(self.do_call_time < self.do_2_call_time)
self.assertTrue(self.do_2_call_time < self.do_3_call_time)
self.assertTrue(IssueAction.objects.filter(issue=issue, responder_action=ra).exists())
self.assertTrue(IssueAction.objects.filter(issue=issue, responder_action=ra2).exists())
self.assertTrue(IssueAction.objects.filter(issue=issue, responder_action=ra3).exists())
self.assertEqual(
json.dumps(str(Exception('what-an-exceptional-message'))),
IssueAction.objects.get(issue=issue, responder_action=ra2).details)
class ResponderActionTests(TestCase):
def test__str__(self):
r = G(ResponderAction)
self.assertEqual(
'ResponderAction: {responder} - {target_function} - {function_kwargs}'.format(
responder=r.responder, target_function=r.target_function, function_kwargs=r.function_kwargs),
str(r)
)
def test_is_time_to_execute(self):
# Setup the scenario
now = datetime(2014, 8, 11, 15, 0, 0)
delta = timedelta(minutes=30)
ra = G(ResponderAction, delay_sec=delta.total_seconds())
# Run the code and verify expectation
issue = N(Issue, creation_time=now - (delta * 2))
with freeze_time(now):
self.assertTrue(ra.is_time_to_execute(issue))
def test_is_time_to_execute_when_not_enough_time_has_passed(self):
# Setup the scenario
now = datetime(2014, 8, 11, 15, 0, 0)
delta = timedelta(minutes=30)
ra = G(ResponderAction, delay_sec=delta.total_seconds())
# Run the code and verify expectation
issue = N(Issue, creation_time=now - (delta / 2))
with freeze_time(now):
self.assertFalse(ra.is_time_to_execute(issue))
@patch('issue.models.load_function', spec_set=True)
def test_execute(self, load_function):
# Setup the scenario
target_function = 'do'
issue = G(Issue)
r = G(ResponderAction, target_function=target_function, function_kwargs={'foo': 'bar'})
now = datetime(2014, 8, 11, 15, 0, 0)
self.assertEqual(0, IssueAction.objects.count())
load_function.return_value.return_value = None
# Run the code
with freeze_time(now):
ia = r.execute(issue)
ia.save()
self.assertTrue(isinstance(ia, IssueAction))
# Verify expectations
expected_issue_action_kwargs = {
'success': True,
'execution_time': now,
'responder_action': r,
}
load_function.assert_called_with(target_function)
load_function.return_value.assert_called_with(issue, foo='bar')
self.assertTrue(IssueAction.objects.filter(issue=issue, **expected_issue_action_kwargs).exists())
# The 'None' that is stored as the details is first json encoded
self.assertEqual(json.dumps(None), IssueAction.objects.get().details)
@patch('issue.models.load_function', spec_set=True)
def test_execute_with_failure(self, load_function):
# Setup the scenario
target_function = 'fail'
issue = G(Issue)
r = G(ResponderAction, target_function=target_function, function_kwargs={'foo': 'bar'})
now = datetime(2014, 8, 11, 15, 0, 0)
self.assertEqual(0, IssueAction.objects.count())
load_function.return_value.side_effect = Exception('what-an-exceptional-message')
# Run the code
with freeze_time(now):
ia = r.execute(issue)
ia.save()
self.assertTrue(isinstance(ia, IssueAction))
# Verify expectations
expected_issue_action_kwargs = {
'success': False,
'execution_time': now,
'responder_action': r,
}
load_function.assert_called_with(target_function)
load_function.return_value.assert_called_with(issue, foo='bar')
self.assertTrue(IssueAction.objects.filter(issue=issue, **expected_issue_action_kwargs).exists())
self.assertEqual(json.dumps(str(Exception('what-an-exceptional-message'))), IssueAction.objects.get().details)
class AssertionTests(TestCase):
@patch.object(Assertion, '_resolve_open_issue', spec_set=True)
@patch('issue.models.load_function', spec_set=True)
def test_check_when_all_is_well(self, load_function, resolve_open_issue):
issue_details = {
'narg': 'baz',
}
load_function.return_value.return_value = (True, issue_details)
assertion = G(Assertion, check_function='issue.tests.model_tests.load_function')
self.assertTrue(assertion.check_assertion())
self.assertTrue(resolve_open_issue.called)
@patch.object(Assertion, '_open_or_update_issue', spec_set=True)
@patch('issue.models.load_function', spec_set=True)
def test_check_when_all_is_not_well(self, load_function, open_or_update_issue):
issue_details = {
'narg': 'baz',
}
load_function.return_value.return_value = (False, issue_details)
assertion = G(Assertion, check_function='issue.tests.model_tests.load_function')
self.assertFalse(assertion.check_assertion())
open_or_update_issue.assert_called_with(details=issue_details)
def test__open_or_update_issue_when_none_exists(self):
a = G(Assertion)
a._open_or_update_issue({})
self.assertEqual(IssueStatus.Open.value, Issue.objects.get(name=a.name).status)
def test__open_or_update_issue_when_it_is_marked_as_wont_fix(self):
a = G(Assertion)
issue = G(Issue, name=a.name, status=IssueStatus.Wont_fix.value)
a._open_or_update_issue({})
self.assertEqual(IssueStatus.Wont_fix.value, Issue.objects.get(pk=issue.pk).status)
def test__open_or_update_issue_when_it_is_marked_as_resolved(self):
a = G(Assertion)
G(Issue, name=a.name, status=IssueStatus.Resolved.value)
issue2 = a._open_or_update_issue({})
self.assertEqual(IssueStatus.Open.value, Issue.objects.get(pk=issue2.pk).status)
def test_resolve_open_issue(self):
a = G(Assertion)
issue = G(Issue, name=a.name, status=IssueStatus.Open.value)
a._resolve_open_issue()
self.assertEqual(IssueStatus.Resolved.value, Issue.objects.get(pk=issue.pk).status)
class ModelAssertionTests(TestCase):
def test_queryset(self):
am = G(TestModel)
am2 = G(TestModel)
ma = N(ModelAssertion, model_type=ContentType.objects.get_for_model(TestModel))
self.assertEqual(set(ma.queryset), set([am, am2]))
def test_check_all_pass(self):
G(TestModel, name='0')
G(TestModel, name='2')
G(TestModel, name='4')
ma = N(
ModelAssertion, model_type=ContentType.objects.get_for_model(TestModel),
check_function='issue.tests.model_tests.is_even_number')
# Run the code
r = ma.check_assertion()
# Verify expectations
self.assertTrue(r)
self.assertEqual(0, ModelIssue.objects.count())
def test_check_one_fails(self):
am1 = G(TestModel, name='1')
G(Issue, name='0')
G(Issue, name='1')
ma = N(
ModelAssertion, model_type=ContentType.objects.get_for_model(TestModel),
check_function='issue.tests.model_tests.is_even_number')
# Run the code
r = ma.check_assertion()
# Verify expectations
self.assertFalse(r)
self.assertEqual(1, ModelIssue.objects.count())
self.assertTrue(
ModelIssue.objects.filter(
record_id=am1.id, record_type=ContentType.objects.get_for_model(TestModel)).exists())
|
wesleykendall/django-issue
|
issue/tests/model_tests.py
|
Python
|
mit
| 22,256
|
class TooManyMissingFrames(Exception):
pass
class InvalidDuration(Exception):
pass
class InvalidTag(Exception):
pass
class InvalidID3TagVersion(Exception):
pass
class CouldntDecodeError(Exception):
pass
|
cbelth/pyMusic
|
pydub/exceptions.py
|
Python
|
mit
| 233
|
#! /usr/bin/env python
import os
import sys
import shutil
import logging
import argparse
import tempfile
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from webloader.phantomjs_loader import PhantomJSLoader
from webloader.curl_loader import CurlLoader
from webloader.pythonrequests_loader import PythonRequestsLoader
from webloader.chrome_loader import ChromeLoader
#from webloader.firefox_loader import FirefoxLoader
def get_loader(**kwargs):
loader = args.loader.lower()
if 'python' in loader or 'requests' in loader:
return PythonRequestsLoader(full_page=False, **kwargs)
elif 'curl' in loader:
return CurlLoader(full_page=False, **kwargs)
elif 'phantom' in loader or 'js' in loader:
return PhantomJSLoader(**kwargs)
elif 'chrome' in loader:
return ChromeLoader(**kwargs)
elif 'firefox' in loader:
return FirefoxLoader(**kwargs)
else:
logging.error('Unknown loader: %s', args.loader)
sys.exit(-1)
def main():
outdir = os.path.join(tempfile.gettempdir(), 'loader-test')
loader = get_loader(outdir=outdir)
loader.load_pages([args.url])
print loader.page_results
print loader.load_results
try:
shutil.rmtree(outdir)
except:
pass
if __name__ == "__main__":
# set up command line args
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter,\
description='Test a loader by requesting a single URL.')
parser.add_argument('url', help='The URL to load.')
parser.add_argument('-l', '--loader', default="python-requests", help='The loader to test.')
parser.add_argument('-q', '--quiet', action='store_true', default=False, help='only print errors')
parser.add_argument('-v', '--verbose', action='store_true', default=False, help='print debug info. --quiet wins if both are present')
args = parser.parse_args()
# set up logging
if args.quiet:
level = logging.WARNING
elif args.verbose:
level = logging.DEBUG
else:
level = logging.INFO
config = {
'format' : "%(levelname) -10s %(asctime)s %(module)s:%(lineno) -7s %(message)s",
'level' : level
}
logging.basicConfig(**config)
main()
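# Usage sketch (illustrative):
#
#     python test.py http://example.com --loader chrome -v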
|
dtnaylor/web-profiler
|
tools/test.py
|
Python
|
mit
| 2,306
|
# topics.serializers
# Serializers for the topic and voting models.
#
# Author: Benjamin Bengfort <bbengfort@districtdatalabs.com>
# Created: Wed Sep 09 09:34:46 2015 -0400
#
# Copyright (C) 2015 District Data Labs
# For license information, see LICENSE.txt
#
# ID: serializers.py [] benjamin@bengfort.com $
"""
Serializers for the topic and voting models.
"""
##########################################################################
## Imports
##########################################################################
from topics.models import Topic, Vote
from rest_framework import serializers
##########################################################################
## Validators
##########################################################################
class InRange(object):
"""
Validator that specifies a value must be in a particular range
"""
def __init__(self, low, high):
self.low = low
self.high = high
def __call__(self, value):
if value > self.high or value < self.low:
raise serializers.ValidationError(
"value must be between %d and %d (inclusive)" % (self.low, self.high)
)
##########################################################################
## Serializers
##########################################################################
class TopicSerializer(serializers.HyperlinkedModelSerializer):
"""
Serializes topics and their weights.
"""
class Meta:
model = Topic
fields = ('url', 'title', 'vote_total',)
extra_kwargs = {
'url': {'view_name': 'api:topic-detail',},
}
class VotingSerializer(serializers.Serializer):
"""
Serializes incoming votes.
Note: There is no model associated with this serializer
"""
vote = serializers.IntegerField(validators=[InRange(-1,1)])
display = serializers.SerializerMethodField('get_vote_display')
def get_vote_display(self, obj):
displays = {
-1: "downvote",
0: "novote",
1: "upvote",
}
return displays[obj['vote']]
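# Usage sketch (assumed, following standard DRF conventions): the InRange
# validator rejects anything outside [-1, 1].
#
#     serializer = VotingSerializer(data={'vote': 1})
#     serializer.is_valid()   # True; data={'vote': 2} would fail validation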
|
DistrictDataLabs/topicmaps
|
topics/serializers.py
|
Python
|
mit
| 2,138
|
import random
import numpy as np
class ReplayBuffer(object):
def __init__(self, max_size):
self.max_size = max_size
self.cur_size = 0
self.buffer = {}
self.init_length = 0
def __len__(self):
return self.cur_size
def seed_buffer(self, episodes):
self.init_length = len(episodes)
self.add(episodes, np.ones(self.init_length))
def add(self, episodes, *args):
"""Add episodes to buffer."""
idx = 0
while self.cur_size < self.max_size and idx < len(episodes):
self.buffer[self.cur_size] = episodes[idx]
self.cur_size += 1
idx += 1
if idx < len(episodes):
remove_idxs = self.remove_n(len(episodes) - idx)
for remove_idx in remove_idxs:
self.buffer[remove_idx] = episodes[idx]
idx += 1
assert len(self.buffer) == self.cur_size
def remove_n(self, n):
"""Get n items for removal."""
# random removal
idxs = random.sample(xrange(self.init_length, self.cur_size), n)
return idxs
def get_batch(self, n):
"""Get batch of episodes to train on."""
# random batch
idxs = random.sample(xrange(self.cur_size), n)
return [self.buffer[idx] for idx in idxs], None
def update_last_batch(self, delta):
pass
class PrioritizedReplayBuffer(ReplayBuffer):
def __init__(self, max_size, alpha=0.2,
eviction_strategy='rand'):
self.max_size = max_size
self.alpha = alpha
self.eviction_strategy = eviction_strategy
assert self.eviction_strategy in ['rand', 'fifo', 'rank']
self.remove_idx = 0
self.cur_size = 0
self.buffer = {}
self.priorities = np.zeros(self.max_size)
self.init_length = 0
def __len__(self):
return self.cur_size
def add(self, episodes, priorities, new_idxs=None):
"""Add episodes to buffer."""
if new_idxs is None:
idx = 0
new_idxs = []
while self.cur_size < self.max_size and idx < len(episodes):
self.buffer[self.cur_size] = episodes[idx]
new_idxs.append(self.cur_size)
self.cur_size += 1
idx += 1
if idx < len(episodes):
remove_idxs = self.remove_n(len(episodes) - idx)
for remove_idx in remove_idxs:
self.buffer[remove_idx] = episodes[idx]
new_idxs.append(remove_idx)
idx += 1
else:
assert len(new_idxs) == len(episodes)
for new_idx, ep in zip(new_idxs, episodes):
self.buffer[new_idx] = ep
self.priorities[new_idxs] = priorities
self.priorities[0:self.init_length] = np.max(
self.priorities[self.init_length:])
assert len(self.buffer) == self.cur_size
return new_idxs
def remove_n(self, n):
"""Get n items for removal."""
assert self.init_length + n <= self.cur_size
if self.eviction_strategy == 'rand':
# random removal
idxs = random.sample(xrange(self.init_length, self.cur_size), n)
elif self.eviction_strategy == 'fifo':
# overwrite elements in cyclical fashion
idxs = [
self.init_length +
(self.remove_idx + i) % (self.max_size - self.init_length)
for i in xrange(n)]
self.remove_idx = idxs[-1] + 1 - self.init_length
elif self.eviction_strategy == 'rank':
# remove lowest-priority indices
idxs = np.argpartition(self.priorities, n)[:n]
return idxs
def sampling_distribution(self):
p = self.priorities[:self.cur_size]
p = np.exp(self.alpha * (p - np.max(p)))
norm = np.sum(p)
if norm > 0:
uniform = 0.0
p = p / norm * (1 - uniform) + 1.0 / self.cur_size * uniform
else:
p = np.ones(self.cur_size) / self.cur_size
return p
def get_batch(self, n):
"""Get batch of episodes to train on."""
p = self.sampling_distribution()
idxs = np.random.choice(self.cur_size, size=n, replace=False, p=p)
self.last_batch = idxs
return [self.buffer[idx] for idx in idxs], p[idxs]
def update_last_batch(self, delta):
"""Update last batch idxs with new priority."""
self.priorities[self.last_batch] = np.abs(delta)
self.priorities[0:self.init_length] = np.max(
self.priorities[self.init_length:])
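# Usage sketch (illustrative; episode objects here are placeholders): add
# prioritized episodes, sample a batch, then feed updated priorities back in.
#
#     buf = PrioritizedReplayBuffer(max_size=100, alpha=0.2)
#     eps = [{'id': i} for i in range(10)]
#     buf.add(eps, np.ones(len(eps)))
#     batch, probs = buf.get_batch(4)            # episodes and sampling probs
#     buf.update_last_batch(np.random.rand(4))   # new priorities become |delta|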
|
n3011/deeprl
|
dataset/replay_v2.py
|
Python
|
mit
| 4,664
|
#encoding:utf-8
subreddit = 'jacksepticeye'
t_channel = '@r_jacksepticeye'
def send_post(submission, r2t):
return r2t.send_simple(submission)
|
Fillll/reddit2telegram
|
reddit2telegram/channels/~inactive/r_jacksepticeye/app.py
|
Python
|
mit
| 149
|
#!/bin/python
# The purpose of this script is to take the *machine-readable* output of UMLS
# MetaMap and convert it to something that looks like a sentence of UMLS CUIs,
# if possible. Ideally there would be an option in MetaMap to do this, assuming
# it is sensible.
import re
import sys
#INTERACTIVE = True
INTERACTIVE = False
# "hacks" to fix metamap weirdness
POSTPROC = True
if POSTPROC:
print 'WARNING: Performing dataset-specific postprocessing.'
# --- some regexes --- #
utterance_re = re.compile('^utterance\(')
phrase_re = re.compile('^phrase\(')
mappings_re = re.compile('^mappings\(')
candidates_re = re.compile('^candidates\(')
EOU_re = re.compile('^\'EOU')
# this is a file of sentences, fed into metamap
raw_data_path = ''
# --- grab in paths --- #
# this is the metamap output. YMMV
# created by the command:
# metamap14 -q -Q 3 --word_sense_disambiguation raw_data_path metamap_output_path
# must provide an input path
assert len(sys.argv) >= 2
metamap_output_path = sys.argv[1]
# optionally provide output path
# (this is the processed data path, the output of this script)
try:
proc_data_path = sys.argv[2]
# do not write over the input, please
assert not proc_data_path == metamap_output_path
except IndexError:
# not provided
proc_data_path = metamap_output_path + '.reform'
# --- open files --- #
metamap_output = open(metamap_output_path, 'r')
proc_data = open(proc_data_path, 'w')
# --- the first line is 'args', pop that --- #
args_line = metamap_output.readline()
# not sure what second line is but pop it too
unknown_line = metamap_output.readline()
# --- the relevant and important functions --- #
def parse_phrase(line, neg_dict={}):
"""
Takes a phrase from machine-readable format, parses its mappings, returns
a string of mapped terms (into CUIs, when possible).
"""
wordmap = dict()
# list of words in the phrase
# (note: the phrase looks like phrase('PHRASEHERE', [sometext(... )
phrase = re.sub('[\'\.]','',re.split(',\[[a-zA-Z]+\(', re.sub('phrase\(','', line))[0])
# get the candidates (and most importantly, their numbers)
candidates = metamap_output.readline()
if candidates == '' or not candidates_re.match(candidates):
parsed_phrase = phrase + ' '
return parsed_phrase
TotalCandidateCount = int(re.sub('candidates\(','',candidates).split(',')[0])
# get the mappings
mappings = metamap_output.readline()
if mappings == '' or not mappings_re.match(mappings):
parsed_phrase = phrase + ' '
return parsed_phrase
if TotalCandidateCount == 0:
# there were no mappings for this phrase
parsed_phrase = phrase + ' '
else:
# accounted for by other words
delwords = []
parsed_phrase = ''
# split the mappings up into 'ev's
split_mappings = mappings.split('ev(')
outstring = ''
for mapping in split_mappings[1:]:
CUI = mapping.split(',')[1].strip('\'')
try:
words = re.split('[\[\]]',','.join(mapping.split(',')[4:]))[1].split(',')
except IndexError:
# ugh, mapping is messed up
print 'WARNING: input is messed up'
return parsed_phrase
umls_strings = mapping.split(',')[2:4]
# CPI is the final [] in this mapping, I think/believe
ConceptPositionalInfo = mapping.split('[')[-1].split(']')[0]
if ConceptPositionalInfo in neg_dict:
# this concept has been negated!
# make sure it's the same one...
assert CUI in neg_dict[ConceptPositionalInfo]
# need to make sure it's ONE of the CUIs which was negated at this location
CUI = 'NOT_' + CUI
if INTERACTIVE:
outstring += '\n\tAssociation between '+ CUI + ' and ' + ', '.join(map(lambda x: '"'+x+'"',words))
if len(words) > 1:
outstring += ' (subsuming ' + ' '.join(map(lambda x: '"'+x+'"', words[1:])) + ')'
outstring += '\n\tbased on UMLS strings ' + ', '.join(umls_strings) +'\n'
wordmap[words[0]] = CUI
# if multiple words mapped to this CUI, remember to delete the rest
# that is: when we consume the sentence later we will 'replace' the
# first word in this list with the CUI, then delete the rest
# brittleness: delwords may appear elsewhere in the sentence
delwords += words[1:]
# split on spaces, commas
for word in re.split(', | ', phrase):
try:
# lowercase word, cause it is represented in the prolog that way
parsed_phrase += wordmap[word.lower()] + ' '
except KeyError:
if word.lower() in delwords:
continue
else:
parsed_phrase += word + ' '
if INTERACTIVE:
if len(wordmap) > 0:
# yolo
print '\nMapping phrase:',
print phrase, '...'
print outstring
print 'Mapped:', phrase, '--->',
print parsed_phrase
print ''
eh = raw_input('')
return parsed_phrase
def postproc_utterance(parsed_utterance):
"""
HACKS!
Do some 'manual' post-processing to make up for MetaMap peculiarity.
WARNING: dataset specific.
"""
# _ S__ DEID --> _S__DEID
parsed_utterance = re.sub('_ S__ DEID', '_S__DEID', parsed_utterance)
# _ S__ C2825141 --> _S__FINDING (FINDING...)
parsed_utterance = re.sub('_ S__ C2825141', '_S__FINDING', parsed_utterance)
return parsed_utterance
def parse_utterance(neg_dict={}):
"""
Suck in an utterance from the machine-readable format, parse its mapping
and then return a string of mapped terms (into CUIs).
May not be the same length as the input sentence.
"""
phrases = ''
line = metamap_output.readline()
while not EOU_re.match(line):
if phrase_re.match(line):
parsed_phrase = parse_phrase(line, neg_dict)
phrases += parsed_phrase
elif line == '':
# EOF I guess...
return phrases
elif not EOU_re.match(line):
print 'ERROR: utterance not followed by EOU line, followed by:'
print line
sys.exit('ERROR: missing EOU')
line = metamap_output.readline()
return phrases
def parse_negline(neg_line):
"""
Parse the THIRD line of the .mmo file, where the negations are stored.
Why does it not do this per-phrase? Mystery.
We connect the negated-CUI to its appearance in the text using the
ConceptPositionalInfo which _appears_ to correspond to the PosInfo field
which appears in the ev found in a mapping.
The output is neg_dict which maps these ConceptPositionalInfos into the
associated CUIs: we use this for sanity checking while parsing the mappings;
the position should be enough to identify it, but for extra safety we assert
that the CUIs are matching.
"""
assert 'neg_list([' in neg_line
neg_dict = dict()
# strip things out
# (removing "neg_list(["... and ..."]).\n")
l_stripped = neg_line[10:][:-5]
# split into separate 'negations'...
# split on ( and then remove the trailing ", negation(" at the end, first entry is useless
negations = map(lambda x: x.rstrip(')')[:-10] if 'negation' in x else x.rstrip(')'), l_stripped.split('('))[1:]
# for each negation, grab its location and CUI
for neg in negations:
# strip the string part of the CUI: we know it's between the SECOND pair of [], and before a :
NegatedConcept = neg.split('[')[2].split(':')[0].strip('\'')
# now get the concept... we know it's in the THIRD set of []... and there may be several separated by ,
ConceptPositionalInfo = neg.split('[')[3].rstrip(']')
try:
neg_dict[ConceptPositionalInfo].add(NegatedConcept)
except KeyError:
neg_dict[ConceptPositionalInfo] = set([NegatedConcept])
return neg_dict
# --- run through the file --- #
# --- get the neglist --- #
neg_line = metamap_output.readline()
neg_dict = parse_negline(neg_line)
# the first line
n = 0
while True:
line = metamap_output.readline()
if not line: break
if utterance_re.match(line):
# we are now in an utterance!
parsed_utterance = parse_utterance(neg_dict)
if POSTPROC:
# hacky post-processing
parsed_utterance = postproc_utterance(parsed_utterance)
print 'Parsed utterance:'
print '\t','"'.join(line.split('"')[1:2]).strip('[]')
print '=====>'
print '\t',parsed_utterance
proc_data.write(parsed_utterance+'\n')
n += 1
else:
# not interested in this line
continue
proc_data.close()
print '\nWrote', n, 'sentences to', proc_data_path
|
corcra/UMLS
|
parse_metamap.py
|
Python
|
mit
| 8,989
|
from copy import copy
import numpy as np
from .fortranio import FortranFile
from .snapview import SnapshotView
class SnapshotIOException(Exception):
"""Base class for exceptions in the the snapshot module."""
def __init__(self, message):
super(SnapshotIOException, self).__init__(message)
class SnapshotHeader(object):
"""
A class for a Gadget-like header.
Accessing header data
---------------------
The header information from a header, hdr, can be accessed as,
>>> hdr.header_entry_name
where 'header_entry_name' can be any of the strings acting as keys of the
schema dictionary for this header type. All valid keys are contained within
the list hdr.fields.
All (entry_name, entry_value) pairs may be iterated through using
>>> for (name, data) in hdr.iterfields():
>>> # Do something
Accessing metadata
------------------
The Snapshot file name with which this header is associated may be accessed
as
>>> hdr.fname
'some_file_name'
"""
def __init__(self, fname, header_schema):
super(SnapshotHeader, self).__init__()
self._fname = fname
# Use copy so that reference schema is not altered.
self._schema = copy(header_schema)
self._fields = []
self.verify_schema()
self.init_fields()
@property
def fields(self):
return self._fields
@property
def fname(self):
return self._fname
@fname.setter
def fname(self, fname):
self._fname = fname
def init_fields(self):
"""Reset all header attributes to zero-like values."""
for (name, fmt) in self._schema.items():
dtype, size = fmt
data = np.zeros(size, dtype=dtype)
if size == 1:
data = data[0]
setattr(self, name, data)
def iterfields(self):
for name in self.fields:
yield (name, getattr(self, name))
def load(self):
"""Load the snapshot header from the current file."""
with FortranFile(self.fname, 'rb') as ffile:
self._load(ffile)
def to_array(self):
"""Return a structured array representing the header data."""
dtype = [(k, dt, size) for k, (dt, size) in self._schema.items()]
values = tuple(getattr(self, name) for name in self.fields)
return np.array(values, dtype=dtype)
def save(self, fname=None):
"""
Write the snapshot header to the current file, overwriting the file.
A different file name to write to may optionally be provided. This
does not modify the header's fname attribute, so later calling
load() will re-load data from the original file.
The method will raise a SnapshotIOException if the current header is
not valid. See verify().
"""
if fname is None:
fname = self.fname
if self.verify() != []:
raise SnapshotIOException("Current header state invalid")
with FortranFile(fname, 'wb') as ffile:
self._save(ffile)
def verify(self):
"""
Return a list of header attributes which do not conform to the schema.
An empty list indicates that the header is valid.
"""
malformed = []
for (name, fmt) in self._schema.items():
dtype, size = fmt
data = getattr(self, name)
try:
count = len(data)
except TypeError:
count = 1
if count != size:
malformed.append(name)
else:
try:
converted = np.asarray(data).view(dtype=dtype)
except ValueError:
malformed.append(name)
return malformed
def verify_schema(self):
"""
Verify the header formatter, and update it if necessary.
When an element type is not supplied, it is assumed to be a 4-byte
float.
When an element length is also not supplied, it is assumed to be one.
Completes the header schema if possible, else raises a
SnapshotIOException exception.
"""
self._ptypes = 0
for (name, fmt) in self._schema.items():
# So that these are defined even for an invalid formatter
dtype, size = ('f4', 1)
if len(fmt) == 2:
dtype, size = fmt
elif len(fmt) == 1:
dtype, size = (fmt[0], 1)
else:
message = "Schema for header element '%s' is invalid" % name
raise SnapshotIOException(message)
try:
dtype = np.dtype(dtype)
except TypeError:
# Given dtype does not correspond to a numpy dtype.
message = "Data type for header element '%s' is invalid." % name
raise SnapshotIOException(message)
try:
size = int(size)
except TypeError:
message = "Data size for header element '%s' is invalid." % name
raise SnapshotIOException(message)
if (dtype.itemsize * size) % 4 != 0:
message = "Data bytes for header element '%s' not a multiple of 4" % name
raise SnapshotIOException(message)
self._schema[name] = (dtype, size)
self._ptypes = max(size, self._ptypes)
self._fields = self._schema.keys()
def _load(self, ffile):
raw_header = ffile.read_record('b1')
offset = 0
for (name, fmt) in self._schema.items():
dtype, size = fmt
bytewords = dtype.itemsize * size
# Must be non-scalar ndarray, hence wrap in np.array()
raw_data = np.array(raw_header[offset:offset + bytewords])
try:
data = raw_data.view(dtype=dtype)
except ValueError:
raise SnapshotIOException('Could not reinterpret')
if size == 1:
data = data[0]
offset += bytewords
setattr(self, name, data)
def _save(self, ffile):
array = self.to_array()
ffile.write_ndarray(array)
class SnapshotBase(object):
"""
A base class for a single Gadget-like simulation snapshot.
This class defines general attributes, properties and methods for
snapshot classes. All snapshot types derive from this class.
This class is not intended to be used directly. If implementing a subclass,
it is most likely it should be a subclass of GadgetSnapshot, not this class.
Subclasses will likely need to implement the _load_block() and
_parse_block() methods.
Accessing Arrays
----------------
An array may be accessed from an instantiated SnapshotBase object, s, as,
>>> array = s.block_name
'block_name' can be any of the strings acting as keys of the schema
dictionary for this snapshot type. A list is returned, with one item for
each particle type associated with this snapshot. If a particle type is not
valid for this block, its entry in the list is None. Otherwise, it is a
numpy.ndarray. For valid-but-empty particle data in a block, an empty
numpy.ndarray is present. All valid keys are contained within the list
s.fields.
All (block_name, block_data) pairs may be iterated through using
>>> for (name, data) in s.iterfields():
>>> # Do something
Particle Type Aliases
---------------------
If provided, particle type indices may be aliased to attributes. For example,
if gas particles have particle type 0, and 'pos' is a valid field, then
>>> s.pos[0] is s.gas.pos
True
However, note that s.gas is a SnapshotView, which is a read-only object.
In order to modify the dataset one must, in general, operate on s.pos[0] or
similar.
In the case that no index-to-name mapping is provided, s.gas or similar will
raise an AttributeError. The dictionary of index-to-name mappings may be
accessed as s.ptype_aliases. It will be None if no mapping is present, it
is not required to map all valid particle indices, and it cannot be
assigned to.
Accessing metadata
------------------
The file name and header are both properties of the snapshot, accessed
as
>>> s.fname
'some_file_name'
>>> s.header
For the latter, see the SnapshotHeader class.
The indices of all valid particle types for this snapshot are stored in the
list s.ptype_indices.
"""
def __init__(self, fname, header_schema=None, blocks_schema=None,
ptype_aliases=None, **kwargs):
"""
Initializes a Gadget-like snapshot.
header_schema defines the schema for loading the file header.
blocks_schema defines the schema for loading the various field data
ptype_aliases is an optional string-to-index mapping for the particle
types contained in the snapshot
"""
if header_schema is None:
raise TypeError("header_schema is required")
if blocks_schema is None:
raise TypeError("blocks_schema is required")
super(SnapshotBase, self).__init__(**kwargs)
self._fname = fname
self._aliases = ptype_aliases
self.header = SnapshotHeader(fname, header_schema)
self._fields = []
# Use copy so that reference schema is not altered.
self._schema = copy(blocks_schema)
self._ptypes = 0
self.verify_schema()
self.init_fields()
def __getattr__(self, name):
if self._aliases and name in self._aliases:
idx = self._aliases[name]
return self._ptype_view(idx)
else:
msg = "'%s' object has no attribute %s" % (type(self).__name__, name)
raise AttributeError(msg)
@property
def fields(self):
return self._fields
@property
def fname(self):
return self._fname
@fname.setter
def fname(self, fname):
self.header.fname = fname
self._fname = fname
@property
def ptype_aliases(self):
return self._aliases
@property
def ptype_indices(self):
"""
A list of the Gadget-like particle type indices in this snapshot.
Contains all valid particle types, some of which may not have any
associated data in the snapshot.
"""
return range(self._ptypes)
@ptype_indices.setter
def ptype_indices(self, value):
"""
Set the valid Gadget-like particle type indices for this snapshot.
Must be an iterable containing all required particle types. Gaps are
allowed; both [0, 1, 2, 3] and [0, 3] result in identical behaviour.
"""
self._ptypes = max(value) + 1
def init_fields(self):
"""Reset all data attributes to zero-like values."""
for (name, fmt) in self._schema.items():
dtype, ndims, ptypes, _ = fmt
pdata = self._null_block(dtype, ndims, ptypes)
setattr(self, name, pdata)
def iterfields(self):
for name in self.fields:
yield (name, getattr(self, name))
def load(self):
"""Load in snapshot data from the current file."""
with FortranFile(self.fname, 'rb') as ffile:
self.header._load(ffile)
self._load(ffile)
def save(self, fname=None):
"""
Write header and snapshot to the current file, overwriting the file.
A different file name to write to may optionally be provided. This
does not modify the header's or the snapshot's fname attribute, so
later calling load() will re-load data from the original file.
The method will raise a SnapshotIOException if the any field is not
valid. See verify().
"""
if fname is None:
fname = self.fname
if self.header.verify() != []:
raise SnapshotIOException("Current header state invalid")
if self.verify() != []:
raise SnapshotIOException("A field does not match the schema")
self.update_header()
with FortranFile(fname, 'wb') as ffile:
self.header._save(ffile)
self._save(ffile)
def update_header(self):
"""
Update the header based on the current snapshot state.
This method has no effect, but is called when saving a snapshot to file.
It should be overridden by subclasses.
"""
pass
def verify(self):
"""
Return a list of fields which do not conform to the schema.
An empty list indicates that all fields are valid.
"""
malformed = []
for name in self.fields:
# If a is an empty numpy array, nothing will be written, so we
# do not need to filter out empty arrays.
dtype, ndims, _, _ = self._schema[name]
arrays = [a for a in getattr(self, name) if a is not None]
for a in arrays:
if a.dtype != dtype or (a.ndim > 1 and a.shape[-1] != ndims):
print name, a.dtype, dtype
malformed.append(name)
# Don't want duplicates; one problem is sufficient.
break
return malformed
def verify_schema(self):
"""Verify the current schema."""
self._verify_schema()
def _block_exists(self, name, ptypes):
"""
Return True if specified particle types exist for specified block.
Must be overridden by subclasses.
"""
raise NotImplementedError("Subclasses must override _block_exists")
def _get_flag(self, flag):
if isinstance(flag, str):
return getattr(self.header, flag)
else:
return flag
def _load(self, ffile):
"""
Load data for each block in the schema from the open FortranFile ffile.
Only blocks with flags resolving to True are loaded from the file.
"""
for (name, fmt) in self._schema.items():
dtype, ndims, ptypes, flag = fmt
if self._block_exists(name, ptypes) and self._get_flag(flag):
block_data = self._load_block(ffile, name, dtype)
pdata = self._parse_block(block_data, name, dtype, ndims, ptypes)
else:
pdata = self._null_block(dtype, ndims, ptypes)
setattr(self, name, pdata)
def _load_block(self, ffile, name, dtype):
"""
Return the next block from the open FortranFile ffile as an ndarray.
This is called before parsing each block's raw data, and may need to
be overridden by subclasses.
"""
return ffile.read_record(dtype)
def _null_array(self, dtype):
"""Return an empty numpy array of element type dtype."""
return np.empty(0, dtype=dtype)
def _null_block(self, dtype, ndims, ptypes):
"""
Return a block of zero-like data, or None where ptype not appropriate.
"""
pdata = []
for p in self.ptype_indices:
if p not in ptypes:
parray = None
else:
parray = self._null_array(dtype)
if ndims > 1:
parray.shape = (0, ndims)
pdata.append(parray)
return pdata
def _parse_block(self, block_data, name, dtype, ndims, ptypes):
"""
Return a list of data for each particle type in the block.
Interpret the raw data within block_data according to the schema,
and apply the specified particle type and dimensionality operations.
Must be overridden by subclasses.
"""
raise NotImplementedError("Subclasses must override _parse_block")
def _ptype_view(self, index):
ptype_data = ((name, field[index]) for name, field in self.iterfields())
view = SnapshotView(self, ptype_data)
return view
def _save(self, ffile):
for name in self.fields:
# If a is an empty numpy array, nothing will be written, so we
# do not need to filter out empty arrays.
arrays = [a for a in getattr(self, name) if a is not None]
ffile.write_ndarrays(arrays)
def _verify_schema(self):
"""
Verifies the block formatter, and updates it if necessary.
When a block's data type is not supplied, it is assumed to be 4-byte
floats.
When a block's N-dimension value is also not supplied, it is assumed to
be 1.
When a block's particle type is also not supplied, it is assumed to
apply to all particle types.
All valid particle types must appear in at least one of the block
schemas, though a particle type of 0 is always assumed.
When called with no arguments, the internal block formatter is used.
Completes the block schema if possible, else raises a
SnapshotIOException.
"""
max_ptype = -1
for (name, fmt) in self._schema.items():
# So that these are defined even for an invalid formatter.
dtype, ndims, ptypes, flag = ('f4', 1, [None, ], True)
if len(fmt) == 4:
dtype, ndims, ptypes, flag = fmt
elif len(fmt) == 3:
dtype, ndims, ptypes = fmt
elif len(fmt) == 2:
dtype, ndims = fmt
elif len(fmt) == 1:
dtype, = fmt
else:
message = "Formatter for block '%s' is invalid" % name
raise SnapshotIOException(message)
try:
dtype = np.dtype(dtype)
except TypeError:
# Given dtype does not correspond to a numpy dtype.
message = "Data type for block '%s' is invalid." % name
raise SnapshotIOException(message)
try:
ndims = int(ndims)
except TypeError:
message = "N-dimensions size for block '%s' is invalid." % name
raise SnapshotIOException(message)
max_ptype = max(max_ptype, max(ptypes))
self._schema[name] = (dtype, ndims, ptypes, flag)
if max_ptype == -1:
message = 'At least one block schema must have specified ptypes'
raise SnapshotIOException(message)
# For any block which had no ptypes set, assume it is valid for all
# ptypes.
self._ptypes = max_ptype + 1
for (name, fmt) in self._schema.items():
dtype, ndims, ptypes, flag = fmt
if ptypes == [None]:
self._schema[name] = (dtype, ndims, list(self.ptype_indices), flag)
self._fields = self._schema.keys()
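# Schema sketch (hypothetical field names and sizes, in the formats that
# verify_schema() and _verify_schema() accept): header entries map
# name -> (dtype, count); block entries map name -> (dtype, ndims, ptypes[, flag]).
#
#     example_header_schema = {
#         'npart': ('i4', 6),
#         'time': ('f8', 1),
#     }
#     example_blocks_schema = {
#         'pos': ('f4', 3, [0, 1]),   # flag defaults to True
#         'ids': ('u4', 1, [0, 1]),
#     }
#     hdr = SnapshotHeader('snap_000', example_header_schema)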
|
spthm/glio
|
snapshot.py
|
Python
|
mit
| 18,934
|
"""
Export to RAW/RPL file format
Based on:
http://www.nist.gov/lispix/doc/image-file-formats/raw-file-format.htm
"""
# Standard library modules.
import os
# Third party modules.
# Local modules.
from pyhmsa.fileformat.exporter.exporter import _Exporter, _ExporterThread
from pyhmsa.spec.datum.analysislist import AnalysisList2D
from pyhmsa.spec.datum.imageraster import ImageRaster2D, ImageRaster2DSpectral
# Globals and constants variables.
class _ExporterRAWThread(_ExporterThread):
def _run(self, datafile, dirpath, *args, **kwargs):
basefilename = datafile.header.title or 'Untitled'
keys = set(datafile.data.findkeys(AnalysisList2D)) | \
set(datafile.data.findkeys(ImageRaster2D)) | \
set(datafile.data.findkeys(ImageRaster2DSpectral))
length = len(keys)
filepaths = []
for i, identifier in enumerate(keys):
datum = datafile.data[identifier]
self._update_status(i / length, 'Exporting %s' % identifier)
filename = basefilename + '_' + identifier
lines = self._create_rpl_lines(identifier, datum)
rpl_filepath = os.path.join(dirpath, filename + '.rpl')
with open(rpl_filepath, 'w') as fp:
fp.write('\n'.join(lines))
raw_filepath = os.path.join(dirpath, filename + '.raw')
with open(raw_filepath, 'wb') as fp:
datum = datum.astype(datum.dtype.newbyteorder('<'))
fp.write(datum.tobytes())
filepaths.append(raw_filepath)
return filepaths
def _create_rpl_lines(self, identifier, datum):
lines = []
lines.append('key\t%s' % identifier)
lines.append('offset\t0')
if isinstance(datum, ImageRaster2D):
width, height = datum.shape
depth = 1
record_by = 'dont-care'
elif isinstance(datum, ImageRaster2DSpectral):
width, height, depth = datum.shape
record_by = 'vector'
elif isinstance(datum, AnalysisList2D):
depth, width, height = datum.shape
record_by = 'image'
else:
            raise IOError('Unknown datum type')
lines.append('width\t%i' % width)
lines.append('height\t%i' % height)
lines.append('depth\t%i' % depth)
lines.append('record-by\t%s' % record_by)
dtype = datum.dtype
lines.append('data-length\t%i' % dtype.itemsize)
byteorder = 'little-endian' if dtype.itemsize > 1 else 'dont-care'
lines.append('byte-order\t%s' % byteorder)
if dtype.kind == 'f':
data_type = 'float'
elif dtype.kind == 'u':
data_type = 'unsigned'
else:
data_type = 'signed'
lines.append('data-type\t%s' % data_type)
return lines
class ExporterRAW(_Exporter):
def _create_thread(self, datafile, dirpath, *args, **kwargs):
return _ExporterRAWThread(datafile, dirpath)
def validate(self, datafile):
super().validate(datafile)
identifiers = set(datafile.data.findkeys(AnalysisList2D)) | \
set(datafile.data.findkeys(ImageRaster2D)) | \
set(datafile.data.findkeys(ImageRaster2DSpectral))
if not identifiers:
raise ValueError('Datafile must contain at least one ' + \
'AnalysisList2D, ImageRaster2D or ' + \
'ImageRaster2DSpectral datum')
|
pyhmsa/pyhmsa
|
pyhmsa/fileformat/exporter/raw.py
|
Python
|
mit
| 3,504
|
import time
import cgi
import json
import os
import BaseHTTPServer
HOST_NAME = 'localhost'
PORT_NUMBER = 9400
class PlayerService(BaseHTTPServer.BaseHTTPRequestHandler):
def do_POST(self):
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
if ctype == 'multipart/form-data':
postvars = cgi.parse_multipart(self.rfile, pdict)
elif ctype == 'application/x-www-form-urlencoded':
length = int(self.headers.getheader('content-length'))
postvars = cgi.parse_qs(self.rfile.read(length), keep_blank_values=1)
else:
postvars = {}
action = postvars['action'][0]
w, r = os.popen2("./obj/player " + action)
if 'game_state' in postvars:
game_state = postvars['game_state'][0]
w.write(game_state)
w.close()
response = r.read()
self.wfile.write(response)
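# Example request (illustrative; 'bet_request' is a hypothetical action name,
# passed straight through to ./obj/player):
#     curl -X POST http://localhost:9400/ \
#          --data-urlencode 'action=bet_request' \
#          --data-urlencode 'game_state={}'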
if __name__ == '__main__':
server_class = BaseHTTPServer.HTTPServer
httpd = server_class((HOST_NAME, PORT_NUMBER), PlayerService)
print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
try:
httpd.serve_forever()
except KeyboardInterrupt:
pass
httpd.server_close()
print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
|
rolandkakonyi/poker-player-objc
|
player_service.py
|
Python
|
mit
| 1,447
|
from django.http import HttpResponseRedirect
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext as _
from .custom_fields import (
BooleanTimeStampField,
BooleanTimeStampFormField,
BooleanTimeStampWidget,
)
try:
from dal_select2.widgets import ModelSelect2Multiple
except ImportError: # Avoid ImportError in the absence of django-autocomplete-light
ModelSelect2Multiple = None
class RedirectableAdmin(object):
"""If you use this as a mixin to your ModelAdmin then the change and add forms will accept
a url parameter '_redirect' and redirect to that on save"""
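    # e.g. /admin/myapp/widget/add/?_redirect=/dashboard/ returns the user to
    # /dashboard/ after a successful save (illustrative URLs).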
def response_post_save_change(self, request, obj):
if '_redirect' in request.GET:
return HttpResponseRedirect(request.GET['_redirect'])
else:
return super(RedirectableAdmin, self).response_post_save_change(request, obj)
def response_post_save_add(self, request, obj):
if '_redirect' in request.GET:
return HttpResponseRedirect(request.GET['_redirect'])
else:
return super(RedirectableAdmin, self).response_post_save_add(request, obj)
def delete_view(self, request, object_id, extra_context=None):
response = super(RedirectableAdmin, self).delete_view(request, object_id, extra_context)
if '_redirect' in request.GET and response.status_code == 302:
return HttpResponseRedirect(request.GET['_redirect'])
else:
return response
class ModifyRelatedObjectAdmin(RedirectableAdmin):
"""If you use this as a mixin to your ModelAdmin then the change form will accept
_redirect same as with RedirectableAdmin. Additionally add forms
will also accept paramters to identify a parent object and field which will
be set to the newly created object before redirecting"""
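    # The parameter is space-separated, e.g. (illustrative values):
    #     ?_related_object=myapp book 42 author
    # i.e. app label, model name, object pk and the field to set on it.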
def response_post_save_add(self, request, obj):
if '_related_object' in request.GET:
app_label, model_name, object_id, field_name = request.GET['_related_object'].split(' ')
content_type = ContentType.objects.get_by_natural_key(app_label, model_name)
related_object = content_type.get_object_for_this_type(pk=object_id)
setattr(related_object, field_name, obj)
related_object.save()
return super(ModifyRelatedObjectAdmin, self).response_post_save_add(request, obj)
class HideAddRelatedMixin(object):
"""ModelAdmin mixin that disables the green 'add related object' plus icon
for any fields listed in hide_add_related_fields
Usage: hide_add_related_fields = ['user', 'group']
    Alternatively, if there is a property 'show_add_related_fields' then this works as a whitelist"""
def get_form(self, request, obj=None, **kwargs):
form = super(HideAddRelatedMixin, self).get_form(request, obj, **kwargs)
if getattr(self, 'show_add_related_fields', None) is not None:
for field in form.base_fields.keys():
if field not in self.show_add_related_fields:
form.base_fields[field].widget.can_add_related = False
else:
for field in getattr(self, 'hide_add_related_fields', []):
form.base_fields[field].widget.can_add_related = False
return form
class DisableDeletionMixin(object):
def has_delete_permission(self, request, obj=None):
return False
class LongListFilterMixin(object):
"""Automatically reduce the amount of space taken up by very long filters.
It hides the list of options and replaces it with an input field that autocompletes.
Unlike a true autocomplete this won't save queries or speed up page load
but it's a quick and dirty improvement to the UI"""
@property
def media(self):
cdn_base = 'https://ajax.googleapis.com/ajax/libs/'
show = getattr(self, 'long_list_filter_show', 'active')
threshold = getattr(self, 'long_list_filter_threshold', '300')
height = getattr(self, 'long_list_filter_height', '100')
media = super(LongListFilterMixin, self).media
media.add_js([
'{}jqueryui/1.11.4/jquery-ui.min.js'.format(cdn_base),
'js/ixxy_admin_utils/long_list_filter.js?show={}&threshold={}&height={}'.format(
show,
threshold,
height,
),
])
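        # With the defaults above the script URL resolves to:
        # js/ixxy_admin_utils/long_list_filter.js?show=active&threshold=300&height=100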
media.add_css({
'all': [
'{}jqueryui/1.11.4/themes/smoothness/jquery-ui.css'.format(cdn_base)
]
})
return media
class AutocompleteMixin(object):
"""Reduces the amount of boilerplate needed by autocomplete-light.
Define a property on your ModelAdmin called 'autocomplete_widgets'.
This is a dict mapping field names to Autocomplete fields:
autocomplete_widgets = {
'contact': autocomplete.ModelSelect2(url='contact-autocomplete'),
'event': autocomplete.ModelSelect2(url='event-autocomplete'),
'team': autocomplete.ModelSelect2(url='team-autocomplete', forward=['event']),
}
"""
def formfield_for_dbfield(self, db_field, **kwargs):
# Automatically assign autocomplete widgets based on an autocomplete_widgets dict
if db_field.name in getattr(self, 'autocomplete_widgets', {}):
kwargs['widget'] = self.autocomplete_widgets[db_field.name]
return super(AutocompleteMixin, self).formfield_for_dbfield(db_field, **kwargs)
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
# Remove the hardcoded m2m help_text if the widget is ModelSelect2Multiple
form_field = super(AutocompleteMixin, self).formfield_for_manytomany(
db_field,
request,
**kwargs
)
        if ModelSelect2Multiple is not None and isinstance(form_field.widget, ModelSelect2Multiple):
unwanted_msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.')
form_field.help_text = form_field.help_text.replace(unwanted_msg, '')
return form_field
class BooleanTimeStampMixin(object):
"""If you this with any model containing BooleanTimeStampField
then flipping the checkbox to 'on' will set the datetime to timezone.now()
The widget will be a checkbox with the stored datetime as label"""
def formfield_for_dbfield(self, db_field, **kwargs):
if isinstance(db_field, BooleanTimeStampField):
kwargs['form_class'] = BooleanTimeStampFormField
kwargs['widget'] = BooleanTimeStampWidget(label=db_field.verbose_name.title())
kwargs['label'] = ''
            # 'request' is not a Field.formfield() argument; drop it before
            # the super() call hands kwargs through to formfield().
            kwargs.pop('request', None)
return super(BooleanTimeStampMixin, self).formfield_for_dbfield(db_field, **kwargs)
|
DjangoAdminHackers/ixxy-admin-utils
|
ixxy_admin_utils/admin_mixins.py
|
Python
|
mit
| 6,862
|
"""
WSGI config for bugsnag_demo project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bugsnag_demo.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
overplumbum/bugsnag-python
|
example/django/bugsnag_demo/wsgi.py
|
Python
|
mit
| 399
|
import _plotly_utils.basevalidators
class TickvalsValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(
self, plotly_name="tickvals", parent_name="sunburst.marker.colorbar", **kwargs
):
super(TickvalsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
role=kwargs.pop("role", "data"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/sunburst/marker/colorbar/_tickvals.py
|
Python
|
mit
| 483
|
"""Box geometry."""
from __future__ import division
from .helpers import poparg
class Box(object):
"""A Box holds the geometry of a box with a position and a size.
Because of how it is typically used, it takes a single dictionary of
arguments. The dictionary of arguments has arguments popped from it, and
others ignored::
>>> args = {'foo': 17, 'size': (10, 50), 'left': (100, 200)}
>>> b = Box(args)
>>> b.center
(105.0, 200)
>>> b.size
(10, 50)
>>> args
{'foo': 17}
The center and size are available as individual components also::
>>> b.cx
105.0
>>> b.cy
200
>>> b.w
10
>>> b.h
50
You can ask about the edges of the box as coordinates (top, bottom, left,
right) or points (north, south, east, west)::
>>> b.north
(105.0, 175.0)
>>> b.south
(105.0, 225.0)
>>> b.top
175.0
>>> b.bottom
225.0
"""
def __init__(self, args):
other_box = poparg(args, box=None)
if other_box is not None:
# Copy all the attributes of the other box.
self.__dict__.update(other_box.__dict__)
return
size = poparg(args, size=None)
assert size, "Have to specify a size!"
pos_name = pos = None
arg_names = "left center right top topleft topright".split()
for arg_name in arg_names:
arg = poparg(args, **{arg_name: None})
if arg is not None:
assert pos is None, "Got duplicate position: %s" % pos_name
pos_name = arg_name
pos = arg
# Can specify position as pos=('topright', (100,200))
pos_arg = poparg(args, pos=None)
if pos_arg is not None:
assert pos is None, "Got duplicate position: pos"
pos_name, pos = pos_arg
if pos_name == 'left':
center = (pos[0]+size[0]/2, pos[1])
elif pos_name == 'right':
center = (pos[0]-size[0]/2, pos[1])
elif pos_name == 'center':
center = pos
elif pos_name == 'top':
center = (pos[0], pos[1]+size[1]/2)
elif pos_name == 'topleft':
center = (pos[0]+size[0]/2, pos[1]+size[1]/2)
elif pos_name == 'topright':
center = (pos[0]-size[0]/2, pos[1]+size[1]/2)
else:
assert False, "Have to specify a position!"
self.cx, self.cy = center
self.w, self.h = size
self.rise = poparg(args, rise=0)
self.set = poparg(args, set=999999)
self.fade = poparg(args, fade=0)
def __repr__(self):
return "<Box ={0.w}x{0.h} @{0.cx},{0.cy}>".format(self)
def __eq__(self, other):
if not isinstance(other, Box):
return False
return (
self.center == other.center and
self.size == other.size and
self.rise == other.rise and
self.set == other.set and
self.fade == other.fade
)
def __ne__(self, other):
return not self == other
def translate(self, dx, dy):
"""Create a new box just like this one, but translated.
`dx` and `dy` are deltas for the center point. The returned box is
the same as this one, but the center has moved::
>>> b = Box(dict(size=(10,20), center=(100,200)))
>>> b2 = b.translate(1, 2)
>>> b2.center
(101, 202)
>>> b2.size
(10, 20)
The original box is unchanged::
>>> b.center
(100, 200)
"""
box = Box(dict(box=self))
box.cx += dx
box.cy += dy
return box
def scale(self, sx, sy=None):
"""Make a new box that is scaled from this one."""
sy = sy or sx
cx = self.cx * sx
cy = self.cy * sy
w = self.w * sx
h = self.h * sy
return Box(dict(size=(w, h), center=(cx, cy)))
def union(self, other):
"""Create a new box that covers self and other."""
left = min(self.left, other.left)
right = max(self.right, other.right)
top = min(self.top, other.top)
bottom = max(self.bottom, other.bottom)
width = right - left
height = bottom - top
box = Box(dict(size=(width, height), topleft=(left, top)))
return box
@property
def center(self):
"""The center point of the box."""
return self.cx, self.cy
@property
def size(self):
"""The width and height as a pair."""
return self.w, self.h
@property
def top(self):
"""The y-coodinate of the top edge."""
return self.cy - self.h/2
@property
def bottom(self):
"""The y-coordinate of the bottom edge."""
return self.cy + self.h/2
@property
def left(self):
"""The x-coordinate of the left edge."""
return self.cx - self.w/2
@property
def right(self):
"""The x-coordinate of the right edge."""
return self.cx + self.w/2
@property
def north(self):
"""The point at the north of the box."""
return self.cx, self.top
@property
def south(self):
"""The point at the south of the box."""
return self.cx, self.bottom
@property
def east(self):
"""The point at the east of the box."""
return self.right, self.cy
@property
def west(self):
"""The point at the west of the box."""
return self.left, self.cy
|
nedbat/cupid
|
cupid/box.py
|
Python
|
mit
| 5,647
|
"""
Recursive data-types and support functions.
"""
|
OaklandPeters/recursor
|
recursor/__init__.py
|
Python
|
mit
| 52
|
from django.conf.urls import *
from apps.profile import views
urlpatterns = patterns('',
url(r'^get_preferences?/?', views.get_preference),
url(r'^set_preference/?', views.set_preference),
url(r'^set_account_settings/?', views.set_account_settings),
url(r'^get_view_setting/?', views.get_view_setting),
url(r'^set_view_setting/?', views.set_view_setting),
url(r'^set_collapsed_folders/?', views.set_collapsed_folders),
url(r'^paypal_form/?', views.paypal_form),
url(r'^paypal_return/?', views.paypal_return, name='paypal-return'),
url(r'^is_premium/?', views.profile_is_premium, name='profile-is-premium'),
url(r'^paypal_ipn/?', include('paypal.standard.ipn.urls'), name='paypal-ipn'),
url(r'^stripe_form/?', views.stripe_form, name='stripe-form'),
url(r'^activities/?', views.load_activities, name='profile-activities'),
url(r'^payment_history/?', views.payment_history, name='profile-payment-history'),
url(r'^cancel_premium/?', views.cancel_premium, name='profile-cancel-premium'),
url(r'^refund_premium/?', views.refund_premium, name='profile-refund-premium'),
url(r'^upgrade_premium/?', views.upgrade_premium, name='profile-upgrade-premium'),
url(r'^delete_account/?', views.delete_account, name='profile-delete-account'),
url(r'^forgot_password_return/?', views.forgot_password_return, name='profile-forgot-password-return'),
url(r'^forgot_password/?', views.forgot_password, name='profile-forgot-password'),
url(r'^delete_all_sites/?', views.delete_all_sites, name='profile-delete-all-sites'),
url(r'^email_optout/?', views.email_optout, name='profile-email-optout'),
)
|
eric-stanley/NewsBlur
|
apps/profile/urls.py
|
Python
|
mit
| 1,662
|
#!/usr/bin/env python
"""
Main command line script of the pas package.
The main function contained in this module is used ai main entry point for the
pas command line utility.
The script is automatically created by setuptool, but this file can be
directly invoked with `python path/to/pas.py` or directly if its executable
flag is set.
"""
import itertools
import logging
import logging.handlers
import os
import sys
# pylint: disable-msg=E0611
# I know relative imports are not the holy grail, but here we need them and
# it is a pylint bug not to recognized empty parent paths.
from .. import commands # Relative imports to avoid name clashing
from ..conf import settings # Relative imports to avoid name clashing
# pylint: enable-msg=E0611
# Reenable unknown name detection
from fabric.state import connections
# pylint: disable-msg=W0105
# Docstring for variables are not recognized by pylint, but epydoc parses them
LOGFILE = os.getenv('PAS_LOGFILE') or 'pas.log'
"""Logfile name, settable using the PAS_LOGFILE env variable"""
VERBOSITY = logging.INFO
"""Default verbosity for console output"""
def main():
"""
First function called upon command line invocation. Builds the command
line parser, parses the arguments, configures logging and invokes the
command.
"""
# Configure logging
file_formatter = logging.Formatter("%(asctime)s - %(levelname)10s - " \
"%(message)s (%(pathname)s:%(lineno)d)")
console_formatter = logging.Formatter("%(levelname)10s: %(message)s")
# All console output not explicitly directed to the user should be a log
# message instead
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(console_formatter)
console_handler.setLevel(20) # Don't show debug log messages until the
# verbosity is set
# Buffer the logging until no errors happen
buffered_handler = logging.handlers.MemoryHandler(9999, logging.CRITICAL)
# Capture all logging output and write it to the specified log file
    file_handler = logging.FileHandler(LOGFILE, 'w', delay=True)
file_handler.setFormatter(file_formatter)
file_handler.setLevel(40)
logger = logging.getLogger()
logger.setLevel(1)
logger.addHandler(console_handler)
logger.addHandler(buffered_handler)
# Build base parser
parser = commands.build_mainparser()
arguments = itertools.takewhile(lambda x: x.startswith('-'), sys.argv[1:])
arguments = (arg for arg in arguments if arg not in ('-h', '--help'))
command_line = sys.argv[:1] + list(arguments)
# Parse the base arguments (verbosity and settings)
args, remaining = parser.parse_known_args(command_line)
buffered_handler.setTarget(file_handler)
# Get the verbosity level
verbosity = max(1, VERBOSITY - 10 * (len(args.verbose) - len(args.quiet)))
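    # e.g. with VERBOSITY = INFO (20), a single -v yields 10 (DEBUG) and a
    # single -q yields 30 (WARNING); max() keeps the level at or above 1, so
    # -vv shows every record.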
console_handler.setLevel(verbosity)
file_handler.setLevel(1)
paramiko_logger = logging.getLogger('paramiko.transport')
paramiko_logger.setLevel(verbosity + 10)
# Load settings
try:
settings.loadfrompath(path=args.settings)
nosettings = False
except ImportError:
from ..conf import basesettings
settings.load(basesettings)
nosettings = True
# Build complete parser
parser = commands.build_subparsers(parser)
# Parse arguments
command = args = parser.parse_args()
res = 0
# Check that settings where loaded if needed
if not getattr(command.execute, 'nosettings', False) and nosettings:
logger.critical("This command requires the settings module to be " \
"present on path or defined using the " \
"PAS_SETTINGS_MODULE environment variable.")
res = 1
# Execute command
if not res:
res = command.execute(args)
# Cleanup fabric connections if needed
for key in connections.keys():
connections[key].close()
del connections[key]
# Check execution result
if res:
# ...an error occurred, write the logfile
buffered_handler.flush()
print
print "pas exited with a non-zero exit status (%d). A complete log " \
"was stored in the %s file." % (res, LOGFILE)
print
else:
# ...no errors occurred, avoid to flush the buffer
buffered_handler.setTarget(None)
# Need to close the buffered handler before sysexit is called or it will
# result in an exception
buffered_handler.close()
return res
if __name__ == '__main__':
sys.exit(main())
|
GaretJax/pop-analysis-suite
|
pas/bin/pas.py
|
Python
|
mit
| 4,689
|
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import json
from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
from pdc.apps.common.serializers import StrictSerializerMixin, DynamicFieldsSerializerMixin
class DefaultFilenameGenerator(object):
doc_format = '{name}-{version}-{release}.{arch}.rpm'
def __call__(self):
return models.RPM.default_filename(self.field.parent.initial_data)
def set_context(self, field):
self.field = field
class DependencySerializer(serializers.BaseSerializer):
doc_format = '{ "recommends": ["string"], "suggests": ["string"], "obsoletes": ["string"],' \
'"provides": ["string"], "conflicts": ["string"], "requires": ["string"] }'
def to_representation(self, deps):
return deps
def to_internal_value(self, data):
choices = dict([(y, x) for (x, y) in models.Dependency.DEPENDENCY_TYPE_CHOICES])
result = []
for key in data:
if key not in choices:
raise serializers.ValidationError('<{}> is not a known dependency type.'.format(key))
type = choices[key]
if not isinstance(data[key], list):
raise serializers.ValidationError('Value for <{}> is not a list.'.format(key))
result.extend([self._dep_to_internal(type, key, dep) for dep in data[key]])
return result
def _dep_to_internal(self, type, human_type, data):
if not isinstance(data, basestring):
raise serializers.ValidationError('Dependency <{}> for <{}> is not a string.'.format(data, human_type))
m = models.Dependency.DEPENDENCY_PARSER.match(data)
if not m:
raise serializers.ValidationError('Dependency <{}> for <{}> has bad format.'.format(data, human_type))
groups = m.groupdict()
return models.Dependency(name=groups['name'], type=type,
comparison=groups.get('op'), version=groups.get('version'))
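    # For illustration (assuming DEPENDENCY_PARSER captures 'name', 'op' and
    # 'version' groups): {"requires": ["python >= 2.6"]} yields a Dependency
    # with name='python', comparison='>=', version='2.6' and the 'requires'
    # type.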
class RPMSerializer(StrictSerializerMixin,
DynamicFieldsSerializerMixin,
serializers.ModelSerializer):
filename = serializers.CharField(default=DefaultFilenameGenerator())
linked_releases = serializers.SlugRelatedField(many=True, slug_field='release_id',
queryset=models.Release.objects.all(), required=False)
linked_composes = serializers.SlugRelatedField(read_only=True, slug_field='compose_id', many=True)
dependencies = DependencySerializer(required=False, default={})
class Meta:
model = models.RPM
fields = ('id', 'name', 'version', 'epoch', 'release', 'arch', 'srpm_name',
'srpm_nevra', 'filename', 'linked_releases', 'linked_composes',
'dependencies')
def create(self, validated_data):
dependencies = validated_data.pop('dependencies', [])
instance = super(RPMSerializer, self).create(validated_data)
for dep in dependencies:
dep.rpm = instance
dep.save()
return instance
def update(self, instance, validated_data):
dependencies = validated_data.pop('dependencies', None)
instance = super(RPMSerializer, self).update(instance, validated_data)
if dependencies is not None or not self.partial:
models.Dependency.objects.filter(rpm=instance).delete()
for dep in dependencies or []:
dep.rpm = instance
dep.save()
return instance
class ImageSerializer(StrictSerializerMixin, serializers.ModelSerializer):
image_format = serializers.SlugRelatedField(slug_field='name', queryset=models.ImageFormat.objects.all())
image_type = serializers.SlugRelatedField(slug_field='name', queryset=models.ImageType.objects.all())
composes = serializers.SlugRelatedField(read_only=True,
slug_field='compose_id',
many=True)
class Meta:
model = models.Image
fields = ('file_name', 'image_format', 'image_type', 'disc_number',
'disc_count', 'arch', 'mtime', 'size', 'bootable',
'implant_md5', 'volume_id', 'md5', 'sha1', 'sha256',
'composes')
class RPMRelatedField(serializers.RelatedField):
def to_representation(self, value):
return unicode(value)
def to_internal_value(self, data):
request = self.context.get('request', None)
if isinstance(data, dict):
required_data = {}
errors = {}
for field in ['name', 'epoch', 'version', 'release', 'arch', 'srpm_name']:
try:
required_data[field] = data[field]
except KeyError:
errors[field] = 'This field is required.'
if errors:
raise serializers.ValidationError(errors)
# NOTE(xchu): pop out fields not in unique_together
required_data.pop('srpm_name')
try:
rpm = models.RPM.objects.get(**required_data)
except (models.RPM.DoesNotExist,
models.RPM.MultipleObjectsReturned):
serializer = RPMSerializer(data=data,
context={'request': request})
if serializer.is_valid():
rpm = serializer.save()
model_name = ContentType.objects.get_for_model(rpm).model
if request and request.changeset:
request.changeset.add(model_name,
rpm.id,
'null',
json.dumps(rpm.export()))
return rpm
else:
raise serializers.ValidationError(serializer.errors)
except Exception as err:
raise serializers.ValidationError("Can not get or create RPM with your input(%s): %s." % (data, err))
else:
return rpm
else:
raise serializers.ValidationError("Unsupported RPM input.")
class ArchiveSerializer(StrictSerializerMixin, serializers.ModelSerializer):
class Meta:
model = models.Archive
fields = ('build_nvr', 'name', 'size', 'md5')
class ArchiveRelatedField(serializers.RelatedField):
def to_representation(self, value):
serializer = ArchiveSerializer(value)
return serializer.data
def to_internal_value(self, data):
request = self.context.get('request', None)
if isinstance(data, dict):
required_data = {}
errors = {}
for field in ['build_nvr', 'name', 'size', 'md5']:
try:
required_data[field] = data[field]
except KeyError:
errors[field] = 'This field is required.'
if errors:
raise serializers.ValidationError(errors)
# NOTE(xchu): pop out fields not in unique_together
required_data.pop('size')
try:
archive = models.Archive.objects.get(**required_data)
except (models.Archive.DoesNotExist,
models.Archive.MultipleObjectsReturned):
serializer = ArchiveSerializer(data=data,
context={'request': request})
if serializer.is_valid():
archive = serializer.save()
model_name = ContentType.objects.get_for_model(archive).model
if request and request.changeset:
request.changeset.add(model_name,
archive.id,
'null',
json.dumps(archive.export()))
return archive
else:
raise serializers.ValidationError(serializer.errors)
except Exception as err:
raise serializers.ValidationError("Can not get or create Archive with your input(%s): %s." % (data, err))
else:
return archive
else:
raise serializers.ValidationError("Unsupported Archive input.")
class BuildImageSerializer(StrictSerializerMixin, serializers.HyperlinkedModelSerializer):
image_format = serializers.SlugRelatedField(slug_field='name', queryset=models.ImageFormat.objects.all())
rpms = RPMRelatedField(many=True, read_only=False, queryset=models.RPM.objects.all(), required=False)
archives = ArchiveRelatedField(many=True, read_only=False, queryset=models.Archive.objects.all(), required=False)
releases = serializers.SlugRelatedField(many=True, slug_field='release_id', queryset=models.Release.objects.all(),
required=False)
class Meta:
model = models.BuildImage
fields = ('url', 'image_id', 'image_format', 'md5', 'rpms', 'archives', 'releases')
|
lao605/product-definition-center
|
pdc/apps/package/serializers.py
|
Python
|
mit
| 9,310
|
import view
try:
view.main()
except Exception:
print('Invalid List Format')
view.terminate()
|
surru/Three-Musketeers-Game
|
multiagent/main.py
|
Python
|
mit
| 113
|
# -*- coding: utf-8 -*-
if False:
from gluon import current, URL, SQLFORM, redirect
from gluon import IS_NOT_EMPTY, Field, IS_EMAIL
from gluon import IS_NOT_IN_DB
request = current.request
response = current.response
session = current.session
cache = current.cache
T = current.T
from db import db, auth
@auth.requires_login()
def index():
"""
Show the user the organizations he/she can access
"""
query = (db.organization.id > 0)
query &= (
auth.accessible_query('read', db.organization) |
auth.accessible_query('update', db.organization))
orgs = db(query).select(db.organization.ALL)
return locals()
@auth.requires(
auth.has_permission('read', db.organization, request.args(0)) or
auth.has_permission('update', db.organization, request.args(0))
)
def view():
"""
Show the list of desks in this org
"""
org = db.organization(request.args(0))
session.org_id = org.id
return locals()
@auth.requires(auth.has_permission('update', db.organization, request.args(0)))
def edit():
org = db.organization(request.args(0))
tbl = db.organization
tbl.users.readable = False
tbl.users.writable = False
tbl.desks.readable = False
tbl.desks.writable = False
tbl.name.requires = [IS_NOT_EMPTY()]
# edit form
form = SQLFORM(db.organization, record=org, showid=False)
if form.process().accepted:
redirect(URL('view', args=[org.id]))
return locals()
@auth.requires(auth.has_permission('update', db.organization, request.args(0)))
def members():
org = db.organization(request.args(0))
if not request.args(1):
fld_email = Field('email', 'string', label=T("Email"))
fld_email.requires = IS_EMAIL()
form = SQLFORM.factory(
fld_email,
formstyle='bootstrap3_inline',
submit_button=T("Add user"),
table_name='members')
if form.process().accepted:
u = db.auth_user(email=form.vars.email)
if u is not None:
# create new share
if u.id in org.users:
form.errors.email = T(
"The user is already in the organization")
else:
user_list = org.users
user_list.insert(0, u.id)
org.update_record(users=user_list)
g_id = auth.user_group(u.id)
auth.add_permission(g_id, 'read', db.organization, org.id)
else:
# no user with that email
response.flash = ""
form.errors.email = T("The user don't exists on this system")
elif request.args(1) == 'delete':
# remove the user on args(2) from the org members list
        # TODO: also remove any perms on the org desks
user_to_remove = db.auth_user(request.args(2))
if user_to_remove is not None:
user_list = org.users
user_list.remove(user_to_remove.id)
org.update_record(users=user_list)
# remove perms over the org
auth.del_permission(
auth.user_group(user_to_remove.id),
'read',
db.organization,
org.id)
# remove, also, all rights over the desks in the org.
desk_perms = [
'read_desk', 'update_items', 'push_items', 'update_desk']
for desk_id in org.desks:
for perm in desk_perms:
auth.del_permission(
auth.user_group(user_to_remove.id),
perm,
db.desk,
desk_id
)
redirect(URL('org', 'members', args=[org.id]))
return locals()
@auth.requires_login()
def create():
"""Create a new organization"""
tbl = db.organization
tbl.users.readable = False
tbl.users.writable = False
tbl.desks.readable = False
tbl.desks.writable = False
tbl.name.requires = [
IS_NOT_EMPTY(
error_message=T("Cannot be empty")
),
IS_NOT_IN_DB(
db,
'organization.name',
error_message=T(
"An Organization witch that name is allready in nStock"))]
form = SQLFORM(tbl)
form.add_button(T('Cancel'), URL('index'))
if form.process().accepted:
# add the new organization
g_id = auth.user_group(auth.user.id)
# give the user all perms over this org
auth.add_permission(g_id, 'update', tbl, form.vars.id)
auth.add_permission(g_id, 'read', tbl, form.vars.id)
auth.add_permission(g_id, 'delete', tbl, form.vars.id)
redirect(URL('index'))
return locals()
|
ybenitezf/nstock
|
controllers/org.py
|
Python
|
mit
| 4,822
|
import unittest
from tweetMining import TweetMining, TweetProxy, TestProxy, HttpProxy
import nltk
class TweetMiningTestCase(unittest.TestCase):
def setUp(self):
self.tweetMining = TweetMining(proxy='test')
self.search = self.tweetMining.search(q="twitter")
self.userInfoResponse = self.tweetMining.userInfo(username="fakeusername")
def tearDown(self):
self.tweetMining = None
def test_instanceIsNotNone(self):
self.assertIsNotNone(self.tweetMining)
def test_tweetMiningIsInstanceOf(self):
self.assertIsInstance(self.tweetMining, TweetMining)
# setProxy
def test_setProxy_exists(self):
self.assertTrue(callable(getattr(self.tweetMining, "setProxy")))
def test_setProxy_Raises_ExceptionWithWrongInput(self):
self.assertRaises(Exception, self.tweetMining.setProxy, 1)
self.assertRaises(Exception, self.tweetMining.setProxy, "wrong")
def test_setProxy_Returns_TweetProxyInstance(self):
actual = self.tweetMining.setProxy('test')
self.assertIsInstance(actual, TweetProxy)
def test_setProxy_Returns_TestProxyInstance(self):
actual = self.tweetMining.setProxy('test')
self.assertIsInstance(actual, TestProxy)
def test_setProxy_Returns_HttpProxyInstance(self):
actual = self.tweetMining.setProxy('http')
self.assertIsInstance(actual, HttpProxy)
# Trends
def test_Trends_exists(self):
self.assertTrue(callable(getattr(self.tweetMining, "trends")))
def test_Trends_returnsADict(self):
self.assertIsInstance(self.tweetMining.trends(), type({}))
def test_Trends_containsTrendsKey(self):
result = self.tweetMining.trends()
actual = 'trends' in result.keys()
self.assertTrue(actual)
def test_TrendsKeyIsAnArray(self):
result = self.tweetMining.trends()
actual = result['trends']
self.assertTrue(isinstance(actual, list))
def test_Trends_containsAs_OfKey(self):
result = self.tweetMining.trends()
actual = 'as_of' in result.keys()
self.assertTrue(actual)
def test_As_OfKeyIsAString(self):
result = self.tweetMining.trends()
actual = str(result['as_of'])
self.assertTrue(isinstance(actual, str))
# Search
def test_search_exists(self):
self.assertTrue(callable(getattr(self.tweetMining, "search")))
def test_search_returnsADict(self):
self.assertIsInstance(self.search, type({}))
def test_search_containsResultsKey(self):
actual = 'results' in self.search.keys()
self.assertTrue(actual)
def test_ResultsKeyIsAnArray(self):
actual = self.search['results']
self.assertTrue(isinstance(actual, list))
def test_search_containsSince_IdKey(self):
actual = 'since_id' in self.search.keys()
self.assertTrue(actual)
    def test_Since_IdKeyIsAnInt(self):
actual = self.search['since_id']
self.assertTrue(isinstance(actual, int))
def test_search_containsQueryKey(self):
actual = 'query' in self.search.keys()
self.assertTrue(actual)
def test_QueryKeyIsAString(self):
actual = self.search['query']
self.assertTrue(isinstance(actual, (str, unicode)))
def test_search_containsResults_per_pageKey(self):
actual = 'results_per_page' in self.search.keys()
self.assertTrue(actual)
def test_Results_Per_PageKeyIsAnInt(self):
actual = self.search['results_per_page']
self.assertTrue(isinstance(actual, int))
def test_search_containsMaxIdKey(self):
actual = 'max_id' in self.search.keys()
self.assertTrue(actual)
def test_Max_IdKeyIsAnInteger(self):
actual = self.search['max_id']
self.assertTrue(isinstance(actual, (int, long)))
    def test_search_containsPageKey(self):
actual = 'page' in self.search.keys()
self.assertTrue(actual)
def test_PageKeyIsAnInt(self):
actual = self.search['page']
self.assertTrue(isinstance(actual, int))
def test_search_containsNextPageKey(self):
actual = 'next_page' in self.search.keys()
self.assertTrue(actual)
def test_NextPageKeyIsAString(self):
actual = self.search['next_page']
self.assertTrue(isinstance(actual, (str, unicode)))
def test_search_containsCompleted_InKey(self):
actual = 'completed_in' in self.search.keys()
self.assertTrue(actual)
def test_CompletedInKeyIsFloat(self):
actual = self.search['completed_in']
self.assertTrue(isinstance(actual, (float)))
def test_search_containsRefreshUrlKey(self):
actual = 'refresh_url' in self.search.keys()
self.assertTrue(actual)
def test_RefreshUrlKeyIsAString(self):
actual = self.search['refresh_url']
self.assertTrue(isinstance(actual, (str, unicode)))
# Words
def test_words_exists(self):
self.assertTrue(callable(getattr(self.tweetMining, "words")))
def test_words_raisesAnExceptionWithWrongInput(self):
self.assertRaises(Exception, self.tweetMining.words, 1)
self.assertRaises(Exception, self.tweetMining.words, "1")
self.assertRaises(Exception, self.tweetMining.words, (1,))
self.assertRaises(Exception, self.tweetMining.words, {1:1})
def test_words_acceptsAListAsInput(self):
self.assertIsInstance(self.tweetMining.words([]), list)
def test_words_returnsAnArray(self):
actual = self.tweetMining.words(self.search['results'])
self.assertIsInstance(actual, list)
# FreqDist
def test_freqDist_exists(self):
self.assertTrue(callable(getattr(self.tweetMining, "freqDist")))
def test_freqDist_raisesAnExceptionWithWrongInput(self):
self.assertRaises(Exception, self.tweetMining.freqDist, 1)
self.assertRaises(Exception, self.tweetMining.freqDist, "1")
self.assertRaises(Exception, self.tweetMining.freqDist, (1,))
self.assertRaises(Exception, self.tweetMining.freqDist, {1:1})
def test_freqDist_acceptsAListAsInput(self):
self.assertEquals(type(self.tweetMining.freqDist([])), nltk.probability.FreqDist)
def test_freqDist_returnsAnArray(self):
words = self.tweetMining.words(self.search['results'])
actual = self.tweetMining.freqDist(words)
self.assertEquals(type(actual), nltk.probability.FreqDist)
# _get_rt_sources
def test_getRTSources_exists(self):
self.assertTrue(callable(getattr(self.tweetMining, "_getRTSources")))
def test_getRTSources_returnsAList(self):
actual = self.tweetMining._getRTSources('RT @user la la la')
self.assertIsInstance(actual, list)
def test_getRTSources_raisesAnExceptionWithWrongInput(self):
self.assertRaises(Exception, self.tweetMining._getRTSources, 1)
self.assertRaises(Exception, self.tweetMining._getRTSources, [])
self.assertRaises(Exception, self.tweetMining._getRTSources, {})
# buildRetweetGraph
def test_buildRetweetGraph_exists(self):
self.assertTrue(callable(getattr(self.tweetMining, "buildRetweetGraph")))
def test_buildRetweetGraph_ReturnsADict(self):
actual = self.tweetMining.buildRetweetGraph(self.search['results'])
self.assertIsInstance(actual, dict)
def test_buildRetweetGraph_Dict_containsGraphKey(self):
actual = self.tweetMining.buildRetweetGraph(self.search['results'])
self.assertTrue('graph' in actual.keys())
self.assertIsNotNone(actual['graph'])
def test_buildRetweetGraph_RaisesAnExceptionWithWrongInput(self):
self.assertRaises(Exception ,self.tweetMining.buildRetweetGraph, 1)
self.assertRaises(Exception ,self.tweetMining.buildRetweetGraph, "1")
self.assertRaises(Exception ,self.tweetMining.buildRetweetGraph, {})
# userInfo
def test_userInfo_exists(self):
self.assertTrue(callable(getattr(self.tweetMining, "userInfo")))
def test_userInfo_ReturnsADict(self):
actual = self.userInfoResponse
self.assertIsInstance(actual, dict)
def test_userInfo_Dict_ContainsAProfile_Background_TileKey(self):
key = 'profile_background_tile'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, bool)
def test_userInfo_Dict_ContainsAProtectedKey(self):
key = 'protected'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, bool)
def test_userInfo_Dict_ContainsAShow_All_Inline_MediaKey(self):
key = 'show_all_inline_media'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, bool)
def test_userInfo_Dict_ContainsAListedCountKey(self):
key = 'listed_count'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, int)
def test_userInfo_Dict_ContainsAContributorsEnabledKey(self):
key = 'contributors_enabled'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, bool)
def test_userInfo_Dict_ContainsAProfile_Sidebar_fill_colorKey(self):
key = 'profile_sidebar_fill_color'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, unicode)
def test_userInfo_Dict_ContainsANameKey(self):
key = 'name'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, unicode)
def test_userInfo_Dict_Contains_VerifiedKey(self):
key = 'verified'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, bool)
def test_userInfo_Dict_Contains_LangKey(self):
key = 'lang'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, unicode)
def test_userInfo_Dict_Contains_DescriptionKey(self):
key = 'description'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, unicode)
def test_userInfo_Dict_Contains_StatusesCountKey(self):
key = 'statuses_count'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, int)
def test_userInfo_Dict_Contains_Profile_Image_Url(self):
key = 'profile_image_url'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, unicode)
def test_userInfo_Dict_Contains_StatusKey(self):
key = 'status'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, dict)
def test_userInfo_Dict_Contains_UrlKey(self):
key = 'url'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, unicode)
def test_userInfo_Dict_Contains_Screen_NameKey(self):
key = 'screen_name'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value,unicode)
def test_userInfo_Dict_Contains_Friends_CountKey(self):
key = 'friends_count'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, int)
def test_userInfo_Dict_Contains_Followers_CountKey(self):
key = 'followers_count'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, int)
def test_userInfo_Dict_Contains_Favourites_CountKey(self):
key = 'favourites_count'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, int)
def test_userInfo_Dict_Contains_IdKey(self):
key = 'id'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, int)
def test_userInfo_Dict_Contains_IdStrKey(self):
key = 'id_str'
value = self.userInfoResponse.get(key)
self.assertTrue(key in self.userInfoResponse.keys())
self.assertIsInstance(value, unicode)
# _getFactory
def test__getFactoryProxy_exists(self):
self.assertTrue(callable(getattr(self.tweetMining, "_getFactoryProxy")))
def test__getFactoryProxy_Raises_ExceptionWithWrongInput(self):
self.assertRaises(Exception, self.tweetMining._getFactoryProxy, "wrong")
self.assertRaises(Exception, self.tweetMining._getFactoryProxy, 1)
def test__getFactoryProxy_Returns_TweetProxyInstance(self):
actual = self.tweetMining._getFactoryProxy('test')
self.assertIsInstance(actual, TweetProxy)
|
domenicosolazzo/TweetMining
|
tests/test_tweetMining.py
|
Python
|
mit
| 13,682
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM15.IEC61970.Wires.Switch import Switch
class ProtectedSwitch(Switch):
"""A ProtectedSwitch is a switching device that can be operated by ProtectionEquipment.A ProtectedSwitch is a switching device that can be operated by ProtectionEquipment.
"""
def __init__(self, breakingCapacity=0.0, ProtectionEquipments=None, RecloseSequences=None, *args, **kw_args):
"""Initialises a new 'ProtectedSwitch' instance.
@param breakingCapacity: The maximum fault current a breaking device can break safely under prescribed conditions of use.
@param ProtectionEquipments: Protection equipments that operate this ProtectedSwitch.
@param RecloseSequences: A breaker may have zero or more automatic reclosures after a trip occurs.
"""
#: The maximum fault current a breaking device can break safely under prescribed conditions of use.
self.breakingCapacity = breakingCapacity
self._ProtectionEquipments = []
self.ProtectionEquipments = [] if ProtectionEquipments is None else ProtectionEquipments
self._RecloseSequences = []
self.RecloseSequences = [] if RecloseSequences is None else RecloseSequences
super(ProtectedSwitch, self).__init__(*args, **kw_args)
_attrs = ["breakingCapacity"]
_attr_types = {"breakingCapacity": float}
_defaults = {"breakingCapacity": 0.0}
_enums = {}
_refs = ["ProtectionEquipments", "RecloseSequences"]
_many_refs = ["ProtectionEquipments", "RecloseSequences"]
def getProtectionEquipments(self):
"""Protection equipments that operate this ProtectedSwitch.
"""
return self._ProtectionEquipments
def setProtectionEquipments(self, value):
for p in self._ProtectionEquipments:
filtered = [q for q in p.ProtectedSwitches if q != self]
            p._ProtectedSwitches = filtered
for r in value:
if self not in r._ProtectedSwitches:
r._ProtectedSwitches.append(self)
self._ProtectionEquipments = value
ProtectionEquipments = property(getProtectionEquipments, setProtectionEquipments)
def addProtectionEquipments(self, *ProtectionEquipments):
for obj in ProtectionEquipments:
if self not in obj._ProtectedSwitches:
obj._ProtectedSwitches.append(self)
self._ProtectionEquipments.append(obj)
def removeProtectionEquipments(self, *ProtectionEquipments):
for obj in ProtectionEquipments:
if self in obj._ProtectedSwitches:
obj._ProtectedSwitches.remove(self)
self._ProtectionEquipments.remove(obj)
def getRecloseSequences(self):
"""A breaker may have zero or more automatic reclosures after a trip occurs.
"""
return self._RecloseSequences
def setRecloseSequences(self, value):
for x in self._RecloseSequences:
x.ProtectedSwitch = None
for y in value:
y._ProtectedSwitch = self
self._RecloseSequences = value
RecloseSequences = property(getRecloseSequences, setRecloseSequences)
def addRecloseSequences(self, *RecloseSequences):
for obj in RecloseSequences:
obj.ProtectedSwitch = self
def removeRecloseSequences(self, *RecloseSequences):
for obj in RecloseSequences:
obj.ProtectedSwitch = None
|
rwl/PyCIM
|
CIM15/IEC61970/Wires/ProtectedSwitch.py
|
Python
|
mit
| 4,514
|
import sqlite3
import numpy
import scipy.sparse
from django.test import SimpleTestCase
from matrixstore.serializer import serialize, serialize_compressed, deserialize
class TestSerializer(SimpleTestCase):
def test_simple_serialisation(self):
obj = {"hello": 123}
self.assertEqual(deserialize(serialize(obj)), obj)
def test_simple_serialisation_with_compression(self):
obj = {"hello": "world" * 256}
data = serialize(obj)
compressed_data = serialize_compressed(obj)
self.assertLess(len(compressed_data), len(data))
self.assertEqual(deserialize(compressed_data), obj)
def test_matrix_serialisation(self):
obj = scipy.sparse.csc_matrix((5, 4))
new_obj = deserialize(serialize(obj))
self.assertTrue(numpy.array_equal(obj.todense(), new_obj.todense()))
def test_dtype_is_preserved(self):
obj = scipy.sparse.csc_matrix((5, 4), dtype=numpy.uint16)
new_obj = deserialize(serialize(obj))
self.assertEqual(obj.dtype, new_obj.dtype)
def test_sqlite_roundtrip(self):
obj = {"hello": 123}
data = serialize(obj)
new_data = roundtrip_through_sqlite(data)
new_obj = deserialize(new_data)
self.assertEqual(new_obj, obj)
def test_sqlite_roundtrip_with_compression(self):
obj = {"hello": "world" * 256}
data = serialize_compressed(obj)
new_data = roundtrip_through_sqlite(data)
new_obj = deserialize(new_data)
self.assertEqual(new_obj, obj)
def roundtrip_through_sqlite(value):
db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE data (value BLOB)")
db.execute("INSERT INTO data VALUES (?)", [value])
result = db.execute("SELECT value FROM data")
new_value = result.fetchone()[0]
db.close()
return new_value
|
ebmdatalab/openprescribing
|
openprescribing/matrixstore/tests/test_serializer.py
|
Python
|
mit
| 1,843
|
import pika
import pickle
from display import LCDLinearScroll
connection = pika.BlockingConnection(pika.ConnectionParameters(
host='localhost'))
channel = connection.channel()
channel.exchange_declare(exchange='clock_output', type='fanout')
result = channel.queue_declare(exclusive=True)
queue_name = result.method.queue
channel.queue_bind(exchange='clock_output', queue=queue_name)
print ' [*] Waiting for messages. To exit press CTRL+C'
def select_callback():
print("select message sent")
channel.basic_publish(exchange='clock_output', routing_key='', body='ALARM_STOP')
channel.basic_publish(exchange='clock_output', routing_key='', body='ALARM_CANCEL')
def callback(ch, method, properties, body):
print("message received: {0}".format(body))
if body == "ALARM_START":
items = ("It's sunny today", "Meeting at 2pm")
lcd_scroller = LCDLinearScroll(items, select_callback=select_callback)
lcd_scroller.display_message("Scroll through\nmessages")
#lcd_scroller.setup_scroll_events()
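# A matching publisher triggers the scroller with (illustrative):
#     channel.basic_publish(exchange='clock_output', routing_key='',
#                           body='ALARM_START')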
channel.basic_consume(callback, queue=queue_name, no_ack=True)
channel.start_consuming()
|
Tyler-Ward/GolemClock
|
display/mq.py
|
Python
|
mit
| 1,102
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from rest_framework import routers
from courts import views
router = routers.DefaultRouter()
router.register(r'courts', views.CourtsViewSet)
urlpatterns = patterns('',
url(r'^api/v1/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^admin/', include(admin.site.urls)),
)
|
mmmoli/open-courts
|
opencourts/urls.py
|
Python
|
mit
| 433
|
#!/usr/bin/env python3
import re
from enum import Enum
diags = []
with open('input.txt', 'r') as f:
diags = f.read().splitlines()
#--- challenge 1
gamma = ""
for i in range(0, len(diags[0])):
zeros = len([x for x in diags if x[i] == "0"])
ones = len([x for x in diags if x[i] == "1"])
gamma += "0" if zeros > ones else "1"
gamma = int(gamma, 2)
epsilon = gamma ^ 0b111111111111
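# epsilon flips every bit of gamma across the 12-bit diagnostic width,
# e.g. gamma = 0b101100111000 gives epsilon = 0b010011000111.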
print("Solution to challenge 1: {}".format(gamma * epsilon))
#--- challenge 2
class Rating(Enum):
OXYGEN = 0
CO2 = 1
def get_val(diags, rating):
for i in range(0, len(diags[0])):
zeros = len([x for x in diags if x[i] == "0"])
ones = len(diags) - zeros
        # check_val is the bit value to discard at this position
        if rating == Rating.OXYGEN:
            # oxygen keeps the most common bit (ties keep "1")
            check_val = "1" if zeros > ones else "0"
        else:
            # CO2 keeps the least common bit (ties keep "0")
            check_val = "0" if zeros > ones else "1"
diags = [x for x in diags if x[i] != check_val]
if len(diags) == 1:
return int(diags[0], 2)
oxygen = get_val(diags, Rating.OXYGEN)
co2 = get_val(diags, Rating.CO2)
print("Solution to challenge 2: {}".format(oxygen * co2))
|
jekhokie/scriptbox
|
python--advent-of-code/2021/3/solve.py
|
Python
|
mit
| 1,039
|
import pyspark
import operator
import sys
#311 call 2010 to present csv
#0 Unique Key,Created Date,Closed Date,Agency,Agency Name,
#5 Complaint Type,Descriptor,Location Type,Incident Zip,Incident Address,
#10 Street Name,Cross Street 1,Cross Street 2,Intersection Street 1,
#14 Intersection Street 2,Address Type,City,Landmark,Facility Type,Status,
#20 Due Date,Resolution Description,Resolution Action Updated Date,
#23 Community Board,Borough,X Coordinate (State Plane),Y Coordinate (State Plane),
#27 Park Facility Name,Park Borough,School Name,School Number,School Region,
#32 School Code,School Phone Number,School Address,School City,School State,
#37 School Zip,School Not Found,School or Citywide Complaint,Vehicle Type,
#41 Taxi Company Borough,Taxi Pick Up Location,Bridge Highway Name,
#44 Bridge Highway Direction,Road Ramp,Bridge Highway Segment,Garage Lot Name,
#48 Ferry Direction,Ferry Terminal Name,Latitude,Longitude,Location
def mapToLots(records):
import rtree
import csv
import os
os.chmod('plutoindex.idx',0777)
os.chmod('plutoindex.dat',0777)
file_index = rtree.index.Rtree('plutoindex')
for record in records:
list_record=[]
for line in csv.reader([record.encode('utf-8')]):
list_record = line
if len(list_record) < 27:
continue
compType = list_record[5].upper()
descriptor = list_record[6].upper()
if compType.count('NOISE') < 1 or descriptor.count('LOUD MUSIC/PARTY') < 1:
continue
xcoord = list_record[25].strip()
ycoord = list_record[26].strip()
if all((xcoord,ycoord)):
#check intersection
xcoord = int(xcoord)
ycoord = int(ycoord)
for lot in file_index.intersection((xcoord,ycoord),objects = True):
yield (lot.object,1)
def mapResUnits(pairs):
import pickle
with open('plutodict','rb') as fi:
pluto_dict = pickle.load(fi)
for pair in pairs:
dict_entry = pluto_dict[pair[0]]
property_value = dict_entry[1]
res_units = dict_entry[0]
if res_units < 1:
continue
yield (property_value, pair[1] / float(res_units))#pair[1] = number of noise complaints
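# Each emitted pair maps a lot's assessed value to complaints per residential
# unit, e.g. 2 complaints across 5 units yields (value, 0.4) (illustrative
# numbers).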
if __name__=='__main__':
if len(sys.argv)<3:
print "Usage: <input files> <output path>"
sys.exit(-1)
sc = pyspark.SparkContext()
calls311 = sc.textFile(sys.argv[1])
output = calls311.mapPartitions(mapToLots).reduceByKey(operator.add).\
mapPartitions(mapResUnits)
output.saveAsTextFile(sys.argv[-1])
|
alejandro-mc/BDM-DDD
|
value_noisecomplaints/getLoudMusicComp.py
|
Python
|
mit
| 2,652
|
#!/usr/bin/python3
import os
import cgi
import cgitb
import json
from pprint import pprint
import setup.common as co
import setup.sqlcommon as sqlc
import setup.defaultpaths as dfp
import setup.getinput as gi
import setup.dbsetup as dbs
import setup.gettags as gt
import setup.createsongsjson as csj
import setup.createalbumsjson as calbj
import setup.createartistsjson as cartj
import setup.makealphasoup as aS
import setup.makesoup as ms
class Setup():
def __init__(self):
progpath = os.path.dirname(os.path.abspath(__file__))
p = dfp.SetupPaths()
jpath = p._get_json_paths(progpath)
dbpath = p._get_db_paths(progpath)
other = p._get_other_paths(progpath)
httpaddr = p._get_http_addr()
self.progpath = progpath
self.jpath = jpath
self.dbpath = dbpath
self.other = other
self.httpaddr = httpaddr
CF = co.CommonFunctions()
self.CF = CF
SQLC = sqlc.SqlCommon()
self.SQLC = SQLC
GI = gi.GetInputs()
self.GI = GI
def _remove_old(self):
for (p, d, files) in os.walk(self.jpath['programpath'] + "/json"):
for filename in files:
fn = os.path.join(p, filename)
if fn[:-4] == "json":
os.remove(fn)
for (p, d, files) in os.walk(self.dbpath['programpath'] + "/db"):
for filename in files:
fn = os.path.join(p, filename)
if os.path.exists(fn):
os.remove(fn)
# for (p, d, files) in os.walk(self.dbpath['programpath'] + "/music"):
# for filename in files:
# fn = os.path.join(p, filename)
# if os.path.exists(fn):
# os.remove(fn)
#
def _get_input_values(self):
inputs = self.GI._get_inputs()
return inputs
# zelda = {}
# zelda['musicpath'] = "/home/charlie/Desktop/music"
# zelda['catname'] = "CatalogA"
# zelda['hostaddress'] = "http://192.168.1.117/ampyche"
# zelda['dbname'] = "dbname"
# zelda['dbuname'] = "dbusername"
# zelda['dbpword'] = "dbpassword"
# return zelda
def run_setup(self):
self._remove_old()
z = self._get_input_values()
cclog = self.CF._create_catalog(z, self.dbpath)
ampychepath = self.httpaddr + "/" + z['catname']
print("constants setup")
AA = dbs.AmpycheDBSetup()
run1 = AA.run(z, self.dbpath)
print("dbsetup complete")
print("getting tags started")
AMP = gt.AmpycheGetTags()
run2 = AMP.run(self.progpath + "/music", self.dbpath, self.other, self.httpaddr, z['catname'], ampychepath)
print("getTags complete")
print("start of ID getting")
aas = self.SQLC._get_artist_album_song_ids(self.dbpath)
songs = aas[0]
albums = aas[1]
artists = aas[2]
print("get IDS complete")
CSJ = csj.InitSongsView()
songs60 = songs[:50]
writesongsjson = CSJ._get_init_songs_info(self.dbpath, self.jpath, songs60)
print("CSJ completed")
CALBJ = calbj.InitAlbumView()
albums40 = albums[:30]
writealbumjson = CALBJ._get_init_album_info(self.dbpath, self.jpath, albums40)
print("CALBJ completed")
CARTJ = cartj.InitArtistView()
artists30 = artists[:30]
writeartistjson = CARTJ._get_init_artist_info(self.dbpath, self.jpath, artists30)
print("CARTJ completed")
MAS = aS.MakeAlphaSoup()
AlphaSoup = MAS._make_alpha_soup(self.dbpath, self.jpath)
MS = ms.MakeSoup()
makesoup = MS.run(self.dbpath, self.jpath)
glip = "SETUP COMPLETE"
print(json.dumps(glip, sort_keys=True, indent=4))
print("Content-Type: application/json\n\n")
app = Setup()
Ampyche = app.run_setup()
|
ampyche/ampyche
|
ampyche_setup.py
|
Python
|
mit
| 3,407
|
import pytest
from gitlabform.gitlab import AccessLevel
from tests.acceptance import (
run_gitlabform,
DEFAULT_README,
get_gitlab,
)
gl = get_gitlab()
@pytest.fixture(scope="function")
def branches(request, gitlab, group_and_project):
branches = [
"protect_branch_but_allow_all",
"protect_branch_with_code_owner_approval_required",
"protect_branch_and_disallow_all",
"protect_branch_and_allow_merges",
"protect_branch_and_allow_pushes",
"protect_branch_and_allow_merges_access_levels",
"protect_branch_and_allow_pushes_access_levels",
"protect_branch_and_allowed_to_push",
"protect_branch_and_allowed_to_merge",
"protect_branch_and_allow_access_levels_with_user_ids",
"protect_branch",
]
for branch in branches:
gitlab.create_branch(group_and_project, branch, "main")
def fin():
for branch in branches:
gitlab.delete_branch(group_and_project, branch)
gitlab.set_file(
group_and_project,
"main",
"README.md",
DEFAULT_README,
"Reset default content",
)
request.addfinalizer(fin)
@pytest.fixture(scope="function")
def one_maintainer_and_two_developers(gitlab, group_and_project, users):
gitlab.add_member_to_project(
group_and_project, users[0], AccessLevel.MAINTAINER.value
)
gitlab.add_member_to_project(
group_and_project, users[1], AccessLevel.DEVELOPER.value
)
gitlab.add_member_to_project(
group_and_project, users[2], AccessLevel.DEVELOPER.value
)
yield group_and_project
# we try to remove all users, not just the 3 added above,
# on purpose, as more may have been added in the tests
for user in users:
gitlab.remove_member_from_project(group_and_project, user)
class TestBranches:
def test__protect_branch_but_allow_all(self, gitlab, group_and_project, branches):
protect_branch_but_allow_all = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch_but_allow_all:
protected: true
developers_can_push: true
developers_can_merge: true
"""
run_gitlabform(protect_branch_but_allow_all, group_and_project)
branch = gitlab.get_branch(group_and_project, "protect_branch_but_allow_all")
assert branch["protected"] is True
assert branch["developers_can_push"] is True
assert branch["developers_can_merge"] is True
# @pytest.mark.skipif(
# gl.has_no_license(), reason="this test requires a GitLab license (Paid/Trial)"
# )
# def test__code_owners_approval(self, gitlab, group_and_project, branches):
# group_and_project = group_and_project
#
# branch_access_levels = gitlab.get_branch_access_levels(
# group_and_project, "protect_branch_but_allow_all"
# )
# assert branch_access_levels["code_owner_approval_required"] is False
#
# protect_branch_with_code_owner_approval_required = f"""
# projects_and_groups:
# {group_and_project}:
# branches:
# protect_branch_with_code_owner_approval_required:
# protected: true
# developers_can_push: false
# developers_can_merge: true
# code_owner_approval_required: true
# """
#
# run_gitlabform(
# protect_branch_with_code_owner_approval_required, group_and_project
# )
#
# branch_access_levels = gitlab.get_branch_access_levels(
# group_and_project, "protect_branch_with_code_owner_approval_required"
# )
# assert branch_access_levels["code_owner_approval_required"] is True
def test__protect_branch_and_disallow_all(
self, gitlab, group_and_project, branches
):
protect_branch_and_disallow_all = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch_and_disallow_all:
protected: true
developers_can_push: false
developers_can_merge: false
"""
run_gitlabform(protect_branch_and_disallow_all, group_and_project)
branch = gitlab.get_branch(group_and_project, "protect_branch_and_disallow_all")
assert branch["protected"] is True
assert branch["developers_can_push"] is False
assert branch["developers_can_merge"] is False
def test__mixed_config(self, gitlab, group_and_project, branches):
mixed_config = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch_and_allow_merges:
protected: true
developers_can_push: false
developers_can_merge: true
protect_branch_and_allow_pushes:
protected: true
developers_can_push: true
developers_can_merge: false
"""
run_gitlabform(mixed_config, group_and_project)
branch = gitlab.get_branch(group_and_project, "protect_branch_and_allow_merges")
assert branch["protected"] is True
assert branch["developers_can_push"] is False
assert branch["developers_can_merge"] is True
branch = gitlab.get_branch(group_and_project, "protect_branch_and_allow_pushes")
assert branch["protected"] is True
assert branch["developers_can_push"] is True
assert branch["developers_can_merge"] is False
unprotect_branches = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch_and_allow_merges:
protected: false
protect_branch_and_allow_pushes:
protected: false
"""
run_gitlabform(unprotect_branches, group_and_project)
for branch in [
"protect_branch_and_allow_merges",
"protect_branch_and_allow_pushes",
]:
branch = gitlab.get_branch(group_and_project, branch)
assert branch["protected"] is False
def test__mixed_config_with_new_api(
self,
gitlab,
group_and_project,
branches,
users,
one_maintainer_and_two_developers,
):
mixed_config_with_access_levels = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch_and_allow_merges_access_levels:
protected: true
push_access_level: {AccessLevel.NO_ACCESS.value}
merge_access_level: {AccessLevel.DEVELOPER.value}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
'*_allow_pushes_access_levels':
protected: true
push_access_level: {AccessLevel.DEVELOPER.value}
merge_access_level: {AccessLevel.DEVELOPER.value}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
"""
run_gitlabform(mixed_config_with_access_levels, group_and_project)
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(
group_and_project, "protect_branch_and_allow_merges_access_levels"
)
assert push_access_levels == [AccessLevel.NO_ACCESS.value]
assert merge_access_levels == [AccessLevel.DEVELOPER.value]
assert push_access_user_ids == []
assert merge_access_user_ids == []
assert unprotect_access_level is AccessLevel.MAINTAINER.value
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(
group_and_project, "*_allow_pushes_access_levels"
)
assert push_access_levels == [AccessLevel.DEVELOPER.value]
assert merge_access_levels == [AccessLevel.DEVELOPER.value]
assert push_access_user_ids == []
assert merge_access_user_ids == []
assert unprotect_access_level is AccessLevel.MAINTAINER.value
mixed_config_with_access_levels_update = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch_and_allow_merges_access_levels:
protected: true
push_access_level: {AccessLevel.NO_ACCESS.value}
merge_access_level: {AccessLevel.MAINTAINER.value}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
'*_allow_pushes_access_levels':
protected: true
push_access_level: {AccessLevel.MAINTAINER.value}
merge_access_level: {AccessLevel.MAINTAINER.value}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
"""
run_gitlabform(mixed_config_with_access_levels_update, group_and_project)
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(
group_and_project, "protect_branch_and_allow_merges_access_levels"
)
assert push_access_levels == [AccessLevel.NO_ACCESS.value]
assert merge_access_levels == [AccessLevel.MAINTAINER.value]
assert push_access_user_ids == []
assert merge_access_user_ids == []
assert unprotect_access_level is AccessLevel.MAINTAINER.value
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(
group_and_project, "*_allow_pushes_access_levels"
)
assert push_access_levels == [AccessLevel.MAINTAINER.value]
assert merge_access_levels == [AccessLevel.MAINTAINER.value]
assert push_access_user_ids == []
assert merge_access_user_ids == []
assert unprotect_access_level is AccessLevel.MAINTAINER.value
mixed_config_with_access_levels_unprotect_branches = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch_and_allow_merges_access_levels:
protected: false
'*_allow_pushes_access_levels':
protected: false
"""
run_gitlabform(
mixed_config_with_access_levels_unprotect_branches, group_and_project
)
for branch in [
"protect_branch_and_allow_merges_access_levels",
"protect_branch_and_allow_pushes_access_levels",
]:
branch = gitlab.get_branch(group_and_project, branch)
assert branch["protected"] is False
@pytest.mark.skipif(
gl.has_no_license(), reason="this test requires a GitLab license (Paid/Trial)"
)
def test__allow_user_ids(
self,
gitlab,
group_and_project,
branches,
users,
one_maintainer_and_two_developers,
):
user_allowed_to_push_id = gitlab.get_user_to_protect_branch(users[0])
user_allowed_to_merge_id = gitlab.get_user_to_protect_branch(users[1])
user_allowed_to_push_and_allowed_to_merge_id = (
gitlab.get_user_to_protect_branch(users[2])
)
# testing allowed_to_push and allowed_to_merge for user support on protect branch (gitlab premium feature)
mixed_config_with_allowed_to_push_and_merge = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch_and_allowed_to_merge:
protected: true
allowed_to_push:
- access_level: {AccessLevel.NO_ACCESS.value}
allowed_to_merge:
- access_level: {AccessLevel.DEVELOPER.value}
- user_id: {user_allowed_to_merge_id}
- user: {users[2]}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
'*_and_allowed_to_push':
protected: true
allowed_to_push:
- access_level: {AccessLevel.DEVELOPER.value}
- user_id: {user_allowed_to_push_id}
- user: {users[1]}
allowed_to_merge:
- access_level: {AccessLevel.MAINTAINER.value}
unprotect_access_level: {AccessLevel.DEVELOPER.value}
"""
run_gitlabform(mixed_config_with_allowed_to_push_and_merge, group_and_project)
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(
group_and_project, "protect_branch_and_allowed_to_merge"
)
assert push_access_levels == [AccessLevel.NO_ACCESS.value]
assert merge_access_levels == [AccessLevel.DEVELOPER.value]
current_push_access_user_ids = []
current_push_access_user_ids.sort()
assert push_access_user_ids == current_push_access_user_ids
current_merge_access_user_ids = [
user_allowed_to_merge_id,
user_allowed_to_push_and_allowed_to_merge_id,
]
current_merge_access_user_ids.sort()
assert merge_access_user_ids == current_merge_access_user_ids
assert unprotect_access_level is AccessLevel.MAINTAINER.value
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(
group_and_project, "*_and_allowed_to_push"
)
assert push_access_levels == [AccessLevel.DEVELOPER.value]
assert merge_access_levels == [AccessLevel.MAINTAINER.value]
current_push_access_user_ids = [
user_allowed_to_push_id,
user_allowed_to_merge_id,
]
current_push_access_user_ids.sort()
assert push_access_user_ids == current_push_access_user_ids
current_merge_access_user_ids = []
current_merge_access_user_ids.sort()
assert merge_access_user_ids == current_merge_access_user_ids
assert unprotect_access_level is AccessLevel.DEVELOPER.value
mixed_config_with_allowed_to_push_and_merge_update = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch_and_allowed_to_merge:
protected: true
allowed_to_push:
- access_level: {AccessLevel.NO_ACCESS.value}
allowed_to_merge:
- access_level: {AccessLevel.MAINTAINER.value}
- user_id: {user_allowed_to_merge_id}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
'*_and_allowed_to_push':
protected: true
allowed_to_push:
- access_level: {AccessLevel.MAINTAINER.value}
- user_id: {user_allowed_to_push_id}
- user: {users[2]}
- user: {users[1]}
allowed_to_merge:
- access_level: {AccessLevel.MAINTAINER.value}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
"""
run_gitlabform(
mixed_config_with_allowed_to_push_and_merge_update, group_and_project
)
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(
group_and_project, "protect_branch_and_allowed_to_merge"
)
assert push_access_levels == [AccessLevel.NO_ACCESS.value]
assert merge_access_levels == [AccessLevel.MAINTAINER.value]
current_push_access_user_ids = []
current_push_access_user_ids.sort()
assert push_access_user_ids == current_push_access_user_ids
current_merge_access_user_ids = [user_allowed_to_merge_id]
current_merge_access_user_ids.sort()
assert merge_access_user_ids == current_merge_access_user_ids
assert unprotect_access_level is AccessLevel.MAINTAINER.value
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(
group_and_project, "*_and_allowed_to_push"
)
assert push_access_levels == [AccessLevel.MAINTAINER.value]
assert merge_access_levels == [AccessLevel.MAINTAINER.value]
current_push_access_user_ids = [
user_allowed_to_push_id,
user_allowed_to_merge_id,
user_allowed_to_push_and_allowed_to_merge_id,
]
current_push_access_user_ids.sort()
assert push_access_user_ids == current_push_access_user_ids
current_merge_access_user_ids = []
current_merge_access_user_ids.sort()
assert merge_access_user_ids == current_merge_access_user_ids
assert unprotect_access_level is AccessLevel.MAINTAINER.value
mixed_config_with_allow_access_levels_with_user_ids = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch_and_allow_access_levels_with_user_ids:
protected: true
push_access_level: {AccessLevel.DEVELOPER.value}
merge_access_level: {AccessLevel.MAINTAINER.value}
allowed_to_push:
- access_level: {AccessLevel.MAINTAINER.value}
- user_id: {user_allowed_to_push_id}
- user: {users[2]}
allowed_to_merge:
- access_level: {AccessLevel.DEVELOPER.value}
- user_id: {user_allowed_to_merge_id}
- user: {users[0]}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
"""
run_gitlabform(
mixed_config_with_allow_access_levels_with_user_ids, group_and_project
)
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(
group_and_project,
"protect_branch_and_allow_access_levels_with_user_ids",
)
assert push_access_levels == [
AccessLevel.DEVELOPER.value,
AccessLevel.MAINTAINER.value,
]
assert merge_access_levels == [
AccessLevel.DEVELOPER.value,
AccessLevel.MAINTAINER.value,
]
current_push_access_user_ids = [
user_allowed_to_push_id,
user_allowed_to_push_and_allowed_to_merge_id,
]
current_push_access_user_ids.sort()
assert push_access_user_ids == current_push_access_user_ids
current_merge_access_user_ids = [
user_allowed_to_merge_id,
user_allowed_to_push_id,
]
current_merge_access_user_ids.sort()
assert merge_access_user_ids == current_merge_access_user_ids
assert unprotect_access_level is AccessLevel.MAINTAINER.value
def test_protect_branch_with_old_api_next_update_with_new_api_and_unprotect(
self, gitlab, group_and_project, branches
):
config_protect_branch_with_old_api = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: true
developers_can_push: true
developers_can_merge: true
"""
run_gitlabform(config_protect_branch_with_old_api, group_and_project)
branch = gitlab.get_branch(group_and_project, "protect_branch")
assert branch["protected"] is True
assert branch["developers_can_push"] is True
assert branch["developers_can_merge"] is True
config_protect_branch_with_new_api = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: true
push_access_level: {AccessLevel.NO_ACCESS.value}
merge_access_level: {AccessLevel.MAINTAINER.value}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
"""
run_gitlabform(config_protect_branch_with_new_api, group_and_project)
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(group_and_project, "protect_branch")
assert push_access_levels == [AccessLevel.NO_ACCESS.value]
assert merge_access_levels == [AccessLevel.MAINTAINER.value]
assert push_access_user_ids == []
assert merge_access_user_ids == []
assert unprotect_access_level is AccessLevel.MAINTAINER.value
config_protect_branch_unprotect = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: false
"""
run_gitlabform(config_protect_branch_unprotect, group_and_project)
branch = gitlab.get_branch(group_and_project, "protect_branch")
assert branch["protected"] is False
def test_protect_branch_with_new_api_next_update_with_old_api_and_unprotect(
self, gitlab, group_and_project, branches
):
config_protect_branch_with_new_api = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: true
push_access_level: {AccessLevel.NO_ACCESS.value}
merge_access_level: {AccessLevel.MAINTAINER.value}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
"""
run_gitlabform(config_protect_branch_with_new_api, group_and_project)
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(group_and_project, "protect_branch")
assert push_access_levels == [AccessLevel.NO_ACCESS.value]
assert merge_access_levels == [AccessLevel.MAINTAINER.value]
assert push_access_user_ids == []
assert merge_access_user_ids == []
assert unprotect_access_level is AccessLevel.MAINTAINER.value
config_protect_branch_with_old_api = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: true
developers_can_push: true
developers_can_merge: true
"""
run_gitlabform(config_protect_branch_with_old_api, group_and_project)
branch = gitlab.get_branch(group_and_project, "protect_branch")
assert branch["protected"] is True
assert branch["developers_can_push"] is True
assert branch["developers_can_merge"] is True
config_protect_branch_unprotect = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: false
"""
run_gitlabform(config_protect_branch_unprotect, group_and_project)
branch = gitlab.get_branch(group_and_project, "protect_branch")
assert branch["protected"] is False
@pytest.mark.skipif(
gl.has_no_license(), reason="this test requires a GitLab license (Paid/Trial)"
)
def test_protect_branch_with_old_api_next_update_with_new_api_and_userid_and_unprotect(
self,
gitlab,
group_and_project,
branches,
users,
one_maintainer_and_two_developers,
):
config_protect_branch_with_old_api = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: true
developers_can_push: true
developers_can_merge: true
"""
run_gitlabform(config_protect_branch_with_old_api, group_and_project)
branch = gitlab.get_branch(group_and_project, "protect_branch")
assert branch["protected"] is True
assert branch["developers_can_push"] is True
assert branch["developers_can_merge"] is True
user_allowed_to_push_id = gitlab.get_user_to_protect_branch(users[0])
user_allowed_to_merge_id = gitlab.get_user_to_protect_branch(users[1])
config_protect_branch_with_new_api = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: true
push_access_level: {AccessLevel.DEVELOPER.value}
merge_access_level: {AccessLevel.MAINTAINER.value}
allowed_to_push:
- access_level: {AccessLevel.MAINTAINER.value}
- user_id: {user_allowed_to_push_id}
- user: {users[1]}
allowed_to_merge:
- access_level: {AccessLevel.MAINTAINER.value}
- user_id: {user_allowed_to_merge_id}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
"""
run_gitlabform(config_protect_branch_with_new_api, group_and_project)
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(group_and_project, "protect_branch")
assert push_access_levels == [
AccessLevel.DEVELOPER.value,
AccessLevel.MAINTAINER.value,
]
assert merge_access_levels == [AccessLevel.MAINTAINER.value]
current_push_access_user_ids = [
user_allowed_to_push_id,
user_allowed_to_merge_id,
]
current_push_access_user_ids.sort()
assert push_access_user_ids == current_push_access_user_ids
current_merge_access_user_ids = [user_allowed_to_merge_id]
current_merge_access_user_ids.sort()
assert merge_access_user_ids == current_merge_access_user_ids
assert unprotect_access_level is AccessLevel.MAINTAINER.value
config_protect_branch_unprotect = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: false
"""
run_gitlabform(config_protect_branch_unprotect, group_and_project)
branch = gitlab.get_branch(group_and_project, "protect_branch")
assert branch["protected"] is False
def test_unprotect_when_the_rest_of_the_parameters_are_still_specified_old_api(
self, gitlab, group_and_project, branches
):
config_protect_branch_with_old_api = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: true
developers_can_push: true
developers_can_merge: true
"""
run_gitlabform(config_protect_branch_with_old_api, group_and_project)
branch = gitlab.get_branch(group_and_project, "protect_branch")
assert branch["protected"] is True
assert branch["developers_can_push"] is True
assert branch["developers_can_merge"] is True
config_unprotect_branch_with_old_api = f"""
gitlab:
api_version: 4
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: false
developers_can_push: true
developers_can_merge: true
"""
run_gitlabform(config_unprotect_branch_with_old_api, group_and_project)
branch = gitlab.get_branch(group_and_project, "protect_branch")
assert branch["protected"] is False
def test_unprotect_when_the_rest_of_the_parameters_are_still_specified_new_api(
self, gitlab, group_and_project, branches
):
config_protect_branch_with_new_api = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: true
push_access_level: {AccessLevel.NO_ACCESS.value}
merge_access_level: {AccessLevel.MAINTAINER.value}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
"""
run_gitlabform(config_protect_branch_with_new_api, group_and_project)
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(group_and_project, "protect_branch")
assert push_access_levels == [AccessLevel.NO_ACCESS.value]
assert merge_access_levels == [AccessLevel.MAINTAINER.value]
assert push_access_user_ids == []
assert merge_access_user_ids == []
assert unprotect_access_level is AccessLevel.MAINTAINER.value
config_unprotect_branch_with_new_api = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch:
protected: false
push_access_level: {AccessLevel.NO_ACCESS.value}
merge_access_level: {AccessLevel.MAINTAINER.value}
unprotect_access_level: {AccessLevel.MAINTAINER.value}
"""
run_gitlabform(config_unprotect_branch_with_new_api, group_and_project)
# old API
branch = gitlab.get_branch(group_and_project, "protect_branch")
assert branch["protected"] is False
# new API
(
push_access_levels,
merge_access_levels,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(group_and_project, "protect_branch")
assert push_access_levels is None
assert merge_access_levels is None
assert push_access_user_ids is None
assert merge_access_user_ids is None
assert unprotect_access_level is None
def test__config_with_access_level_names(self, gitlab, group_and_project, branches):
config_with_access_levels_names = f"""
projects_and_groups:
{group_and_project}:
branches:
protect_branch_and_allow_merges_access_levels:
protected: true
push_access_level: no_access # note "_" or " " and the various
merge_access_level: Developer # case in each line. it should not
unprotect_access_level: MAINTAINER # matter as we allow any case.
"""
run_gitlabform(config_with_access_levels_names, group_and_project)
(
push_access_level,
merge_access_level,
push_access_user_ids,
merge_access_user_ids,
unprotect_access_level,
) = gitlab.get_only_branch_access_levels(
group_and_project, "protect_branch_and_allow_merges_access_levels"
)
assert push_access_level == [AccessLevel.NO_ACCESS.value]
assert merge_access_level == [AccessLevel.DEVELOPER.value]
assert push_access_user_ids == []
assert merge_access_user_ids == []
assert unprotect_access_level is AccessLevel.MAINTAINER.value
|
egnyte/gitlabform
|
tests/acceptance/test_branches.py
|
Python
|
mit
| 32,440
|
#!/usr/bin/python
# TODO: issues with new oauth2 stuff. Keep using older version of Python for now.
# #!/usr/bin/env python
from participantCollection import ParticipantCollection
import re
import datetime
import pyperclip
# Edit Me!
# This script gets run on the first day of the following month, and that month's URL is
# what goes here. E.g. If this directory is the directory for February, this script gets
# run on March 1, and this URL is the URL for the March challenge page.
nextMonthURL = "https://www.reddit.com/r/pornfree/comments/ex6nis/stay_clean_february_this_thread_updated_daily/"
# If this directory is the directory for November, this script gets run on December 1,
# and currentMonthIndex gets the index of November, i.e. 11.
currentMonthIndex = datetime.date.today().month - 1
if currentMonthIndex == 0:
currentMonthIndex = 12
currentMonthName = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May', 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October', 11: 'November', 12: 'December'}[currentMonthIndex]
nextMonthIndex = currentMonthIndex % 12 + 1
nextMonthName = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May', 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October', 11: 'November', 12: 'December'}[nextMonthIndex]
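# Worked example: run on 1 January -> today().month - 1 == 0, which wraps to
# 12 (December); nextMonthIndex is then 12 % 12 + 1 == 1 (January).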
participants = ParticipantCollection()
numberStillIn = participants.sizeOfParticipantsWhoAreStillIn()
initialNumber = participants.size()
percentStillIn = int(round(100.0 * numberStillIn / initialNumber))
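# e.g. 42 still in out of 120 -> int(round(100.0 * 42 / 120)) == 35 (percent).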
def templateForParticipants():
answer = ""
for participant in participants.participantsWhoAreStillInAndHaveCheckedIn():
answer += "/u/" + participant.name
answer += "\n\n"
return answer
def templateToUse():
answer = ""
answer += "The Stay Clean CURRENT_MONTH_NAME challenge is now over. Join us for **[the NEXT_MONTH_NAME challenge](NEXT_MONTH_URL)**.\n"
answer += "\n"
answer += "**NUMBER_STILL_IN** out of INITIAL_NUMBER participants made it all the way through the challenge. That's **PERCENT_STILL_IN%**.\n"
answer += "\n"
answer += "Congratulations to these participants, all of whom were victorious:\n\n"
answer += templateForParticipants()
return answer
def stringToPrint():
answer = templateToUse()
answer = re.sub('NUMBER_STILL_IN', str(numberStillIn), answer)
answer = re.sub('INITIAL_NUMBER', str(initialNumber), answer)
answer = re.sub('PERCENT_STILL_IN', str(percentStillIn), answer)
answer = re.sub('CURRENT_MONTH_INDEX', str(currentMonthIndex), answer)
answer = re.sub('CURRENT_MONTH_NAME', currentMonthName, answer)
answer = re.sub('NEXT_MONTH_INDEX', str(nextMonthIndex), answer)
answer = re.sub('NEXT_MONTH_NAME', nextMonthName, answer)
answer = re.sub('NEXT_MONTH_URL', nextMonthURL, answer)
return answer
outputString = stringToPrint()
print "============================================================="
print outputString
print "============================================================="
pyperclip.copy(outputString)
|
foobarbazblarg/stayclean
|
stayclean-2020-january/display-final-after-month-is-over.py
|
Python
|
mit
| 3,056
|
#!/usr/bin/env python2
# coding:utf-8
from twython import Twython
from colors import const
#import const
import numpy as np
import PIL.Image as img
import colorsys
import StringIO
import os
from datetime import datetime
from datetime import timedelta
from random import randint
number_of_colours = 1094
def is_morning():
return 6 <= (datetime.utcnow() + timedelta(hours=9)).hour <= 9
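# e.g. 22:30 UTC is 07:30 JST (UTC+9), so is_morning() returns True then.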
class Colour(object):
def __init__(self, name, hexcode, url):
self.name = name
# 0-255
self.hexcode = hexcode
self.rgb = tuple(
int(hexcode[i:i+2],16) for i in range(0,6,2)
)
self.hsv = tuple(
colorsys.rgb_to_hsv(*map(lambda x: x/255.0, self.rgb)))
self.url = url or "https://en.wikipedia.org/wiki/{}".format(
name.replace(' ','_'))
@staticmethod
def from_string(line):
name,code,url = line.strip('\n').split('\t')
return Colour(name, code, url)
def to_string(self):
hsv_to_show = [
int(self.hsv[0]*360+0.5),
int(self.hsv[1]*100+0.5),
int(self.hsv[2]*100+0.5)
]
hsv_str = "({}°, {}%, {}%)".format(*hsv_to_show)
text = "{name} [hex:{code}, RGB:{rgb}, HSV:{hsv}] ({link})".format(
name=self.name,
code=self.hexcode,
rgb=self.rgb,
hsv=hsv_str,
link=self.url)
return text
def to_image(self, size):
colordata = np.array(list(self.rgb)*(size*size),
dtype=np.uint8).reshape(size,size,3)
colorpic = img.fromarray(colordata)
picdata = StringIO.StringIO()
colorpic.save(picdata,format='png')
picdata.seek(0)
return picdata
def is_light(self):
return self.hsv[2] > 0.5
class ColoursBot(object):
def __init__(self, keys=const.keys, size=200,
ncolour = number_of_colours,
fileloc=os.path.dirname(__file__)+'/colors_simp_with_link.txt'):
try:
self.api = Twython(keys['api_key'],keys['api_secret'],
keys['access_token'], keys['access_token_secret'])
except Exception as e:
print("An error occured in initialization.\n{}".format(e))
self.ncolour=ncolour
self.fileloc=fileloc
self.size=size
with open(fileloc, 'r') as f:
self.colors = list(map(Colour.from_string,f))
def pick_colour(self):
if is_morning():
colors = list(filter(lambda c: c.is_light(), self.colors))
else:
colors = self.colors
n_max = len(colors)
return colors[randint(0,n_max-1)]
def update(self):
c = self.pick_colour()
text = c.to_string()
picdata = c.to_image(self.size)
# https://twython.readthedocs.org/en/latest/usage/advanced_usage.html
self.api.update_status_with_media(
status=text, media=picdata)
return c
if __name__ == "__main__":
a = ColoursBot()
print(a.update())
|
lesguillemets/gae_twbots
|
colors/colors.py
|
Python
|
mit
| 3,088
|
import datetime as dt
import threading
from serial_device.or_event import OrEvent
import numpy as np
import pandas as pd
import gobject
import gtk
import matplotlib as mpl
from streaming_plot import StreamingPlot
from ...max11210_adc_ui import MAX11210_read
import logging
def _generate_data(stop_event, data_ready, data):
'''
Generate random data to emulate, e.g., reading data from ADC.
The function is an example implementation of a ``f_data`` function
suitable for use with the :func:`measure_dialog` function.
Example usage
-------------
The following launches a measurement dialog which plots 5 points every
0.5 seconds, runs for 5 seconds, after which the dialog closes
automatically:
>>> data = measure_dialog(_generate_data, duration_s=5000, auto_close=True)
Parameters
----------
stop_event : threading.Event
Function returns when :data:`stop_event` is set.
data_ready : threading.Event
Function sets :data:`data_ready` whenever new data is available.
data : list
Function appends new data to :data:`data` before setting
:data:`data_ready`.
'''
delta_t = dt.timedelta(seconds=.1)
samples_per_plot = 5
while True:
time_0 = dt.datetime.now()
values_i = np.random.rand(samples_per_plot)
absolute_times_i = pd.Series([time_0 + i * delta_t
for i in xrange(len(values_i))])
data_i = pd.Series(values_i, index=absolute_times_i)
data.append(data_i)
data_ready.set()
if stop_event.wait(samples_per_plot *
delta_t.total_seconds()):
break
def measure_dialog(f_data, duration_s=None, auto_start=True,
auto_close=True, **kwargs):
'''
Launch a GTK dialog and plot data
Parameters
----------
f_data : function(stop_event, data_ready, data)
Function to run to generate data, e.g., read data from ADC.
The function is run in its own thread and is provided the following
parameters:
- :data:`stop_event` : threading.Event
- :data:`data_ready` : threading.Event
- :data:`data` : list
The function **MUST**:
- Return when the :data:`stop_event` is set.
- Set :data:`data_ready` event whenever new data is available.
duration_s : float, optional
Length of time to measure for (in seconds).
If duration is not specified, measure until window is closed or
``Pause`` button is pressed.
auto_start : bool, optional
Automatically start measuring when the dialog is launched.
Default is ``True``.
auto_close : bool, optional
If ``duration_s`` is specified, automatically close window once the
measurement duration has passed (unless the ``Pause`` button has been
pressed.
Default is ``True``.
**kwargs : dict
Additional keyword arguments are passed to the construction of the
:class:`streaming_plot.StreamingPlot` view.
'''
# `StreamingPlot` class uses threads. Need to initialize GTK to use
# threads. See [here][1] for more information.
#
# [1]: http://faq.pygtk.org/index.py?req=show&file=faq20.001.htp
gtk.gdk.threads_init()
    with mpl.style.context(['seaborn',
                            {'image.cmap': 'gray',
                             'image.interpolation': 'none'}]):
# Create dialog window to wrap PMT measurement view widget.
dialog = gtk.Dialog()
dialog.set_default_size(800, 600)
view = StreamingPlot(data_func=f_data, **kwargs)
dialog.get_content_area().pack_start(view.widget, True, True)
dialog.connect('check-resize', lambda *args: view.on_resize())
dialog.set_position(gtk.WIN_POS_MOUSE)
dialog.show_all()
view.fig.tight_layout()
if auto_start:
gobject.idle_add(view.start)
def _auto_close(*args):
if not view.stop_event.is_set():
# User did not explicitly pause the measurement. Automatically
# close the measurement and continue.
dialog.destroy()
measurement_complete = threading.Event()
view.widget.connect('destroy', lambda *args: measurement_complete.set())
if duration_s is not None:
def _schedule_stop(*args):
event = OrEvent(view.stop_event, view.started,
measurement_complete)
event.wait()
if view.started.is_set():
stop_func = _auto_close if auto_close else view.pause
gobject.timeout_add(duration_s * 1000, stop_func)
stop_schedule_thread = threading.Thread(target=_schedule_stop)
stop_schedule_thread.daemon = True
stop_schedule_thread.start()
dialog.run()
dialog.destroy()
measurement_complete.wait()
if view.data:
return pd.concat(view.data)
else:
return None
def adc_data_func_factory(proxy, delta_t=dt.timedelta(seconds=1), adc_rate=1,
resistor_val = False):
'''
Parameters
----------
proxy : mr_box_peripheral_board.SerialProxy
delta_t : datetime.timedelta
Time between ADC measurements.
Returns
-------
function
Function suitable for use with the :func:`measure_dialog` function.
'''
#set the adc digital gain
# proxy.MAX11210_setGain(adc_dgain)
#Set the pmt shutter pin to output
proxy.pin_mode(9, 1)
logger = logging.getLogger(__name__)
def _read_adc(stop_event, data_ready, data):
'''
Parameters
----------
stop_event : threading.Event
Function returns when :data:`stop_event` is set.
data_ready : threading.Event
Function sets :data:`data_ready` whenever new data is available.
data : list
Function appends new data to :data:`data` before setting
:data:`data_ready`.
        '''
#Start the ADC
try:
proxy.pmt_open_shutter()
logger.info('PMT Shutter Opened')
adc_dgain = 1
while True:
data_i = MAX11210_read(proxy, rate=adc_rate,
duration_s=delta_t.total_seconds())
#Convert data to Voltage, 24bit ADC with Vref = 3.0 V
data_i /= ((2 ** 24 - 1)/(3.0/adc_dgain))
if (resistor_val):
#Convert Voltage to Current, 30kOhm Resistor
data_i /= 30e3
else:
#Convert Voltage to Current, 300kOhm Resistor
data_i /= 300e3
# Set name to display units.
data_i.name = 'Current (A)'
data.append(data_i)
data_ready.set()
if stop_event.is_set():
break
finally:
proxy.pmt_close_shutter()
logger.info('PMT Shutter Closed')
return _read_adc
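# Illustrative usage sketch (not part of the original module; the SerialProxy
# construction below is an assumption -- see mr_box_peripheral_board for the
# real connection arguments):
#
#     proxy = SerialProxy()  # hypothetical setup
#     f_data = adc_data_func_factory(proxy, delta_t=dt.timedelta(seconds=1))
#     data = measure_dialog(f_data, duration_s=10, auto_close=True)
#     if data is not None:
#         print(data.describe())  # concatenated pandas Series of currents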
|
wheeler-microfluidics/mr-box-peripheral-board.py
|
mr_box_peripheral_board/ui/gtk/measure_dialog.py
|
Python
|
mit
| 7,247
|
import os
import shutil
from jinja2 import Environment, PackageLoader
import html
import xml.etree.ElementTree as et
class moodle_module:
def __init__(self, **kwargs):
self.backup = kwargs['backup']
self.temp_dir = kwargs['temp_dir']
self.db = kwargs['db']
self.directory = kwargs['directory']
self.final_dir = kwargs['working_dir']
self.db_cursor = self.db.cursor()
query = "CREATE TABLE IF NOT EXISTS pages (activityid int, moduleid int, contextid int, name text, content text)"
self.db_cursor.execute(query)
self.db.commit()
self.env = Environment(loader=PackageLoader(
'mbzextract.plugins.page', 'templates'))
def parse(self):
page_xml = et.parse(self.backup.open(
self.directory + "/page.xml")).getroot()
inforef_xml = et.parse(self.backup.open(
self.directory + "/inforef.xml")).getroot()
page = (page_xml.get('id'),
page_xml.get('moduleid'),
page_xml.get('contextid'),
page_xml.find('./page/name').text,
html.unescape(page_xml.find('./page/content').text))
self.name = page_xml.find('./page/name').text
self.db_cursor.execute(
"INSERT INTO pages VALUES(?,?,?,?,?)", page)
self.db.commit()
self.current_id = page_xml.get('id')
# create a list of files
self.files = self.backup.list_files(inforef_xml, self.db_cursor)
def extract(self):
self.db_cursor.execute('SELECT name,content FROM pages WHERE activityid=?',(self.current_id,))
results = self.db_cursor.fetchone()
template = self.env.get_template('page.html')
output = (template.render(name=results[0],content=results[1]))
path = os.path.join(self.final_dir, self.backup.stripped(self.name))
        if not os.path.exists(path):
os.makedirs(path)
os.chdir(path)
# write the page
f = open("page.html",'w+')
f.write(output)
f.close()
# files
for fileid in self.files:
self.db_cursor.execute(
'SELECT contenthash,filename FROM files WHERE filename != "." AND id=?', (fileid,))
results = self.db_cursor.fetchone()
if results is not None:
os.chdir(self.temp_dir)
self.backup.extract_file(
results[0], os.path.join(path, results[1]))
|
ocdude/mbzextract
|
mbzextract/plugins/page/page.py
|
Python
|
mit
| 2,508
|
#!/usr/bin/env python3
from shutil import copyfile
import glob
import os
sql = './sql'
expected = './expected'
NEW_FILES = ['native_features']
for file in NEW_FILES:
filelist = glob.glob(f"{sql}/*{file}.sql")
for path in filelist:
try:
os.remove(path)
        except OSError:
            print("Error while deleting file : ", path)
filelist = glob.glob(f"{expected}/*{file}.out")
for path in filelist:
try:
os.remove(path)
        except OSError:
            print("Error while deleting file : ", path)
files = {}
for filename in os.listdir(sql):
split_filename = filename.split("_", 1)
number = int(split_filename[0])
files[number] = split_filename[1]
max_file_num = max(files.keys())
def construct_filename(n, name):
return f"{str(n).zfill(2)}_{name}"
contents = """
SET client_min_messages = warning;
DO $$
BEGIN
IF current_setting('server_version_num')::INT >= 100000 THEN
SET session_replication_role TO replica;
ELSE
CREATE EXTENSION pglogical;
END IF;
END$$;
CREATE EXTENSION pgl_ddl_deploy;
CREATE OR REPLACE FUNCTION pgl_ddl_deploy.override() RETURNS BOOLEAN AS $BODY$
BEGIN
RETURN TRUE;
END;
$BODY$
LANGUAGE plpgsql IMMUTABLE;
INSERT INTO pgl_ddl_deploy.queue (queued_at,role,pubnames,message_type,message)
VALUES (now(),current_role,'{mock}'::TEXT[],pgl_ddl_deploy.queue_ddl_message_type(),'CREATE TABLE nativerox(id int)');
INSERT INTO pgl_ddl_deploy.queue (queued_at,role,pubnames,message_type,message)
VALUES (now(),current_role,'{mock}'::TEXT[],pgl_ddl_deploy.queue_ddl_message_type(),'ALTER TABLE nativerox ADD COLUMN bar text;');
INSERT INTO pgl_ddl_deploy.queue (queued_at,role,pubnames,message_type,message)
VALUES (now(),current_role,'{mock}'::TEXT[],pgl_ddl_deploy.queue_ddl_message_type(),$$SELECT pgl_ddl_deploy.notify_subscription_refresh('mock', true);$$);
DO $$
DECLARE v_ct INT;
BEGIN
IF current_setting('server_version_num')::INT >= 100000 THEN
SELECT COUNT(1) INTO v_ct FROM information_schema.columns WHERE table_name = 'nativerox';
RAISE LOG 'v_ct: %', v_ct;
IF v_ct != 2 THEN
RAISE EXCEPTION 'Count does not match expected: v_ct: %', v_ct;
END IF;
SELECT COUNT(1) INTO v_ct FROM pgl_ddl_deploy.subscriber_logs;
IF v_ct != 1 THEN
RAISE EXCEPTION 'Count does not match expected: v_ct: %', v_ct;
END IF;
PERFORM pgl_ddl_deploy.retry_all_subscriber_logs();
SELECT (SELECT COUNT(1) FROM pgl_ddl_deploy.subscriber_logs WHERE NOT succeeded) +
(SELECT COUNT(1) FROM pgl_ddl_deploy.subscriber_logs WHERE error_message ~* 'No subscription to publication mock exists') INTO v_ct;
IF v_ct != 3 THEN
RAISE EXCEPTION 'Count does not match expected: v_ct: %', v_ct;
END IF;
ELSE
SELECT COUNT(1) INTO v_ct FROM pgl_ddl_deploy.subscriber_logs;
IF v_ct != 0 THEN
RAISE EXCEPTION 'Count does not match expected: v_ct: %', v_ct;
END IF;
END IF;
END$$;
"""
fname = construct_filename(max_file_num + 1, 'native_features')
with open(f"{sql}/{fname}.sql", "w") as newfile:
newfile.write(contents)
copyfile(f"{sql}/{fname}.sql", f"{expected}/{fname}.out")
|
enova/pgl_ddl_deploy
|
generate_new_native_tests.py
|
Python
|
mit
| 3,132
|
#!/usr/bin/env python
# encoding: utf-8
'''
cmd_options -- Command Line Options Handler for JET MSS
'''
import os
import sys
import logging
from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
from os.path import dirname, expanduser, join, split, splitext, isabs
from subprocess import Popen, PIPE
from sys import argv, exit
__pgmname__ = os.path.splitext(os.path.basename(argv[0]))[0]
log = logging.getLogger(__pgmname__)
try:
_p = Popen(["git", "describe", "HEAD", "--long", "--tags"],
cwd=dirname(dirname(__file__)),
stdout=PIPE)
__version__ = _p.communicate()[0].strip('\n').strip()
except Exception, e:
log.error("Could not get revision number: {}".format(e))
__version__ = 'Unknown'
try:
_p = Popen(["git", "log", "-1", "--format=%cd", "--date=local"],
cwd=dirname(dirname(__file__)),
stdout=PIPE)
__commit_date__ = _p.communicate()[0].strip('\n').strip()
except Exception, e:
log.error("Could not get commit date: {}".format(e))
__commit_date__ = 'Unknown'
_p = None
program_version_message = '%%(prog)s %s (%s)' % (__version__, __commit_date__)
program_license = '''
Created by AJ Reynolds.
Copyright 2016 AJ Reynolds. All rights reserved.
Licensed under the Creative Commons Zero (CC0) License
Distributed on an "AS IS" basis without warranties
or conditions of any kind, either express or implied.
USAGE
'''
class CmdOptions(ArgumentParser):
'''Define Standard Options for All Command lines.'''
def __init__(self, **kwargs):
super(CmdOptions, self).__init__(self, **kwargs)
log_file = os.path.splitext(os.path.basename(argv[0]))[0] + '.log'
if log_file[:2] == '__':
log_file = os.path.basename(os.path.dirname(argv[0])) + '.log'
self.parser = ArgumentParser(description=program_license,
formatter_class=RawDescriptionHelpFormatter,
conflict_handler='resolve')
self.parser.add_argument('-V', '--version',
action='version',
version=program_version_message)
self.parser.add_argument('--logdir', action='store',
dest='logdir', default='/srv/log',
help='Specify a log directory [default: %(default)s]')
self.parser.add_argument('--logfile', action='store',
dest='logfile',
default=log_file,
help='Specify a custom logfile filename [default: %(default)s]')
self.parser.add_argument("--Error-Log", dest="errorlog", action="store_true", default=False,
help="Create Seperate Log for Errors")
        self.parser.add_argument('--session_log', dest="session_log",
                                 action="store_false", default=True,
                                 help="disable the session log")
self.parser.add_argument('pathname', metavar="pathname", nargs='*',
help="paths to folder(s)/file(s) ") # Setup argument parser
group_loglvl = self.parser.add_mutually_exclusive_group()
group_loglvl.add_argument("--verbose", dest="loglevel",
action="store_const", const="VERBOSE",
default='INFO',
help="increase logging to include additional informational information")
group_loglvl.add_argument("--debug", dest="loglevel",
action="store_const", const="DEBUG",
help="increase logging to include debugging information")
group_loglvl.add_argument("--trace", dest="loglevel",
action="store_const", const="TRACE",
help="increase logging to include trace information")
group_loglvl.add_argument("--quiet", dest="loglevel",
action="store_const", const="WARNING",
help="Limit logging to only include Warning, Errors, and Critical information")
group_loglvl.add_argument("--errors", dest="loglevel",
action="store_const", const="ERROR",
help="Limit logging to only include Errors and Critical information")
if __name__ == "__main__":
opt = CmdOptions()
args = opt.parser.parse_args(argv[1:])
log.info(args)
exit(0)
|
stampedeboss/lights
|
lights/cmdoptions.py
|
Python
|
cc0-1.0
| 3,918
|
from decimal import Decimal
import logging
logger = logging.getLogger(__name__)
def check_number_for_decimal_conversion(number):
a = type(number) in (int, long)
b = isinstance(number, (str, unicode, Decimal))
c = isinstance(number, float)
if not (a or b or c):
logger.warning("You are using a number (" + str(number) +
") that is not suitable to convert to Decimal!")
class Currency(object):
"""A currency (USD, EUR, ...)"""
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Currency({0})>".format(self.name)
def __str__(self):
return self.name
def __rmul__(self, other):
try:
return Amount(other, self)
except ValueError:
raise TypeError("Can't multiply currency with {0}".format(other))
def __div__(self, other):
if isinstance(other, Currency):
return ExchangeRate(other, self, 1)
raise TypeError("Can't divide a Currency by a "+str(type(other)))
def __rdiv__(self, other):
if isinstance(other, Amount):
return ExchangeRate(self, other.currency, other.value)
raise TypeError("Can't divide a "++str(type(other))+" by a Currency")
class FiatCurrency(Currency):
pass
class CryptoCurrency(Currency):
pass
class BadCurrency( Exception ):
def __init__(self, exchange_rate, other_currency):
self.er, self.oc= exchange_rate, other_currency
def __str__(self):
s= "A ExchangeRate of {0} cannot handle {1}"
return s.format(self.er, self.oc)
class ExchangeRate(object):
"""The proportion between two currencies' values"""
def __init__(self, c1, c2, exchange_rate):
'''c2 = exchange_rate * c1'''
assert all([isinstance(x, Currency) for x in (c1, c2)])
assert c1 != c2
check_number_for_decimal_conversion(exchange_rate)
self._c= (c1,c2)
self._er = Decimal(exchange_rate)
def convert(self, amount, currency=None):
'''if currency is not specified, converts amount to the other
currency of this ExchangeRate. Otherwise, converts (if needed)
to the specified one'''
if currency==amount.currency:
return amount
assert isinstance(amount, Amount)
i= self._isFirst( amount.currency)
c= self._c[1 if i else 0]
if currency and c!=currency:
i= not(i)
er= self._er if i else 1 / self._er
if currency and c!=currency:
raise BadCurrency(self, currency)
return Amount(amount.value * er, c)
def reverse( self ):
'''returns a ExchangeRate with swapped currencies order.
The relative value of the currencies remains the same'''
return ExchangeRate(self._c[1], self._c[0], 1/self._er)
def abs(self):
return self._er
def convert_exchangerate( self, exchange_rate):
'''Let (CA0,CA1) be the currencies of self, and (CB0,CB1) the
currencies of exchange_rate. If CA0==CB0, this method returns a
new ExchangeRate with currencies CA1, CB1, converting the
internal exchange rate to match.'''
a,b= self, exchange_rate
common_currency= set(a._c).intersection(b._c)
if len(common_currency)!=1:
raise Exception("Can't convert: currencies don't match")
cc= common_currency.pop()
if cc==a._c[1]:
a=a.reverse()
if cc==b._c[1]:
b=b.reverse()
return ExchangeRate( a._c[1], b._c[1], b._er/a._er )
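    # Worked example (illustrative, following the docstring above):
    #   usd_eur = ExchangeRate(usd, eur, '0.90')   # CA0=USD, CA1=EUR
    #   usd_gbp = ExchangeRate(usd, gbp, '0.80')   # CB0=USD, CB1=GBP
    #   usd_eur.convert_exchangerate(usd_gbp)
    #   # -> ExchangeRate(eur, gbp, 0.80 / 0.90), i.e. GBP per EUR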
def _isFirst(self, currency):
'''returns if currency is the first'''
if self._c[0] == currency:
return True
elif self._c[1] == currency:
return False
else:
raise BadCurrency(self, currency)
def otherCurrency(self, currency):
return self._c[ 1 if self._isFirst(currency) else 0 ]
def inverse(self):
'''returns the inverse exchange rate.
The relative value of the currencies is swapped'''
return ExchangeRate(self._c[1], self._c[0], self._er )
def per(self, currency):
'''gives the ExchangeRate with currency as the denominator.'''
if self._c[0]==currency:
return self
else:
return self.reverse()
def __cmp__(self, other):
e=ValueError("can't compare the two values:", str(self),
str(other))
if not isinstance(other, ExchangeRate):
raise e
if self._c[0]!=other._c[0] or self._c[1]!=other._c[1]:
raise e
return cmp(self._er, other._er)
def __repr__(self):
return "<ExchangeRate({:.2f} {}/{})>".format( self._er,
self._c[1].name,
self._c[0].name)
def __str__(self):
return "{:.2f} {}/{}".format( self._er, self._c[1].name,
self._c[0].name)
def clone(self):
# returns a copy of this ExchangeRate
return ExchangeRate(self._c[0], self._c[1], self._er)
def __iadd__(self, other):
if isinstance(other, ExchangeRate):
if self._c!=other._c:
raise ValueError("Can't sum two ExchangeRate with " + \
"different currencies")
self._er += other._er
else:
raise ValueError("Can't sum ExchangeRate to ", type(other))
return self
def __add__(self, other):
a = self.clone()
a += other
return a
def __neg__(self):
a = self.clone()
a._er = -a._er
return a
def __isub__(self, other):
self += -other
return self
def __sub__(self, other):
a = self.clone() + (-other)
return a
def __mul__(self, other):
if isinstance(other, Amount):
return self.convert(other)
elif isinstance(other, ExchangeRate):
            assert other._c[1] == self._c[0] or other._c[0] == self._c[1]
            if other._c[0] == self._c[1] and other._c[1] == self._c[0]:
                return Decimal(other._er * self._er)
            elif other._c[0] == self._c[1]:  # 0 is the denominator
                return ExchangeRate(self._c[0], other._c[1], other._er * self._er)
            else:
                return ExchangeRate(other._c[0], self._c[1], other._er * self._er)
else:
raise Exception("Can only multiply currency that reduce to a simple C1/C2 exchange rate")
class Amount(object):
"""An amount of a given currency"""
def __init__(self, value, currency):
check_number_for_decimal_conversion(value)
try:
self.value = Decimal(value)
except:
raise ValueError("Can't convert {0} to decimal".format(value))
self.currency = currency
    def convert(self, currencyequivalence, to_currency):
        if self.currency != to_currency:
            return currencyequivalence.convert(self, to_currency)
        return self
def clone(self):
# returns a copy of this amount
return Amount(self.value, self.currency)
def __repr__(self):
return "<Amount({:.2f} {})>".format(self.value, self.currency)
def __str__(self):
return "{:.2f} {}".format(self.value, self.currency)
def __iadd__(self, other):
if type(other) in (int, float) or isinstance(other, Decimal):
self.value += other
elif isinstance(other, Amount):
if self.currency != other.currency:
raise ValueError("Can't sum two amounts in " + \
"different currencies")
self.value += other.value
else:
raise ValueError("Can't sum Amount to ", type(other))
return self
def __add__(self, other):
a = self.clone()
a += other
return a
def __neg__(self):
a = self.clone()
a.value = -a.value
return a
def __isub__(self, other):
self += -other
return self
def __sub__(self, other):
a = self.clone() + (-other)
return a
def __imul__(self, other):
if type(other) in (int, float) or isinstance(other, Decimal):
self.value *= other
else:
raise ValueError("Can't multiply Amount to ", type(other))
return self
def __mul__(self, other):
a = self.clone()
a *= other
return a
def __cmp__(self, other):
if not isinstance(other, Amount) or other.currency != self.currency:
raise ValueError("can't compare the two amounts",
str(self), str(other))
return cmp(self.value, other.value)
|
dkronst/mexbtcapi
|
mexbtcapi/concepts/currency.py
|
Python
|
cc0-1.0
| 8,843
|
#
# Get the pin which correlates with a given purpose.
#
# @param char array purpose
# The purpose to search by.
# @return int
# A pin which can be used for the given purpose.
#
def getPin(purpose):
    purpose_collection = {
        "i2c-data": 20,
        "i2c-clock": 19,
        "adc": 39,
        "adc0": 39,
        "adc-0": 39,
        "one-wire-data": 40,
        "adc1": 40,
        "adc-1": 40,
        "spi-slave-select": 28,
        "spi-master-out-slave-in": 30,
        "spi-master-in-slave-out": 29,
        "spi-clock": 31,
    }
    if purpose in purpose_collection:
        return purpose_collection[purpose]
    else:
        return -1
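# Example usage (illustrative):
#   getPin("i2c-data")  # -> 20
#   getPin("uart-tx")   # -> -1 (purpose not in the table)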
|
makerblueprint/retrospecification
|
modules/host/beaglebone-black/beaglebone-black.py
|
Python
|
cc0-1.0
| 599
|
import re
def check_patterns():
patterns_list = []
#Read patterns file
with open("patterns.txt") as patterns1:
for line in patterns1:
line = line.rstrip("\n")
patterns_list.append(line)
print(patterns_list)
#Read log file
with open("Sample Logs.txt") as log1:
with open("matching_logs.txt", "w") as output_fh:
for log_line in log1:
log_line = log_line.rstrip("\n")
for pattern in patterns_list:
match = re.search(pattern, log_line)
                    if match is not None:
                        log_line = log_line.lstrip(" ")
                        # Record the matching line in matching_logs.txt.
                        output_fh.write(log_line + "\n")
                        count = int(match.group(1))
                        host = match.group(2)
                        if count > 100:
                            print(host, " Has count > 100")
                            print(log_line)
def rstrip_example():
s1 = "hello"
s1 = s1.rstrip("lo")
print(s1)
check_patterns()
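# Note (assumption, inferred from the group() calls above): each regex in
# patterns.txt must capture the count as group 1 and the host as group 2,
# e.g. r"dropped (\d+) packets from host (\S+)".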
|
gatoravi/python_chennai_jul2016
|
code/parsing_logs_arun.py
|
Python
|
cc0-1.0
| 991
|
"""
Django settings for djangoecommerce project.
Generated by 'django-admin startproject' using Django 1.9.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qg(sw)grt&2v+++odrz%zac+h*2f@gyd*szcov1u2x$=ul%svz'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# libs
'widget_tweaks',
# apps
'core',
'accounts',
'catalog',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'core.middleware.LogMiddleware',
]
ROOT_URLCONF = 'djangoecommerce.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
# apps
'catalog.context_processors.categories',
],
},
},
]
WSGI_APPLICATION = 'djangoecommerce.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'pt-br'
TIME_ZONE = 'America/Recife'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
# Update database configuration with $DATABASE_URL.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles')
# auth
LOGIN_REDIRECT_URL = 'accounts:index'
LOGIN_URL = 'login'
AUTH_USER_MODEL = 'accounts.User'
AUTHENTICATION_BACKENDS = [
'django.contrib.auth.backends.ModelBackend',
'accounts.backends.ModelBackend',
]
# Messages
from django.contrib.messages import constants as message_constants
MESSAGE_TAGS = {
message_constants.DEBUG: 'debug',
message_constants.INFO: 'info',
message_constants.SUCCESS: 'success',
message_constants.WARNING: 'warning',
message_constants.ERROR: 'danger',
}
try:
from .local_settings import *
except ImportError:
pass
|
gileno/curso-citi
|
djangoecommerce/settings.py
|
Python
|
cc0-1.0
| 4,345
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
check same interface phos_binding patterns
"""
import os
import sys
import urllib
import urllib2
import cPickle as pickle
from multiprocessing import Pool
def get_entityid(p):
pdbid,interface_id,chain1,chain2 = p
url = 'http://www.rcsb.org/pdb/rest/customReport.csv?'
data = {
'pdbids':pdbid,
'customReportColumns':'structureId,entityId',
'service':'wsfile',
'format':'csv',
}
data = urllib.urlencode(data)
req = urllib2.Request(url,data)
response = urllib2.urlopen(req)
lines = response.readlines()
lines = [line.rstrip('\r\n') for line in lines[1:]]
lines = [line for line in lines if line]
lines = [line.split(',') for line in lines]
lines = [[w.strip('"') for w in line] for line in lines]
chain1_id = [line for line in lines if line[1] == chain1][0][2]
    chain2_id = [line for line in lines if line[1] == chain2][0][2]
return pdbid,interface_id,chain1_id,chain2_id
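# The parsing above assumes customReport CSV rows of the quoted form
#   "4HHB","A","1"
# i.e. structureId in column 0, chain id in column 1 and entityId in
# column 2 (inferred from the indexing; "4HHB"/"A"/"1" are illustrative).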
def filter_same_interface(pdb_interfaces):
pdbid_chain = [(p[0],p[1],p[-1][0][0],p[-1][1][0]) for p in pdb_interfaces]
p = Pool(4)
result = p.map(get_entityid,pdbid_chain)
p.close()
pdb_chain_entity = {}
for r in result:
if not (r[0],r[2],r[3]) in pdb_chain_entity.keys():
pdb_chain_entity[(r[0],r[2],r[3])] = [r]
else:
pdb_chain_entity[(r[0],r[2],r[3])].append(r)
with open('same_interface.txt','w') as w_f:
same = []
different = []
for k,v in pdb_chain_entity.iteritems():
if len(v) > 1:
print >> w_f,k
cluster = [p for p in pdb_interfaces if (p[0],p[1]) in [(vi[0],vi[1]) for vi in v]]
cluster_patterns = []
for c in cluster:
bonds = c[6]
phos_interacting_residues = {}
                    PHOS = ['TPO_ O1P','TPO_ O2P','TPO_ O3P','TPO_ OG1','SEP_ O1P','SEP_ O2P','SEP_ O3P','SEP_ OG ','PTR_ O1P','PTR_ O2P','PTR_ O3P','PTR_ OH ']
for bond in bonds:
bond_type,bond_info = bond
for bondi in bond_info:
res1,res2,dist = bondi
if [p for p in PHOS if res1[-8:] == p]:
res1 = '_'.join(res1.split('_')[:3])
if not res1 in phos_interacting_residues.keys():
phos_interacting_residues[res1] = [res2]
else:
phos_interacting_residues[res1].append(res2)
elif [p for p in PHOS if res2[-8:] == p]:
res2 = '_'.join(res2.split('_')[:3])
if not res2 in phos_interacting_residues.keys():
phos_interacting_residues[res2] = [res1]
else:
phos_interacting_residues[res2].append(res1)
for phos,interacting_residues in phos_interacting_residues.items():
if interacting_residues:
interacting_residues = ['_'.join(r.split('_')[:3]) for r in interacting_residues]
interacting_residues = list(set(interacting_residues))
interacting_residues = [r.split('_')[2] for r in interacting_residues]
interacting_residues = sorted(interacting_residues)
interacting_residues = '_'.join(interacting_residues)
cluster_patterns.append(interacting_residues)
print >> w_f,c[0],c[1],interacting_residues
print cluster_patterns
if len(cluster_patterns) > 1 and len(set(cluster_patterns)) == 1:
same.append(1)
else:
different.append(1)
print 'same',len(same)
print 'different',len(different)
pdb_unique_interface = [(v[0][0],v[0][1]) for k,v in pdb_chain_entity.iteritems()]
pdb_interfaces = [p for p in pdb_interfaces if (p[0],p[1]) in pdb_unique_interface]
print 'after filter same entity',len(pdb_interfaces)
return pdb_interfaces
def filter_non_one_phos(pdb_interfaces):
zero_phos_interfaces = []
one_phos_interfaces = []
more_phos_interfaces = []
for interface in pdb_interfaces:
pdbid,p1,interface_area,p2,p3,p4,bonds = interface[:7]
phos_res = []
for bond in bonds:
bond_type,bond_info = bond
for bondi in bond_info:
res1,res2,dist = bondi
if 'TPO' in res1 or 'SEP' in res1 or 'PTR' in res1:
phos_res.append('_'.join(res1.split('_')[:3]))
if 'TPO' in res2 or 'SEP' in res2 or 'PTR' in res2:
phos_res.append('_'.join(res2.split('_')[:3]))
phos_res = set(phos_res)
if len(phos_res) == 1:
one_phos_interfaces.append(interface)
elif len(phos_res) > 1:
more_phos_interfaces.append(interface)
else:
zero_phos_interfaces.append(interface)
print 'after filter non_one_phos_interfaces',len(one_phos_interfaces)
return one_phos_interfaces
def main():
pdb_interfaces = pickle.load(open(sys.argv[-1]))
pdb_interfaces = [p for p in pdb_interfaces if p[7][0][2].lower() == 'x,y,z' and p[7][1][2].lower() == 'x,y,z']
pdb_interfaces = [p for p in pdb_interfaces if p[7][0][1] == 'Protein' and p[7][1][1] == 'Protein']
pdb_interfaces = filter_non_one_phos(pdb_interfaces)
pdb_interfaces = filter_same_interface(pdb_interfaces)
if __name__ == "__main__":
main()
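# For reference, each entry of the pickled `pdb_interfaces` list is assumed
# (inferred from the indexing above) to look like:
#   (pdbid, interface_id, interface_area, _, _, _,
#    bonds,       # [(bond_type, [(res1, res2, dist), ...]), ...]
#    chain_info)  # [(chain1_id, 'Protein', 'x,y,z'), (chain2_id, 'Protein', 'x,y,z')]
# Typical invocation: python pisa_same_entity.py interfaces.pkl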
|
lituan/tools
|
pisa/pisa_same_entity.py
|
Python
|
cc0-1.0
| 5,849
|
import os
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'MODEL1006230003.xml')
with open(sbmlFilePath,'r') as f:
sbmlString = f.read()
def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
if module_exists('libsbml'):
import libsbml
sbml = libsbml.readSBMLFromString(sbmlString)
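# A short usage sketch; getModel(), getId() and getNumSpecies() are standard
# libsbml accessors, and `sbml` is only defined when libsbml is available.
if module_exists('libsbml'):
    model = sbml.getModel()
    if model is not None:
        print("model %s defines %d species" % (model.getId(), model.getNumSpecies()))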
|
biomodels/MODEL1006230003
|
MODEL1006230003/model.py
|
Python
|
cc0-1.0
| 427
|
#! /usr/bin/python
import sys, localconfig, platform, time
#OS Runtime comments
if platform.system() == "Windows":
sys.path.append(localconfig.winpath)
print "You are running the AnkitBot UAA Module for Windows. Sponsored by DQ. :)"
else:
sys.path.append(localconfig.linuxpath)
print "You are running the AnkitBot UAA Module for Linux. Sponsored by DQ. :)"
import wikipedia
import globalfunc as globe
override = False
if not globe.startAllowed(override):
print "Fatal - System Access Denied."
sys.exit(1)
print "System Alert - Program is still running."
globe.main()
globe.checkWait()
globe.pageCleanup()
wikipedia.stopme()
|
QEDK/AnkitBot
|
UAA/UAA.py
|
Python
|
epl-1.0
| 680
|
#
# Copyright (c) 2010 Mikhail Gusarov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
""" path.py - An object representing a path to a file or directory.
Original author:
Jason Orendorff <jason.orendorff\x40gmail\x2ecom>
Current maintainer:
Jason R. Coombs <jaraco@jaraco.com>
Contributors:
Mikhail Gusarov <dottedmag@dottedmag.net>
Marc Abramowitz <marc@marc-abramowitz.com>
Jason R. Coombs <jaraco@jaraco.com>
Jason Chu <jchu@xentac.net>
Vojislav Stojkovic <vstojkovic@syntertainment.com>
Example::
from path import path
d = path('/home/guido/bin')
for f in d.files('*.py'):
f.chmod(0o755)
path.py requires Python 2.5 or later.
"""
from __future__ import print_function, division, absolute_import
import sys
import warnings
import os
import fnmatch
import glob
import shutil
import codecs
import hashlib
import errno
import tempfile
import functools
import operator
import re
import contextlib
try:
import win32security
except ImportError:
pass
try:
import pwd
except ImportError:
pass
################################
# Monkey patchy python 3 support
try:
basestring
except NameError:
basestring = str
try:
unicode
except NameError:
unicode = str
try:
getcwdu = os.getcwdu
except AttributeError:
getcwdu = os.getcwd
if sys.version < '3':
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
o777 = 511
o766 = 502
o666 = 438
o554 = 364
################################
##########################
# Python 2.5 compatibility
try:
from functools import reduce
except ImportError:
pass
##########################
__version__ = '5.1'
__all__ = ['path', 'CaseInsensitivePattern']
class TreeWalkWarning(Warning):
pass
def simple_cache(func):
"""
Save results for the 'using_module' classmethod.
When Python 3.2 is available, use functools.lru_cache instead.
"""
saved_results = {}
def wrapper(cls, module):
if module in saved_results:
return saved_results[module]
saved_results[module] = func(cls, module)
return saved_results[module]
return wrapper
class ClassProperty(property):
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class multimethod(object):
"""
Acts like a classmethod when invoked from the class and like an
instancemethod when invoked from the instance.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, owner):
return (
functools.partial(self.func, owner) if instance is None
else functools.partial(self.func, owner, instance)
)
class path(unicode):
""" Represents a filesystem path.
For documentation on individual methods, consult their
counterparts in os.path.
"""
module = os.path
""" The path module to use for path operations.
.. seealso:: :mod:`os.path`
"""
def __init__(self, other=''):
if other is None:
raise TypeError("Invalid initial value for path: None")
@classmethod
@simple_cache
def using_module(cls, module):
subclass_name = cls.__name__ + '_' + module.__name__
bases = (cls,)
ns = {'module': module}
return type(subclass_name, bases, ns)
@ClassProperty
@classmethod
def _next_class(cls):
"""
What class should be used to construct new instances from this class
"""
return cls
# --- Special Python methods.
def __repr__(self):
return '%s(%s)' % (type(self).__name__, super(path, self).__repr__())
# Adding a path and a string yields a path.
def __add__(self, more):
try:
return self._next_class(super(path, self).__add__(more))
except TypeError: # Python bug
return NotImplemented
def __radd__(self, other):
if not isinstance(other, basestring):
return NotImplemented
return self._next_class(other.__add__(self))
# The / operator joins paths.
def __div__(self, rel):
""" fp.__div__(rel) == fp / rel == fp.joinpath(rel)
Join two path components, adding a separator character if
needed.
.. seealso:: :func:`os.path.join`
"""
return self._next_class(self.module.join(self, rel))
# Make the / operator work even when true division is enabled.
__truediv__ = __div__
def __enter__(self):
self._old_dir = self.getcwd()
os.chdir(self)
return self
def __exit__(self, *_):
os.chdir(self._old_dir)
@classmethod
def getcwd(cls):
""" Return the current working directory as a path object.
.. seealso:: :func:`os.getcwdu`
"""
return cls(getcwdu())
#
# --- Operations on path strings.
def abspath(self):
""" .. seealso:: :func:`os.path.abspath` """
return self._next_class(self.module.abspath(self))
def normcase(self):
""" .. seealso:: :func:`os.path.normcase` """
return self._next_class(self.module.normcase(self))
def normpath(self):
""" .. seealso:: :func:`os.path.normpath` """
return self._next_class(self.module.normpath(self))
def realpath(self):
""" .. seealso:: :func:`os.path.realpath` """
return self._next_class(self.module.realpath(self))
def expanduser(self):
""" .. seealso:: :func:`os.path.expanduser` """
return self._next_class(self.module.expanduser(self))
def expandvars(self):
""" .. seealso:: :func:`os.path.expandvars` """
return self._next_class(self.module.expandvars(self))
def dirname(self):
""" .. seealso:: :attr:`parent`, :func:`os.path.dirname` """
return self._next_class(self.module.dirname(self))
def basename(self):
""" .. seealso:: :attr:`name`, :func:`os.path.basename` """
return self._next_class(self.module.basename(self))
def expand(self):
""" Clean up a filename by calling :meth:`expandvars()`,
:meth:`expanduser()`, and :meth:`normpath()` on it.
This is commonly everything needed to clean up a filename
read from a configuration file, for example.
"""
return self.expandvars().expanduser().normpath()
@property
def namebase(self):
""" The same as :meth:`name`, but with one file extension stripped off.
For example,
``path('/home/guido/python.tar.gz').name == 'python.tar.gz'``,
but
``path('/home/guido/python.tar.gz').namebase == 'python.tar'``.
"""
base, ext = self.module.splitext(self.name)
return base
@property
def ext(self):
""" The file extension, for example ``'.py'``. """
f, ext = self.module.splitext(self)
return ext
@property
def drive(self):
""" The drive specifier, for example ``'C:'``.
This is always empty on systems that don't use drive specifiers.
"""
drive, r = self.module.splitdrive(self)
return self._next_class(drive)
parent = property(
dirname, None, None,
""" This path's parent directory, as a new path object.
For example,
``path('/usr/local/lib/libpython.so').parent ==
path('/usr/local/lib')``
.. seealso:: :meth:`dirname`, :func:`os.path.dirname`
""")
name = property(
basename, None, None,
""" The name of this file or directory without the full path.
For example,
``path('/usr/local/lib/libpython.so').name == 'libpython.so'``
.. seealso:: :meth:`basename`, :func:`os.path.basename`
""")
def splitpath(self):
""" p.splitpath() -> Return ``(p.parent, p.name)``.
.. seealso:: :attr:`parent`, :attr:`name`, :func:`os.path.split`
"""
parent, child = self.module.split(self)
return self._next_class(parent), child
def splitdrive(self):
""" p.splitdrive() -> Return ``(p.drive, <the rest of p>)``.
Split the drive specifier from this path. If there is
no drive specifier, p.drive is empty, so the return value
is simply ``(path(''), p)``. This is always the case on Unix.
.. seealso:: :func:`os.path.splitdrive`
"""
drive, rel = self.module.splitdrive(self)
return self._next_class(drive), rel
def splitext(self):
""" p.splitext() -> Return ``(p.stripext(), p.ext)``.
Split the filename extension from this path and return
the two parts. Either part may be empty.
The extension is everything from ``'.'`` to the end of the
last path segment. This has the property that if
``(a, b) == p.splitext()``, then ``a + b == p``.
.. seealso:: :func:`os.path.splitext`
"""
filename, ext = self.module.splitext(self)
return self._next_class(filename), ext
def stripext(self):
""" p.stripext() -> Remove one file extension from the path.
For example, ``path('/home/guido/python.tar.gz').stripext()``
returns ``path('/home/guido/python.tar')``.
"""
return self.splitext()[0]
def splitunc(self):
""" .. seealso:: :func:`os.path.splitunc` """
unc, rest = self.module.splitunc(self)
return self._next_class(unc), rest
@property
def uncshare(self):
"""
The UNC mount point for this path.
This is empty for paths on local drives.
"""
unc, r = self.module.splitunc(self)
return self._next_class(unc)
@multimethod
def joinpath(cls, first, *others):
"""
Join first to zero or more path components, adding a separator
character (``first.module.sep``) if needed. Returns a new instance of
``first._next_class``.
.. seealso:: :func:`os.path.join`
"""
if not isinstance(first, cls):
first = cls(first)
return first._next_class(first.module.join(first, *others))
def splitall(self):
r""" Return a list of the path components in this path.
The first item in the list will be a path. Its value will be
either :data:`os.curdir`, :data:`os.pardir`, empty, or the root
directory of this path (for example, ``'/'`` or ``'C:\\'``). The
other items in the list will be strings.
``path.path.joinpath(*result)`` will yield the original path.
"""
parts = []
loc = self
while loc != os.curdir and loc != os.pardir:
prev = loc
loc, child = prev.splitpath()
if loc == prev:
break
parts.append(child)
parts.append(loc)
parts.reverse()
return parts
def relpath(self, start='.'):
""" Return this path as a relative path,
based from `start`, which defaults to the current working directory.
"""
cwd = self._next_class(start)
return cwd.relpathto(self)
def relpathto(self, dest):
""" Return a relative path from `self` to `dest`.
If there is no relative path from `self` to `dest`, for example if
they reside on different drives in Windows, then this returns
``dest.abspath()``.
"""
origin = self.abspath()
dest = self._next_class(dest).abspath()
orig_list = origin.normcase().splitall()
# Don't normcase dest! We want to preserve the case.
dest_list = dest.splitall()
if orig_list[0] != self.module.normcase(dest_list[0]):
# Can't get here from there.
return dest
# Find the location where the two paths start to differ.
i = 0
for start_seg, dest_seg in zip(orig_list, dest_list):
if start_seg != self.module.normcase(dest_seg):
break
i += 1
# Now i is the point where the two paths diverge.
# Need a certain number of "os.pardir"s to work up
# from the origin to the point of divergence.
segments = [os.pardir] * (len(orig_list) - i)
# Need to add the diverging part of dest_list.
segments += dest_list[i:]
if len(segments) == 0:
# If they happen to be identical, use os.curdir.
relpath = os.curdir
else:
relpath = self.module.join(*segments)
return self._next_class(relpath)
# --- Listing, searching, walking, and matching
def listdir(self, pattern=None):
""" D.listdir() -> List of items in this directory.
Use :meth:`files` or :meth:`dirs` instead if you want a listing
of just files or just subdirectories.
The elements of the list are path objects.
With the optional `pattern` argument, this only lists
items whose names match the given pattern.
.. seealso:: :meth:`files`, :meth:`dirs`
"""
if pattern is None:
pattern = '*'
return [
self / child
for child in os.listdir(self)
if self._next_class(child).fnmatch(pattern)
]
def dirs(self, pattern=None):
""" D.dirs() -> List of this directory's subdirectories.
The elements of the list are path objects.
This does not walk recursively into subdirectories
(but see :meth:`walkdirs`).
With the optional `pattern` argument, this only lists
directories whose names match the given pattern. For
example, ``d.dirs('build-*')``.
"""
return [p for p in self.listdir(pattern) if p.isdir()]
def files(self, pattern=None):
""" D.files() -> List of the files in this directory.
The elements of the list are path objects.
This does not walk into subdirectories (see :meth:`walkfiles`).
With the optional `pattern` argument, this only lists files
whose names match the given pattern. For example,
``d.files('*.pyc')``.
"""
return [p for p in self.listdir(pattern) if p.isfile()]
def walk(self, pattern=None, errors='strict'):
""" D.walk() -> iterator over files and subdirs, recursively.
The iterator yields path objects naming each child item of
this directory and its descendants. This requires that
D.isdir().
This performs a depth-first traversal of the directory tree.
Each directory is returned just before all its children.
The `errors=` keyword argument controls behavior when an
error occurs. The default is 'strict', which causes an
exception. The other allowed values are 'warn', which
reports the error via ``warnings.warn()``, and 'ignore'.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
childList = self.listdir()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in childList:
if pattern is None or child.fnmatch(pattern):
yield child
try:
isdir = child.isdir()
except Exception:
if errors == 'ignore':
isdir = False
elif errors == 'warn':
warnings.warn(
"Unable to access '%s': %s"
% (child, sys.exc_info()[1]),
TreeWalkWarning)
isdir = False
else:
raise
if isdir:
for item in child.walk(pattern, errors):
yield item
def walkdirs(self, pattern=None, errors='strict'):
""" D.walkdirs() -> iterator over subdirs, recursively.
With the optional `pattern` argument, this yields only
directories whose names match the given pattern. For
example, ``mydir.walkdirs('*test')`` yields only directories
with names ending in 'test'.
The `errors=` keyword argument controls behavior when an
error occurs. The default is 'strict', which causes an
exception. The other allowed values are 'warn', which
reports the error via ``warnings.warn()``, and 'ignore'.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
dirs = self.dirs()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in dirs:
if pattern is None or child.fnmatch(pattern):
yield child
for subsubdir in child.walkdirs(pattern, errors):
yield subsubdir
def walkfiles(self, pattern=None, errors='strict'):
""" D.walkfiles() -> iterator over files in D, recursively.
The optional argument, `pattern`, limits the results to files
with names that match the pattern. For example,
``mydir.walkfiles('*.tmp')`` yields only files with the .tmp
extension.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
childList = self.listdir()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in childList:
try:
isfile = child.isfile()
isdir = not isfile and child.isdir()
except:
if errors == 'ignore':
continue
elif errors == 'warn':
warnings.warn(
"Unable to access '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
continue
else:
raise
if isfile:
if pattern is None or child.fnmatch(pattern):
yield child
elif isdir:
for f in child.walkfiles(pattern, errors):
yield f
def fnmatch(self, pattern, normcase=None):
""" Return ``True`` if `self.name` matches the given pattern.
pattern - A filename pattern with wildcards,
for example ``'*.py'``. If the pattern contains a `normcase`
attribute, it is applied to the name and path prior to comparison.
normcase - (optional) A function used to normalize the pattern and
filename before matching. Defaults to self.module which defaults
to os.path.normcase.
.. seealso:: :func:`fnmatch.fnmatch`
"""
default_normcase = getattr(pattern, 'normcase', self.module.normcase)
normcase = normcase or default_normcase
name = normcase(self.name)
pattern = normcase(pattern)
return fnmatch.fnmatchcase(name, pattern)
def glob(self, pattern):
""" Return a list of path objects that match the pattern.
`pattern` - a path relative to this directory, with wildcards.
For example, ``path('/users').glob('*/bin/*')`` returns a list
of all the files users have in their bin directories.
.. seealso:: :func:`glob.glob`
"""
cls = self._next_class
return [cls(s) for s in glob.glob(self / pattern)]
#
# --- Reading or writing an entire file at once.
def open(self, *args, **kwargs):
""" Open this file. Return a file object.
.. seealso:: :func:`python:open`
"""
return open(self, *args, **kwargs)
def bytes(self):
""" Open this file, read all bytes, return them as a string. """
with self.open('rb') as f:
return f.read()
def chunks(self, size, *args, **kwargs):
""" Returns a generator yielding chunks of the file, so it can
be read piece by piece with a simple for loop.
Any argument you pass after `size` will be passed to `open()`.
:example:
>>> hash = hashlib.md5()
>>> for chunk in path("path.py").chunks(8192, mode='rb'):
... hash.update(chunk)
This will read the file by chunks of 8192 bytes.
"""
with open(self, *args, **kwargs) as f:
while True:
d = f.read(size)
if not d:
break
yield d
def write_bytes(self, bytes, append=False):
""" Open this file and write the given bytes to it.
Default behavior is to overwrite any existing file.
Call ``p.write_bytes(bytes, append=True)`` to append instead.
"""
if append:
mode = 'ab'
else:
mode = 'wb'
with self.open(mode) as f:
f.write(bytes)
def text(self, encoding=None, errors='strict'):
r""" Open this file, read it in, return the content as a string.
This method uses ``'U'`` mode, so ``'\r\n'`` and ``'\r'`` are
automatically translated to ``'\n'``.
Optional arguments:
`encoding` - The Unicode encoding (or character set) of
the file. If present, the content of the file is
decoded and returned as a unicode object; otherwise
it is returned as an 8-bit str.
`errors` - How to handle Unicode errors; see :meth:`str.decode`
for the options. Default is 'strict'.
.. seealso:: :meth:`lines`
"""
if encoding is None:
# 8-bit
with self.open('U') as f:
return f.read()
else:
# Unicode
with codecs.open(self, 'r', encoding, errors) as f:
# (Note - Can't use 'U' mode here, since codecs.open
# doesn't support 'U' mode.)
t = f.read()
return (t.replace(u('\r\n'), u('\n'))
.replace(u('\r\x85'), u('\n'))
.replace(u('\r'), u('\n'))
.replace(u('\x85'), u('\n'))
.replace(u('\u2028'), u('\n')))
def write_text(self, text, encoding=None, errors='strict',
linesep=os.linesep, append=False):
r""" Write the given text to this file.
The default behavior is to overwrite any existing file;
to append instead, use the `append=True` keyword argument.
There are two differences between :meth:`write_text` and
:meth:`write_bytes`: newline handling and Unicode handling.
See below.
Parameters:
`text` - str/unicode - The text to be written.
`encoding` - str - The Unicode encoding that will be used.
This is ignored if 'text' isn't a Unicode string.
`errors` - str - How to handle Unicode encoding errors.
Default is 'strict'. See help(unicode.encode) for the
options. This is ignored if 'text' isn't a Unicode
string.
`linesep` - keyword argument - str/unicode - The sequence of
characters to be used to mark end-of-line. The default is
:data:`os.linesep`. You can also specify ``None``; this means to
leave all newlines as they are in `text`.
`append` - keyword argument - bool - Specifies what to do if
the file already exists (``True``: append to the end of it;
``False``: overwrite it.) The default is ``False``.
--- Newline handling.
write_text() converts all standard end-of-line sequences
(``'\n'``, ``'\r'``, and ``'\r\n'``) to your platform's default
end-of-line sequence (see :data:`os.linesep`; on Windows, for example,
the end-of-line marker is ``'\r\n'``).
If you don't like your platform's default, you can override it
using the `linesep=` keyword argument. If you specifically want
write_text() to preserve the newlines as-is, use ``linesep=None``.
This applies to Unicode text the same as to 8-bit text, except
there are three additional standard Unicode end-of-line sequences:
``u'\x85'``, ``u'\r\x85'``, and ``u'\u2028'``.
(This is slightly different from when you open a file for
writing with ``fopen(filename, "w")`` in C or ``open(filename, 'w')``
in Python.)
--- Unicode
If `text` isn't Unicode, then apart from newline handling, the
bytes are written verbatim to the file. The `encoding` and
`errors` arguments are not used and must be omitted.
If `text` is Unicode, it is first converted to bytes using the
specified 'encoding' (or the default encoding if `encoding`
isn't specified). The `errors` argument applies only to this
conversion.
"""
if isinstance(text, unicode):
if linesep is not None:
# Convert all standard end-of-line sequences to
# ordinary newline characters.
text = (text.replace(u('\r\n'), u('\n'))
.replace(u('\r\x85'), u('\n'))
.replace(u('\r'), u('\n'))
.replace(u('\x85'), u('\n'))
.replace(u('\u2028'), u('\n')))
text = text.replace(u('\n'), linesep)
if encoding is None:
encoding = sys.getdefaultencoding()
bytes = text.encode(encoding, errors)
else:
# It is an error to specify an encoding if 'text' is
# an 8-bit string.
assert encoding is None
if linesep is not None:
text = (text.replace('\r\n', '\n')
.replace('\r', '\n'))
bytes = text.replace('\n', linesep)
self.write_bytes(bytes, append)
def lines(self, encoding=None, errors='strict', retain=True):
r""" Open this file, read all lines, return them in a list.
Optional arguments:
`encoding` - The Unicode encoding (or character set) of
the file. The default is None, meaning the content
of the file is read as 8-bit characters and returned
as a list of (non-Unicode) str objects.
`errors` - How to handle Unicode errors; see help(str.decode)
for the options. Default is 'strict'
`retain` - If true, retain newline characters; but all newline
character combinations (``'\r'``, ``'\n'``, ``'\r\n'``) are
translated to ``'\n'``. If false, newline characters are
stripped off. Default is True.
This uses ``'U'`` mode.
.. seealso:: :meth:`text`
"""
if encoding is None and retain:
with self.open('U') as f:
return f.readlines()
else:
return self.text(encoding, errors).splitlines(retain)
def write_lines(self, lines, encoding=None, errors='strict',
linesep=os.linesep, append=False):
r""" Write the given lines of text to this file.
By default this overwrites any existing file at this path.
This puts a platform-specific newline sequence on every line.
See `linesep` below.
`lines` - A list of strings.
`encoding` - A Unicode encoding to use. This applies only if
`lines` contains any Unicode strings.
`errors` - How to handle errors in Unicode encoding. This
also applies only to Unicode strings.
linesep - The desired line-ending. This line-ending is
applied to every line. If a line already has any
standard line ending (``'\r'``, ``'\n'``, ``'\r\n'``,
``u'\x85'``, ``u'\r\x85'``, ``u'\u2028'``), that will
be stripped off and this will be used instead. The
default is os.linesep, which is platform-dependent
(``'\r\n'`` on Windows, ``'\n'`` on Unix, etc.).
Specify ``None`` to write the lines as-is, like
:meth:`file.writelines`.
Use the keyword argument append=True to append lines to the
file. The default is to overwrite the file. Warning:
When you use this with Unicode data, if the encoding of the
existing data in the file is different from the encoding
you specify with the encoding= parameter, the result is
mixed-encoding data, which can really confuse someone trying
to read the file later.
"""
if append:
mode = 'ab'
else:
mode = 'wb'
with self.open(mode) as f:
for line in lines:
isUnicode = isinstance(line, unicode)
if linesep is not None:
# Strip off any existing line-end and add the
# specified linesep string.
if isUnicode:
if line[-2:] in (u('\r\n'), u('\x0d\x85')):
line = line[:-2]
elif line[-1:] in (u('\r'), u('\n'),
u('\x85'), u('\u2028')):
line = line[:-1]
else:
if line[-2:] == '\r\n':
line = line[:-2]
elif line[-1:] in ('\r', '\n'):
line = line[:-1]
line += linesep
if isUnicode:
if encoding is None:
encoding = sys.getdefaultencoding()
line = line.encode(encoding, errors)
f.write(line)
def read_md5(self):
""" Calculate the md5 hash for this file.
This reads through the entire file.
.. seealso:: :meth:`read_hash`
"""
return self.read_hash('md5')
def _hash(self, hash_name):
""" Returns a hash object for the file at the current path.
`hash_name` should be a hash algo name such as 'md5' or 'sha1'
that's available in the :mod:`hashlib` module.
"""
m = hashlib.new(hash_name)
for chunk in self.chunks(8192, mode="rb"):
m.update(chunk)
return m
def read_hash(self, hash_name):
""" Calculate given hash for this file.
List of supported hashes can be obtained from :mod:`hashlib` package.
This reads the entire file.
.. seealso:: :meth:`hashlib.hash.digest`
"""
return self._hash(hash_name).digest()
def read_hexhash(self, hash_name):
""" Calculate given hash for this file, returning hexdigest.
List of supported hashes can be obtained from :mod:`hashlib` package.
This reads the entire file.
.. seealso:: :meth:`hashlib.hash.hexdigest`
"""
return self._hash(hash_name).hexdigest()
# --- Methods for querying the filesystem.
# N.B. On some platforms, the os.path functions may be implemented in C
# (e.g. isdir on Windows, Python 3.2.2), and compiled functions don't get
# bound. Playing it safe and wrapping them all in method calls.
def isabs(self):
""" .. seealso:: :func:`os.path.isabs` """
return self.module.isabs(self)
def exists(self):
""" .. seealso:: :func:`os.path.exists` """
return self.module.exists(self)
def isdir(self):
""" .. seealso:: :func:`os.path.isdir` """
return self.module.isdir(self)
def isfile(self):
""" .. seealso:: :func:`os.path.isfile` """
return self.module.isfile(self)
def islink(self):
""" .. seealso:: :func:`os.path.islink` """
return self.module.islink(self)
def ismount(self):
""" .. seealso:: :func:`os.path.ismount` """
return self.module.ismount(self)
def samefile(self, other):
""" .. seealso:: :func:`os.path.samefile` """
return self.module.samefile(self, other)
def getatime(self):
""" .. seealso:: :attr:`atime`, :func:`os.path.getatime` """
return self.module.getatime(self)
atime = property(
getatime, None, None,
""" Last access time of the file.
.. seealso:: :meth:`getatime`, :func:`os.path.getatime`
""")
def getmtime(self):
""" .. seealso:: :attr:`mtime`, :func:`os.path.getmtime` """
return self.module.getmtime(self)
mtime = property(
getmtime, None, None,
""" Last-modified time of the file.
.. seealso:: :meth:`getmtime`, :func:`os.path.getmtime`
""")
def getctime(self):
""" .. seealso:: :attr:`ctime`, :func:`os.path.getctime` """
return self.module.getctime(self)
ctime = property(
getctime, None, None,
""" Creation time of the file.
.. seealso:: :meth:`getctime`, :func:`os.path.getctime`
""")
def getsize(self):
""" .. seealso:: :attr:`size`, :func:`os.path.getsize` """
return self.module.getsize(self)
size = property(
getsize, None, None,
""" Size of the file, in bytes.
.. seealso:: :meth:`getsize`, :func:`os.path.getsize`
""")
if hasattr(os, 'access'):
def access(self, mode):
""" Return true if current user has access to this path.
mode - One of the constants :data:`os.F_OK`, :data:`os.R_OK`,
:data:`os.W_OK`, :data:`os.X_OK`
.. seealso:: :func:`os.access`
"""
return os.access(self, mode)
def stat(self):
""" Perform a ``stat()`` system call on this path.
.. seealso:: :meth:`lstat`, :func:`os.stat`
"""
return os.stat(self)
def lstat(self):
""" Like :meth:`stat`, but do not follow symbolic links.
.. seealso:: :meth:`stat`, :func:`os.lstat`
"""
return os.lstat(self)
def __get_owner_windows(self):
r"""
Return the name of the owner of this file or directory. Follow
symbolic links.
Return a name of the form ``ur'DOMAIN\User Name'``; may be a group.
.. seealso:: :attr:`owner`
"""
desc = win32security.GetFileSecurity(
self, win32security.OWNER_SECURITY_INFORMATION)
sid = desc.GetSecurityDescriptorOwner()
account, domain, typecode = win32security.LookupAccountSid(None, sid)
return domain + u('\\') + account
def __get_owner_unix(self):
"""
Return the name of the owner of this file or directory. Follow
symbolic links.
.. seealso:: :attr:`owner`
"""
st = self.stat()
return pwd.getpwuid(st.st_uid).pw_name
def __get_owner_not_implemented(self):
raise NotImplementedError("Ownership not available on this platform.")
if 'win32security' in globals():
get_owner = __get_owner_windows
elif 'pwd' in globals():
get_owner = __get_owner_unix
else:
get_owner = __get_owner_not_implemented
owner = property(
get_owner, None, None,
""" Name of the owner of this file or directory.
.. seealso:: :meth:`get_owner`""")
if hasattr(os, 'statvfs'):
def statvfs(self):
""" Perform a ``statvfs()`` system call on this path.
.. seealso:: :func:`os.statvfs`
"""
return os.statvfs(self)
if hasattr(os, 'pathconf'):
def pathconf(self, name):
""" .. seealso:: :func:`os.pathconf` """
return os.pathconf(self, name)
#
# --- Modifying operations on files and directories
def utime(self, times):
""" Set the access and modified times of this file.
.. seealso:: :func:`os.utime`
"""
os.utime(self, times)
return self
def chmod(self, mode):
""" .. seealso:: :func:`os.chmod` """
os.chmod(self, mode)
return self
if hasattr(os, 'chown'):
def chown(self, uid=-1, gid=-1):
""" .. seealso:: :func:`os.chown` """
os.chown(self, uid, gid)
return self
def rename(self, new):
""" .. seealso:: :func:`os.rename` """
os.rename(self, new)
return self._next_class(new)
def renames(self, new):
""" .. seealso:: :func:`os.renames` """
os.renames(self, new)
return self._next_class(new)
#
# --- Create/delete operations on directories
def mkdir(self, mode=o777):
""" .. seealso:: :func:`os.mkdir` """
os.mkdir(self, mode)
return self
def mkdir_p(self, mode=o777):
""" Like :meth:`mkdir`, but does not raise an exception if the
directory already exists. """
try:
self.mkdir(mode)
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.EEXIST:
raise
return self
def makedirs(self, mode=o777):
""" .. seealso:: :func:`os.makedirs` """
os.makedirs(self, mode)
return self
def makedirs_p(self, mode=o777):
""" Like :meth:`makedirs`, but does not raise an exception if the
directory already exists. """
try:
self.makedirs(mode)
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.EEXIST:
raise
return self
def rmdir(self):
""" .. seealso:: :func:`os.rmdir` """
os.rmdir(self)
return self
def rmdir_p(self):
""" Like :meth:`rmdir`, but does not raise an exception if the
directory is not empty or does not exist. """
try:
self.rmdir()
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.ENOTEMPTY and e.errno != errno.EEXIST:
raise
return self
def removedirs(self):
""" .. seealso:: :func:`os.removedirs` """
os.removedirs(self)
return self
def removedirs_p(self):
""" Like :meth:`removedirs`, but does not raise an exception if the
directory is not empty or does not exist. """
try:
self.removedirs()
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.ENOTEMPTY and e.errno != errno.EEXIST:
raise
return self
# --- Modifying operations on files
def touch(self):
""" Set the access/modified times of this file to the current time.
Create the file if it does not exist.
"""
fd = os.open(self, os.O_WRONLY | os.O_CREAT, o666)
os.close(fd)
os.utime(self, None)
return self
def remove(self):
""" .. seealso:: :func:`os.remove` """
os.remove(self)
return self
def remove_p(self):
""" Like :meth:`remove`, but does not raise an exception if the
file does not exist. """
try:
self.unlink()
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.ENOENT:
raise
return self
def unlink(self):
""" .. seealso:: :func:`os.unlink` """
os.unlink(self)
return self
def unlink_p(self):
""" Like :meth:`unlink`, but does not raise an exception if the
file does not exist. """
self.remove_p()
return self
# --- Links
if hasattr(os, 'link'):
def link(self, newpath):
""" Create a hard link at `newpath`, pointing to this file.
.. seealso:: :func:`os.link`
"""
os.link(self, newpath)
return self._next_class(newpath)
if hasattr(os, 'symlink'):
def symlink(self, newlink):
""" Create a symbolic link at `newlink`, pointing here.
.. seealso:: :func:`os.symlink`
"""
os.symlink(self, newlink)
return self._next_class(newlink)
if hasattr(os, 'readlink'):
def readlink(self):
""" Return the path to which this symbolic link points.
The result may be an absolute or a relative path.
.. seealso:: :meth:`readlinkabs`, :func:`os.readlink`
"""
return self._next_class(os.readlink(self))
def readlinkabs(self):
""" Return the path to which this symbolic link points.
The result is always an absolute path.
.. seealso:: :meth:`readlink`, :func:`os.readlink`
"""
p = self.readlink()
if p.isabs():
return p
else:
return (self.parent / p).abspath()
#
# --- High-level functions from shutil
copyfile = shutil.copyfile
copymode = shutil.copymode
copystat = shutil.copystat
copy = shutil.copy
copy2 = shutil.copy2
copytree = shutil.copytree
if hasattr(shutil, 'move'):
move = shutil.move
rmtree = shutil.rmtree
def rmtree_p(self):
""" Like :meth:`rmtree`, but does not raise an exception if the
directory does not exist. """
try:
self.rmtree()
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.ENOENT:
raise
return self
def chdir(self):
""" .. seealso:: :func:`os.chdir` """
os.chdir(self)
cd = chdir
#
# --- Special stuff from os
if hasattr(os, 'chroot'):
def chroot(self):
""" .. seealso:: :func:`os.chroot` """
os.chroot(self)
if hasattr(os, 'startfile'):
def startfile(self):
""" .. seealso:: :func:`os.startfile` """
os.startfile(self)
return self
# in-place re-writing, courtesy of Martijn Pieters
# http://www.zopatista.com/python/2013/11/26/inplace-file-rewriting/
@contextlib.contextmanager
def in_place(self, mode='r', buffering=-1, encoding=None, errors=None,
newline=None, backup_extension=None):
"""
A context in which a file may be re-written in-place with new content.
Yields a tuple of (readable, writable) file objects, where writable
replaces readable.
If an exception occurs, the old file is restored, removing the
written data.
Mode *must not* use 'w', 'a' or '+'; only read-only-modes are
allowed. A ValueError is raised on invalid modes.
For example, to add line numbers to a file::
p = path(filename)
assert p.isfile()
        with p.in_place() as (reader, writer):
            for number, line in enumerate(reader, 1):
                writer.write('{0:3}: '.format(number))
writer.write(line)
Thereafter, the file at filename will have line numbers in it.
"""
import io
if set(mode).intersection('wa+'):
raise ValueError('Only read-only file modes can be used')
# move existing file to backup, create new file with same permissions
# borrowed extensively from the fileinput module
backup_fn = self + (backup_extension or os.extsep + 'bak')
try:
os.unlink(backup_fn)
except os.error:
pass
os.rename(self, backup_fn)
readable = io.open(backup_fn, mode, buffering=buffering,
encoding=encoding, errors=errors, newline=newline)
try:
perm = os.fstat(readable.fileno()).st_mode
except OSError:
writable = open(self, 'w' + mode.replace('r', ''),
buffering=buffering, encoding=encoding, errors=errors,
newline=newline)
else:
os_mode = os.O_CREAT | os.O_WRONLY | os.O_TRUNC
if hasattr(os, 'O_BINARY'):
os_mode |= os.O_BINARY
fd = os.open(self, os_mode, perm)
writable = io.open(fd, "w" + mode.replace('r', ''),
buffering=buffering, encoding=encoding, errors=errors,
newline=newline)
try:
if hasattr(os, 'chmod'):
os.chmod(self, perm)
except OSError:
pass
try:
yield readable, writable
except Exception:
# move backup back
readable.close()
writable.close()
try:
os.unlink(self)
except os.error:
pass
os.rename(backup_fn, self)
raise
else:
readable.close()
writable.close()
finally:
try:
os.unlink(backup_fn)
except os.error:
pass
class tempdir(path):
"""
A temporary directory via tempfile.mkdtemp, and constructed with the
same parameters that you can use as a context manager.
Example:
with tempdir() as d:
# do stuff with the path object "d"
# here the directory is deleted automatically
.. seealso:: :func:`tempfile.mkdtemp`
"""
@ClassProperty
@classmethod
def _next_class(cls):
return path
def __new__(cls, *args, **kwargs):
dirname = tempfile.mkdtemp(*args, **kwargs)
return super(tempdir, cls).__new__(cls, dirname)
def __init__(self, *args, **kwargs):
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if not exc_value:
self.rmtree()
def _permission_mask(mode):
"""
Convert a Unix chmod symbolic mode like 'ugo+rwx' to a function
suitable for applying to a mask to affect that change.
>>> mask = _permission_mask('ugo+rwx')
>>> mask(o554) == o777
True
>>> _permission_mask('go-x')(o777) == o766
True
"""
parsed = re.match('(?P<who>[ugo]+)(?P<op>[-+])(?P<what>[rwx]+)$', mode)
if not parsed:
raise ValueError("Unrecognized symbolic mode", mode)
spec_map = dict(r=4, w=2, x=1)
spec = reduce(operator.or_, [spec_map[perm]
for perm in parsed.group('what')])
# now apply spec to each in who
shift_map = dict(u=6, g=3, o=0)
mask = reduce(operator.or_, [spec << shift_map[subj]
for subj in parsed.group('who')])
op = parsed.group('op')
# if op is -, invert the mask
if op == '-':
mask ^= o777
op_map = {'+': operator.or_, '-': operator.and_}
return functools.partial(op_map[op], mask)
class CaseInsensitivePattern(unicode):
"""
A string with a 'normcase' property, suitable for passing to
:meth:`listdir`, :meth:`dirs`, :meth:`files`, :meth:`walk`,
:meth:`walkdirs`, or :meth:`walkfiles` to match case-insensitive.
For example, to get all files ending in .py, .Py, .pY, or .PY in the
current directory::
from path import path, CaseInsensitivePattern as ci
path('.').files(ci('*.py'))
"""
@property
def normcase(self):
return __import__('ntpath').normcase
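# A minimal usage sketch of the API above (a round trip through write_text,
# text and remove_p inside a temporary directory):
#
#     with tempdir() as d:
#         p = d / 'demo.txt'
#         p.write_text('hello\n')
#         assert p.text() == 'hello\n'
#         p.remove_p()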
|
PierreBdR/point_tracker
|
point_tracker/path.py
|
Python
|
gpl-2.0
| 49,237
|
from pyspark import SparkContext
from pyspark import SparkConf
from pyspark.streaming import StreamingContext
from pyspark.streaming.kafka import KafkaUtils
from pyspark.sql import SQLContext, Row
from pyspark.sql.types import *
from cassandra.cluster import Cluster
from cassandra import ConsistencyLevel
from cqlengine import connection
from cqlengine import columns
from cqlengine.models import Model
from cqlengine.management import sync_table
from datetime import datetime
import json
conf = SparkConf().setAppName("Finance News, Stream Twitter").set("spark.cores.max", "2")
sc = SparkContext(conf=conf)
ssc = StreamingContext(sc, 1)
def getSqlContextInstance(sparkContext):
if ('sqlContextSingletonInstance' not in globals()):
globals()['sqlContextSingletonInstance'] = SQLContext(sparkContext)
return globals()['sqlContextSingletonInstance']
kafkaStream = KafkaUtils.createStream(ssc, "ec2-54-215-247-116.us-west-1.compute.amazonaws.com:2181", "twitter_stream", {"twitter": 1})
lines = kafkaStream.map(lambda x: x[1])
# connect to cassandra
cluster = Cluster(['ec2-54-215-237-86.us-west-1.compute.amazonaws.com'])
session = cluster.connect("finance_news")
st_news = session.prepare("INSERT INTO news (company, summary, newstime, author, newsoutlet, source) VALUES (?,?,?,?,?,?) USING TTL 7776000") #let news live for 90 days in the database
def process(rdd):
sqlContext = getSqlContextInstance(rdd.context)
rowRdd = rdd.map(lambda w: Row(summary=json.loads(w)["summary"],
source=json.loads(w)["source"],
newsoutlet=json.loads(w)["newsoutlet"],
author=json.loads(w)["author"],
company=json.loads(w)["company"].replace('$', ''),
newstime=datetime.strptime(json.loads(w)["newstime"].encode('utf-8'), "%a %b %d %H:%M:%S %Y")))
df_news = sqlContext.createDataFrame(rowRdd)
for row in df_news.collect():
session.execute(st_news, (row.company, row.summary, row.newstime, row.author, row.newsoutlet, row.source, ))
lines.foreachRDD(process)
ssc.start()
ssc.awaitTermination()
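# For reference, each Kafka message value is expected (inferred from the Row
# construction in process()) to be JSON such as:
#   {"summary": "...", "source": "...", "newsoutlet": "...", "author": "...",
#    "company": "$AAPL", "newstime": "Mon Jan 04 21:30:00 2016"}
# where the leading '$' is stripped from company and newstime must match the
# "%a %b %d %H:%M:%S %Y" format passed to datetime.strptime (values here are
# illustrative).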
|
andyikchu/insightproject
|
realtime_processing/twitter_stream.py
|
Python
|
gpl-2.0
| 2,063
|
## begin license ##
#
# "Weightless" is a High Performance Asynchronous Networking Library. See http://weightless.io
#
# Copyright (C) 2012-2013, 2017, 2020-2021 Seecr (Seek You Too B.V.) https://seecr.nl
#
# This file is part of "Weightless"
#
# "Weightless" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Weightless" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Weightless"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
import sys
from contextlib import contextmanager
from functools import wraps
from io import StringIO
def _set_replaced_stream(name, replacement=None):
stream = getattr(sys, name)
def andBackAgain():
setattr(sys, name, stream)
streamReplacement = StringIO() if replacement is None else replacement
setattr(sys, name, streamReplacement)
return streamReplacement, andBackAgain
class _ContextMngrOrDecorated(object):
def __init__(self, streamName, replacement=None):
self._streamName = streamName
self._replacement = replacement
def __call__(self, func):
@wraps(func)
def wrapper(*args, **kwargs):
with self:
return func(*args, **kwargs)
return wrapper
def __enter__(self):
mockStream, self._back = _set_replaced_stream(self._streamName, self._replacement)
return mockStream
def __exit__(self, exc_type, exc_value, traceback):
self._back()
return False
def stderr_replaced(*func_arg):
if func_arg:
return _ContextMngrOrDecorated(streamName='stderr')(*func_arg)
return _ContextMngrOrDecorated(streamName='stderr')
def stdout_replaced(*func_arg):
if func_arg:
return _ContextMngrOrDecorated(streamName='stdout')(*func_arg)
return _ContextMngrOrDecorated(streamName='stdout')
def stdin_replaced(inStream=None):
return _ContextMngrOrDecorated(streamName='stdin', replacement=inStream)
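if __name__ == '__main__':
    # Minimal self-check sketch of the two supported usage styles: a context
    # manager that yields the replacement StringIO, and a decorator that
    # swaps the stream only for the duration of the call.
    with stdout_replaced() as out:
        print('captured')
    assert out.getvalue() == 'captured\n'
    @stdout_replaced
    def quiet():
        print('also captured')
    quiet()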
|
seecr/weightless-core
|
test/lib/seecr-test-2.0/seecr/test/io.py
|
Python
|
gpl-2.0
| 2,438
|
"""urlconf for the base application"""
from django.conf.urls import url, patterns
urlpatterns = patterns('base.views',
url(r'^$', 'home', name='home'),
)
|
kralla/django-base
|
base/urls.py
|
Python
|
gpl-2.0
| 180
|
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <headingcell level=1>
# Wright-Fisher model of mutation, selection and random genetic drift
# <markdowncell>
# A Wright-Fisher model has a fixed population size *N* and discrete non-overlapping generations. Each generation, each individual has a random number of offspring whose mean is proportional to the individual's fitness. Each generation, mutation may occur. Mutations may increase or decrease individual's fitness, which affects the chances of that individual's offspring in subsequent generations.
# <markdowncell>
# Here, I'm using a fitness model where some proportion of the time a mutation will have a fixed fitness effect, increasing or decreasing fitness by a fixed amount.
# <headingcell level=2>
# Setup
# <codecell>
import numpy as np
import itertools
# <headingcell level=2>
# Make population dynamic model
# <headingcell level=3>
# Basic parameters
# <codecell>
pop_size = 100
# <codecell>
seq_length = 10
# <codecell>
alphabet = ['A', 'T']
# <codecell>
base_haplotype = "AAAAAAAAAA"
# <codecell>
fitness_effect = 1.1 # fitness effect if a functional mutation occurs
# <codecell>
fitness_chance = 0.1 # chance that a mutation has a fitness effect
# <headingcell level=3>
# Population of haplotypes maps to counts and fitnesses
# <markdowncell>
# Store this as a lightweight Dictionary that maps a string to a count. All the sequences together will have count *N*.
# <codecell>
pop = {}
# <codecell>
pop["AAAAAAAAAA"] = 40
# <codecell>
pop["AAATAAAAAA"] = 30
# <codecell>
pop["AATTTAAAAA"] = 30
# <markdowncell>
# *Map haplotype string to fitness float.*
# <codecell>
fitness = {}
# <codecell>
fitness["AAAAAAAAAA"] = 1.0
# <codecell>
fitness["AAATAAAAAA"] = 1.05
# <codecell>
fitness["AATTTAAAAA"] = 1.10
# <codecell>
pop["AAATAAAAAA"]
# <codecell>
fitness["AAATAAAAAA"]
# <headingcell level=3>
# Add mutation
# <codecell>
mutation_rate = 0.005 # per gen per individual per site
# <codecell>
def get_mutation_count():
mean = mutation_rate * pop_size * seq_length
return np.random.poisson(mean)
# <codecell>
def get_random_haplotype():
haplotypes = pop.keys()
frequencies = [x/float(pop_size) for x in pop.values()]
total = sum(frequencies)
frequencies = [x / total for x in frequencies]
return np.random.choice(haplotypes, p=frequencies)
# <codecell>
def get_mutant(haplotype):
site = np.random.randint(seq_length)
possible_mutations = list(alphabet)
possible_mutations.remove(haplotype[site])
mutation = np.random.choice(possible_mutations)
new_haplotype = haplotype[:site] + mutation + haplotype[site+1:]
return new_haplotype
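# <markdowncell>
# *For example, drawing a mutant of the all-A haplotype flips exactly one random site:*
# <codecell>
get_mutant("AAAAAAAAAA")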
# <markdowncell>
# *Mutations have fitness effects*
# <codecell>
def get_fitness(haplotype):
old_fitness = fitness[haplotype]
if (np.random.random() < fitness_chance):
return old_fitness * fitness_effect
else:
return old_fitness
# <codecell>
get_fitness("AAAAAAAAAA")
# <markdowncell>
# *If a mutation event creates a new haplotype, assign it a random fitness.*
# <codecell>
def mutation_event():
haplotype = get_random_haplotype()
if pop[haplotype] > 1:
pop[haplotype] -= 1
new_haplotype = get_mutant(haplotype)
if new_haplotype in pop:
pop[new_haplotype] += 1
else:
pop[new_haplotype] = 1
if new_haplotype not in fitness:
fitness[new_haplotype] = get_fitness(haplotype)
# <codecell>
mutation_event()
# <codecell>
pop
# <codecell>
fitness
# <codecell>
def mutation_step():
mutation_count = get_mutation_count()
for i in range(mutation_count):
mutation_event()
# <headingcell level=3>
# Genetic drift and fitness affect which haplotypes make it to the next generation
# <markdowncell>
# *Fitness weights the multinomial draw.*
# <codecell>
def get_offspring_counts():
haplotypes = pop.keys()
frequencies = [pop[haplotype]/float(pop_size) for haplotype in haplotypes]
fitnesses = [fitness[haplotype] for haplotype in haplotypes]
weights = [x * y for x,y in zip(frequencies, fitnesses)]
total = sum(weights)
weights = [x / total for x in weights]
return list(np.random.multinomial(pop_size, weights))
# <codecell>
get_offspring_counts()
# <codecell>
def offspring_step():
counts = get_offspring_counts()
for (haplotype, count) in zip(pop.keys(), counts):
if (count > 0):
pop[haplotype] = count
else:
del pop[haplotype]
# <headingcell level=3>
# Combine and iterate
# <codecell>
def time_step():
mutation_step()
offspring_step()
# <codecell>
generations = 5
# <codecell>
def simulate():
for i in range(generations):
time_step()
# <headingcell level=3>
# Record
# <markdowncell>
# We want to keep a record of past population frequencies to understand dynamics through time. At each step in the simulation, we append to a history object.
# <codecell>
history = []
# <codecell>
def simulate():
clone_pop = dict(pop)
history.append(clone_pop)
for i in range(generations):
time_step()
clone_pop = dict(pop)
history.append(clone_pop)
# <codecell>
simulate()
# <headingcell level=2>
# Analyze trajectories
# <headingcell level=3>
# Calculate diversity
# <codecell>
def get_distance(seq_a, seq_b):
diffs = 0
length = len(seq_a)
assert len(seq_a) == len(seq_b)
for chr_a, chr_b in zip(seq_a, seq_b):
if chr_a != chr_b:
diffs += 1
return diffs / float(length)
# <codecell>
def get_diversity(population):
haplotypes = population.keys()
haplotype_count = len(haplotypes)
diversity = 0
for i in range(haplotype_count):
for j in range(haplotype_count):
haplotype_a = haplotypes[i]
haplotype_b = haplotypes[j]
frequency_a = population[haplotype_a] / float(pop_size)
frequency_b = population[haplotype_b] / float(pop_size)
frequency_pair = frequency_a * frequency_b
diversity += frequency_pair * get_distance(haplotype_a, haplotype_b)
return diversity
# <codecell>
def get_diversity_trajectory():
trajectory = [get_diversity(generation) for generation in history]
return trajectory
# <headingcell level=3>
# Plot diversity
# <codecell>
%matplotlib inline
import matplotlib.pyplot as plt
import matplotlib as mpl
# <codecell>
def diversity_plot():
mpl.rcParams['font.size']=14
trajectory = get_diversity_trajectory()
plt.plot(trajectory, "#447CCD")
plt.ylabel("diversity")
plt.xlabel("generation")
# <headingcell level=3>
# Analyze and plot divergence
# <codecell>
def get_divergence(population):
haplotypes = population.keys()
divergence = 0
for haplotype in haplotypes:
frequency = population[haplotype] / float(pop_size)
divergence += frequency * get_distance(base_haplotype, haplotype)
return divergence
# <codecell>
def get_divergence_trajectory():
trajectory = [get_divergence(generation) for generation in history]
return trajectory
# <codecell>
def divergence_plot():
mpl.rcParams['font.size']=14
trajectory = get_divergence_trajectory()
plt.plot(trajectory, "#447CCD")
plt.ylabel("divergence")
plt.xlabel("generation")
# <headingcell level=3>
# Plot haplotype trajectories
# <codecell>
def get_frequency(haplotype, generation):
pop_at_generation = history[generation]
if haplotype in pop_at_generation:
return pop_at_generation[haplotype]/float(pop_size)
else:
return 0
# <codecell>
def get_trajectory(haplotype):
trajectory = [get_frequency(haplotype, gen) for gen in range(generations)]
return trajectory
# <codecell>
def get_all_haplotypes():
haplotypes = set()
for generation in history:
for haplotype in generation:
haplotypes.add(haplotype)
return haplotypes
# <codecell>
colors = ["#781C86", "#571EA2", "#462EB9", "#3F47C9", "#3F63CF", "#447CCD", "#4C90C0", "#56A0AE", "#63AC9A", "#72B485", "#83BA70", "#96BD60", "#AABD52", "#BDBB48", "#CEB541", "#DCAB3C", "#E49938", "#E68133", "#E4632E", "#DF4327", "#DB2122"]
# <codecell>
colors_lighter = ["#A567AF", "#8F69C1", "#8474D1", "#7F85DB", "#7F97DF", "#82A8DD", "#88B5D5", "#8FC0C9", "#97C8BC", "#A1CDAD", "#ACD1A0", "#B9D395", "#C6D38C", "#D3D285", "#DECE81", "#E8C77D", "#EDBB7A", "#EEAB77", "#ED9773", "#EA816F", "#E76B6B"]
# <codecell>
def stacked_trajectory_plot(xlabel="generation"):
mpl.rcParams['font.size']=18
haplotypes = get_all_haplotypes()
trajectories = [get_trajectory(haplotype) for haplotype in haplotypes]
plt.stackplot(range(generations), trajectories, colors=colors_lighter)
plt.ylim(0, 1)
plt.ylabel("frequency")
plt.xlabel(xlabel)
# <headingcell level=3>
# Plot SNP trajectories
# <codecell>
def get_snp_frequency(site, generation):
minor_allele_frequency = 0.0
pop_at_generation = history[generation]
for haplotype in pop_at_generation.keys():
allele = haplotype[site]
frequency = pop_at_generation[haplotype] / float(pop_size)
if allele != "A":
minor_allele_frequency += frequency
return minor_allele_frequency
# <codecell>
def get_snp_trajectory(site):
trajectory = [get_snp_frequency(site, gen) for gen in range(generations)]
return trajectory
# <markdowncell>
# Find all variable sites.
# <codecell>
def get_all_snps():
snps = set()
for generation in history:
for haplotype in generation:
for site in range(seq_length):
if haplotype[site] != "A":
snps.add(site)
return snps
# <codecell>
def snp_trajectory_plot(xlabel="generation"):
mpl.rcParams['font.size']=18
snps = get_all_snps()
trajectories = [get_snp_trajectory(snp) for snp in snps]
data = []
for trajectory, color in itertools.izip(trajectories, itertools.cycle(colors)):
data.append(range(generations))
data.append(trajectory)
data.append(color)
fig = plt.plot(*data)
plt.ylim(0, 1)
plt.ylabel("frequency")
plt.xlabel(xlabel)
# <headingcell level=2>
# Scale up
# <markdowncell>
# Here, we scale up to more interesting parameter values.
# <codecell>
pop_size = 50
seq_length = 100
generations = 500
mutation_rate = 0.0001 # per gen per individual per site
fitness_effect = 1.1 # fitness effect if a functional mutation occurs
fitness_chance = 0.1 # chance that a mutation has a fitness effect
# <markdowncell>
# In this case the genome-wide mutation rate is $\mu$ = 0.01 mutations per individual per generation.
# <codecell>
seq_length * mutation_rate
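# <markdowncell>
# Across the whole population of $N$ = 50 individuals, this amounts to $N\mu$ = 0.5 new mutations per generation.
# <codecell>
pop_size * seq_length * mutation_rate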
# <markdowncell>
# And the population genetic parameter $\theta$, which equals $2N\mu$, is 1.
# <codecell>
2 * pop_size * seq_length * mutation_rate
# <codecell>
base_haplotype = ''.join(["A" for i in range(seq_length)])
pop.clear()
fitness.clear()
del history[:]
pop[base_haplotype] = pop_size
fitness[base_haplotype] = 1.0
# <codecell>
simulate()
# <codecell>
plt.figure(num=None, figsize=(14, 14), dpi=80, facecolor='w', edgecolor='k')
plt.subplot2grid((3,2), (0,0), colspan=2)
stacked_trajectory_plot()
plt.subplot2grid((3,2), (1,0), colspan=2)
snp_trajectory_plot()
plt.subplot2grid((3,2), (2,0))
diversity_plot()
plt.subplot2grid((3,2), (2,1))
divergence_plot()
|
alvason/probability-insighter
|
code/mutation-drift-selection.py
|
Python
|
gpl-2.0
| 11,460
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2005-2008 Francisco José Rodríguez Bogado, #
# Diego Muñoz Escalante. #
# (pacoqueen@users.sourceforge.net, escalant3@users.sourceforge.net) #
# #
# This file is part of GeotexInn. #
# #
# GeotexInn is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 2 of the License, or #
# (at your option) any later version. #
# #
# GeotexInn is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with GeotexInn; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA #
###############################################################################
###################################################################
## consulta_partidas_por_producto.py
###################################################################
## NOTES:
##
###################################################################
## Changelog:
## April 4, 2006 -> Started
##
###################################################################
from ventana import Ventana
from formularios import utils
import pygtk
pygtk.require('2.0')
import gtk, time
from framework import pclases
import mx.DateTime
from informes import geninformes
from ventana_progreso import VentanaActividad
class ConsultaPartidasPorProducto(Ventana):
def __init__(self, objeto = None, usuario = None):
"""
Constructor. objeto puede ser un objeto de pclases con el que
comenzar la ventana (en lugar del primero de la tabla, que es
el que se muestra por defecto).
"""
Ventana.__init__(self, 'consulta_partidas_por_producto.glade', objeto, usuario = usuario)
connections = {'b_salir/clicked': self.salir,
'b_buscar/clicked': self.buscar,
'b_imprimir/clicked': self.imprimir,
'b_fecha_inicio/clicked': self.set_inicio,
'b_fecha_fin/clicked': self.set_fin,
"b_exportar/clicked": self.exportar}
self.add_connections(connections)
cols = (('Num. Partida','gobject.TYPE_INT64',False,True,False,None),
('Código','gobject.TYPE_STRING',False,True,False,None),
('Fecha fab.','gobject.TYPE_STRING',False,True,False,None),
('Longitudinal','gobject.TYPE_STRING',False,True,False,None),
('Transversal','gobject.TYPE_STRING',False,True,False,None),
('Compresión','gobject.TYPE_STRING',False,True,False,None),
('Perforación','gobject.TYPE_STRING',False,True,False,None),
('Permeabilidad','gobject.TYPE_STRING',False,True,False,None),
('Poros','gobject.TYPE_STRING',False,True,False,None),
('Espesor','gobject.TYPE_STRING',False,True,False,None),
('Piramidal','gobject.TYPE_STRING',False,True,False,None),
('Idpartida','gobject.TYPE_INT64',False,False,False,None))
utils.preparar_listview(self.wids['tv_datos'], cols)
self.wids['tv_datos'].connect("row-activated", self.abrir_parte_tv)
utils.rellenar_lista(self.wids['cmbe_producto'], [(p.id, p.descripcion) for p in pclases.ProductoVenta.select(pclases.ProductoVenta.q.camposEspecificosRolloID != None, orderBy = 'descripcion')])
temp = time.localtime()
self.fin = mx.DateTime.localtime()
self.inicio = None
self.resultado = []
self.wids['e_fechafin'].set_text(utils.str_fecha(temp))
gtk.main()
def exportar(self, boton):
"""
        Export the TreeView contents to a CSV file.
"""
from informes.treeview2csv import treeview2csv
from formularios.reports import abrir_csv
tv = self.wids['tv_datos']
abrir_csv(treeview2csv(tv))
def chequear_cambios(self):
pass
def rellenar_tabla(self,lista = []):
"""
        Fill the model with the search results stored in a list
        of lots (partidas).
"""
model = self.wids['tv_datos'].get_model()
self.wids['tv_datos'].freeze_child_notify()
self.wids['tv_datos'].set_model(None)
model.clear()
for elem in lista:
model.append((elem.numpartida,
elem.codigo,
                          # Manufacturing date of the first article in the lot
elem.rollos[0].articulos[0].parteDeProduccion and \
utils.str_fecha(elem.rollos[0].articulos[0].parteDeProduccion.fecha) or \
"¡PARTE NO ENCONTRADO!",
"%.2f" % elem.longitudinal,
"%.2f" % elem.transversal,
"%.2f" % elem.compresion,
"%.2f" % elem.perforacion,
"%.2f" % elem.permeabilidad,
"%.2f" % elem.poros,
"%.2f" % elem.espesor,
"%.2f" % elem.piramidal,
elem.id))
# elem.rollos[0].articulos[0].parteDeProduccion and \
# elem.rollos[0].articulos[0].parteDeProduccion.id or \
# -1))
self.wids['tv_datos'].set_model(model)
self.wids['tv_datos'].thaw_child_notify()
def set_inicio(self,boton):
temp = utils.mostrar_calendario(padre = self.wids['ventana'])
self.wids['e_fechainicio'].set_text(utils.str_fecha(temp))
self.inicio = mx.DateTime.DateTimeFrom(day = temp[0], month = temp[1], year = temp[2])
def set_fin(self,boton):
temp = utils.mostrar_calendario(padre = self.wids['ventana'])
self.wids['e_fechafin'].set_text(utils.str_fecha(temp))
self.fin = mx.DateTime.DateTimeFrom(day = temp[0], month = temp[1], year = temp[2])
def por_fecha(self,e1,e2):
"""
        Comparison function used to sort a list of objects by date.
"""
if e1.fecha < e2.fecha:
return -1
elif e1.fecha > e2.fecha:
return 1
else:
return 0
def get_unambiguous_fecha(self, fecha):
try:
res = fecha.strftime('%B %d, %Y')
except AttributeError: # Fecha es None
return ""
trans = {'January': 'enero',
'February': 'febrero',
'March': 'marzo',
'April': 'abril',
'May': 'mayo',
'June': 'junio',
'July': 'julio',
'August': 'agosto',
'September': 'septiembre',
'October': 'octubre',
'November': 'noviembre',
'December': 'diciembre'}
for in_english in trans:
res = res.replace(trans[in_english], in_english)
return res
def buscar(self,boton):
"""
"""
idproducto = utils.combo_get_value(self.wids['cmbe_producto'])
if idproducto == None:
utils.dialogo_info(titulo = 'ERROR',
texto = 'Seleccione un producto',
padre = self.wids['ventana'])
return
producto = pclases.ProductoVenta.get(idproducto)
and_fecha_inicio = "AND parte_de_produccion.fecha >= '%s'" % (self.get_unambiguous_fecha(self.inicio))
if producto.es_rollo():
parte_where_de_consulta = """
partida.id IN
(SELECT rollo.partida_id
FROM rollo
WHERE rollo.id IN
(SELECT articulo.rollo_id
FROM articulo
WHERE articulo.producto_venta_id = %d AND articulo.parte_de_produccion_id IN
(SELECT parte_de_produccion.id
FROM parte_de_produccion
WHERE parte_de_produccion.fecha <= '%s' %s
ORDER BY parte_de_produccion.fecha
)
)
) """ % (producto.id,
self.get_unambiguous_fecha(self.fin),
self.inicio and and_fecha_inicio or "")
else:
parte_where_de_consulta = """
partida.id IN
(SELECT bala.partida_carga_id
FROM bala
WHERE bala.id IN
(SELECT articulo.rollo_id
FROM articulo
WHERE articulo.producto_venta_id = %d AND articulo.parte_de_produccion_id IN
(SELECT parte_de_produccion.id
FROM parte_de_produccion
WHERE parte_de_produccion.fecha <= '%s' %s
ORDER BY parte_de_produccion.fecha
)
)
) """ % (producto.id,
self.get_unambiguous_fecha(self.fin),
self.inicio and and_fecha_inicio or "")
partidas = pclases.Partida.select(parte_where_de_consulta, distinct = True)
        # Up to here, the optimized query to fetch the lots. Now retrieve the data itself:
vpro = VentanaActividad(padre = self.wids['ventana'])
vpro.mostrar()
self.resultado = []
for p in partidas:
vpro.mover()
self.resultado.append(p)
vpro.ocultar()
self.rellenar_tabla(self.resultado)
def abrir_parte_tv(self, treeview, path, view_column):
idpartida = treeview.get_model()[path][-1]
partida = pclases.Partida.get(idpartida)
try:
parte = partida.rollos[0].articulos[0].parteDeProduccion
        except AttributeError, e:
            print "Production report (parte) not found: %s" % e
            return
if parte.es_de_balas():
from formularios import partes_de_fabricacion_balas
ventana_parteb = partes_de_fabricacion_balas.PartesDeFabricacionBalas(parte) # @UnusedVariable
else:
from formularios import partes_de_fabricacion_rollos
ventana_parteb = partes_de_fabricacion_rollos.PartesDeFabricacionRollos(parte) # @UnusedVariable
def imprimir(self,boton):
"""
        Prepare the print preview for the report.
"""
from formularios import reports
datos = []
lista = self.resultado
for elem in lista:
datos.append((elem.numpartida,
"%.2f" % elem.longitudinal,
"%.2f" % elem.transversal,
"%.2f" % elem.compresion,
"%.2f" % elem.perforacion,
"%.2f" % elem.permeabilidad,
"%.2f" % elem.poros,
"%.2f" % elem.piramidal,
"%.2f" % elem.espesor))
        if self.inicio is None:
fechaInforme = 'Hasta '+utils.str_fecha(self.fin)
else:
fechaInforme = (utils.str_fecha(self.inicio) + ' - ' +
utils.str_fecha(self.fin))
if datos != []:
reports.abrir_pdf(geninformes.laboratorioPartidas(datos,
fechaInforme))
if __name__ == '__main__':
t = ConsultaPartidasPorProducto()
|
pacoqueen/ginn
|
ginn/formularios/consulta_partidas_por_producto.py
|
Python
|
gpl-2.0
| 12,338
|
__author__ = 'Marko Milutinovic'
"""
This class will implement an Arithmetic Coding decoder
"""
import array
import utils
import math
class ARDecoder:
BITS_IN_BYTE = 8
def __init__(self, wordSize_, vocabularySize_, terminationSymbol_):
"""
Initialize the object
:param wordSize_: The word size (bits) that will be used for compression. Must be greater than 2 and less than 16
        :param vocabularySize_: The size of the vocabulary. Symbols run from 0 to (vocabularySize_ - 1)
:param terminationSymbol_: Symbol which indicates the end of encoded data where decoding should stop. This is required to properly terminate decoding
:return: None
"""
self.mMaxDecodingBytes = utils.calculateMaxBytes(wordSize_) # The max number of bytes we can decode before the statistics need to be re-normalized
self.mVocabularySize = vocabularySize_
self.mTerminationSymbol = terminationSymbol_
if(self.mMaxDecodingBytes == 0):
raise Exception("Invalid word size specified")
self.mWordSize = wordSize_ # The tag word size
self.mWordBitMask = 0x0000 # The word size bit-mask
self.mWordMSBMask = (0x0000 | (1 << (self.mWordSize - 1))) # The bit mask for the top bit of the word
self.mWordSecondMSBMask = (0x0000 | (1 << (self.mWordSize - 2))) # The bit mask for the second most significant bit of the word
# Create bit mask for the word size
for i in range(0, self.mWordSize):
self.mWordBitMask = (self.mWordBitMask << 1) | 0x0001
# We are initializing with an assumption of a value of 1 for the count of each symbol.
self.mSymbolCount = array.array('i', [1]*self.mVocabularySize)
# Reset member variables that are not constant
self.reset()
def reset(self):
""" Reset all the member variables that are not constant for the duration of the object life
:return: None
"""
self.mEncodedData = None # Holds the encoded data that we are un-compressing. Bytearray
self.mEncodedDataCount = 0 # Number of encoded bytes that we are un-compressing
self.mDecodedData = None # Holds the data being decoded
self.mDecodedDataLen = 0 # The number of symbols that have been decoded
self.mCurrentEncodedDataByteIndex = 0 # Index of the encoded data with are currently working with
self.mCurrentEncodedDataBit = 0 # The current bit of the current byte we are using from the encoded data bytearray
self.mTotalSymbolCount = self.mVocabularySize # The total number of symbols encountered
self.mLowerTag = 0 # The lower tag threshold
self.mUpperTag = self.mWordBitMask # The upper tag threshold
self.mCurrentTag = 0 # The current tag we are processing
# We are initializing with an assumption of a value of 1 for the count of each symbol
for i in range(0,self.mVocabularySize):
self.mSymbolCount[i] = 1
def _get_next_bit(self):
"""
Get the next bit from encoded data (MSB first). If we move past the current byte move index over to the next one.
        Raises an exception once there is no more data.
        :return: next bit value
"""
if(self.mCurrentEncodedDataByteIndex >= self.mEncodedDataCount):
raise Exception("Exceeded encoded data buffer")
bitValue = (self.mEncodedData[self.mCurrentEncodedDataByteIndex] >> (self.BITS_IN_BYTE - 1 - self.mCurrentEncodedDataBit)) & 0x0001
self.mCurrentEncodedDataBit += 1
# If we have used all the bits in the current byte, move to the next byte
if(self.mCurrentEncodedDataBit == self.BITS_IN_BYTE):
self.mCurrentEncodedDataByteIndex += 1
self.mCurrentEncodedDataBit = 0
return bitValue
def _increment_count(self, indexToIncrement_):
"""
Update the count for the provided index. Update
the total symbol count as well. If we exceed the max symbol count normalize the stats
:param indexToIncrement_: The index which we are updating
:return: None
"""
self.mSymbolCount[indexToIncrement_] += 1
self.mTotalSymbolCount += 1
        # If we have reached the max number of symbols, normalize the stats so we can continue
if(self.mTotalSymbolCount >= self.mMaxDecodingBytes):
self._normalize_stats()
def _rescale(self):
"""
Perform required rescale operation on the upper, lower and current tags. The following scaling operations are performed:
E1: both the upper and lower ranges fall into the bottom half of full range [0, 0.5). First bit is 0 for both.
Shift out MSB for both and shift in 1 for upper tag and 0 for lower tag. Shift the current tag to left by 1 and move in next bit
E2: both the upper and lower ranges fall into the top half of full range [0.5, 1). First bit is 1 for both.
Shift out MSB for both and shift in 1 for upper tag and 0 for lower tag. Shift the current tag to left by 1 and move in next bit
E3: the upper and lower tag interval lies in the middle [0.25, 0.75). The second MSB of upper tag is 0 and the second bit of the lower tag is 1.
Complement second MSB bit of both and shift in 1 for upper tag and 0 for lower tag. Complement second MSB of the current tag, shift to the left by 1 and move in the next bit
:return:None
"""
sameMSB = ((self.mLowerTag & self.mWordMSBMask) == (self.mUpperTag & self.mWordMSBMask))
tagRangeInMiddle = (((self.mUpperTag & self.mWordSecondMSBMask) == 0) and ((self.mLowerTag & self.mWordSecondMSBMask) == self.mWordSecondMSBMask))
while(sameMSB or tagRangeInMiddle):
# If the first bit is the same we need to perform E1 or E2 scaling. The same set of steps applies to both. If the range is in the middle we need to perform E3 scaling
if(sameMSB):
self.mLowerTag = (self.mLowerTag << 1) & self.mWordBitMask
self.mUpperTag = ((self.mUpperTag << 1) | 0x0001) & self.mWordBitMask
self.mCurrentTag = ((self.mCurrentTag << 1) | self._get_next_bit()) & self.mWordBitMask
elif(tagRangeInMiddle):
self.mLowerTag = (self.mLowerTag << 1) & self.mWordBitMask
self.mUpperTag = (self.mUpperTag << 1) & self.mWordBitMask
self.mCurrentTag = ((self.mCurrentTag << 1) | self._get_next_bit()) & self.mWordBitMask
self.mLowerTag = ((self.mLowerTag & (~self.mWordMSBMask)) | ((~self.mLowerTag) & self.mWordMSBMask))
self.mUpperTag = ((self.mUpperTag & (~self.mWordMSBMask)) | ((~self.mUpperTag) & self.mWordMSBMask))
self.mCurrentTag = ((self.mCurrentTag & (~self.mWordMSBMask)) | ((~self.mCurrentTag) & self.mWordMSBMask))
sameMSB = ((self.mLowerTag & self.mWordMSBMask) == (self.mUpperTag & self.mWordMSBMask))
tagRangeInMiddle = (((self.mUpperTag & self.mWordSecondMSBMask) == 0) and ((self.mLowerTag & self.mWordSecondMSBMask) == self.mWordSecondMSBMask))
def _update_range_tags(self, currentSymbolIndex_, cumulativeCountSymbol_):
"""
Update the upper and lower tags according to stats for the incoming symbol
        :param currentSymbolIndex_: Index of the symbol currently being decoded
:param cumulativeCountSymbol_: The cumulative count of the current symbol
:return: None
"""
prevLowerTag = self.mLowerTag
prevUpperTag = self.mUpperTag
rangeDiff = prevUpperTag - prevLowerTag
cumulativeCountPrevSymbol = cumulativeCountSymbol_ - self.mSymbolCount[currentSymbolIndex_]
        # Use integer floor division to avoid floating point precision loss on large tags
        self.mLowerTag = prevLowerTag + ((rangeDiff + 1) * cumulativeCountPrevSymbol) // self.mTotalSymbolCount
        self.mUpperTag = prevLowerTag + ((rangeDiff + 1) * cumulativeCountSymbol_) // self.mTotalSymbolCount - 1
self._increment_count(currentSymbolIndex_)
def _normalize_stats(self):
"""
Divide the total count for each symbol by 2 but ensure each symbol count is at least 1.
Get new total symbol count from the entries
:return: None
"""
self.mTotalSymbolCount = 0
# Go through all the entries in the cumulative count array
for i in range(0, self.mVocabularySize):
value = int(self.mSymbolCount[i]/2)
# Ensure the count is at least 1
if(value == 0):
value = 1
self.mSymbolCount[i] = value
self.mTotalSymbolCount += value
def decode(self, encodedData_, encodedDataLen_, decodedData_, maxDecodedDataLen_):
"""
Decompress the data passed in. It is the responsibility of the caller to reset the decoder if required before
calling this function
:param encodedData_: The data that needs to be decoded (bytearray)
:param encodedDataLen_: The length of data that needs to be decoded
:param decodedData_: The decoded data (integer array)
        :param maxDecodedDataLen_: The max number of symbols that can be stored in decodedData_ array
:return: Returns the number of symbols stored in decodedData_
"""
        # If the byte array is smaller than the data length passed in, throw exception
if(len(encodedData_) < encodedDataLen_):
raise Exception("Data passed in smaller than expected")
        # If the decoded data array is smaller than the max length passed in, throw exception
if(len(decodedData_) < maxDecodedDataLen_):
raise Exception("Decompressed data byte array passed in smaller than expected")
self.mEncodedData = encodedData_
self.mEncodedDataCount = encodedDataLen_
self.mDecodedData = decodedData_
self.mDecodedDataLen = 0
self.mCurrentEncodedDataByteIndex = 0
self.mCurrentEncodedDataBit = 0
self.mCurrentTag = 0
# Load the first word size bits into the current tag
for i in range(0, self.mWordSize):
self.mCurrentTag = (self.mCurrentTag | (self._get_next_bit() << ((self.mWordSize - 1) - i)))
finished = False
# Until we have reached the end keep decompressing
while(not finished):
currentSymbol = 0
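            # Invert the current tag back into a cumulative count, then walk the
            # symbol table until the interval containing that count is found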
            currentCumulativeCount = ((self.mCurrentTag - self.mLowerTag + 1) * self.mTotalSymbolCount - 1) // (self.mUpperTag - self.mLowerTag + 1)
symbolCumulativeCount = self.mSymbolCount[0]
while(currentCumulativeCount >= symbolCumulativeCount):
currentSymbol += 1
if(currentSymbol >= self.mVocabularySize):
raise Exception("Symbol count of out range")
symbolCumulativeCount += self.mSymbolCount[currentSymbol]
# If we have reached the termination symbol then decoding is finished, otherwise store the decompressed symbol
if(currentSymbol == self.mTerminationSymbol):
finished = True
else:
self.mDecodedData[self.mDecodedDataLen] = currentSymbol
self.mDecodedDataLen += 1
                # If there is no more room to store decoded symbols, raise an error
if(self.mDecodedDataLen >= maxDecodedDataLen_):
raise Exception('Not enough space to store decoded data')
self._update_range_tags(currentSymbol, symbolCumulativeCount)
self._rescale()
return self.mDecodedDataLen
|
markomilutin/kompressor
|
ARDecoder.py
|
Python
|
gpl-2.0
| 12,579
|
from django.utils.unittest.case import TestCase
from scheduler.models import ScheduleGenerator
from uni_info.models import Semester, Course
class ScheduleGeneratorTest(TestCase):
"""
    Test class for the schedule generator; tries different course combinations.
"""
fixtures = ['/scheduler/fixtures/initial_data.json']
def setUp(self):
"""
Setup common data needed in each unit test
"""
self.fall_2013_semester = [sem for sem in Semester.objects.all() if sem.name == 'Fall 2013'][0]
def test_should_generate_empty_schedule(self):
"""
Test generator does not crash with empty list as edge case
"""
course_list = []
generator = ScheduleGenerator(course_list, self.fall_2013_semester)
result = generator.generate_schedules()
self.assertIsNotNone(result)
self.assertEqual(0, len(result))
def test_should_generate_with_1_course(self):
"""
Test generator with only 1 course as edge case
"""
soen341 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '341'][0]
course_list = [soen341]
generator = ScheduleGenerator(course_list, self.fall_2013_semester)
result = generator.generate_schedules()
self.assertIsNotNone(result)
self.assertEqual(2, len(result))
def test_should_generate_schedule_for_2_course(self):
"""
Test generator with more than 1 course
"""
soen341 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '341'][0]
soen287 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '287'][0]
course_list = [soen287, soen341]
generator = ScheduleGenerator(course_list, self.fall_2013_semester)
result = generator.generate_schedules()
self.assertIsNotNone(result)
self.assertEqual(4, len(result))
def test_should_not_generate_schedule_for_3_course_conflict(self):
"""
Test generator with three conflicting courses
"""
soen341 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '341'][0]
soen342 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '342'][0]
soen287 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '287'][0]
course_list = [soen287, soen341, soen342]
generator = ScheduleGenerator(course_list, self.fall_2013_semester)
result = generator.generate_schedules()
self.assertIsNotNone(result)
self.assertEqual(0, len(result))
def test_should_generate_schedule_for_3_course_no_conflict(self):
"""
        Test generator with three courses that have no conflicts
"""
soen341 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '341'][0]
soen343 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '343'][0]
soen287 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '287'][0]
course_list = [soen287, soen341, soen343]
generator = ScheduleGenerator(course_list, self.fall_2013_semester)
result = generator.generate_schedules()
self.assertIsNotNone(result)
self.assertEqual(4, len(result))
|
squarebracket/star
|
scheduler/tests/schedule_generator_tests.py
|
Python
|
gpl-2.0
| 3,829
|
"""
This script is responsible for generating recommendations for the users. The general flow is as follows:
The best_model saved in HDFS is loaded with the help of model_id which is fetched from model_metadata_df.
`spark_user_id` and `recording_id` are fetched from top_artist_candidate_set_df and are given as input to the
recommender. An RDD of `user`, `product` and `rating` is returned from the recommender which is later converted to
a dataframe by filtering top X (an int supplied as an argument to the script) recommendations for all users sorted on rating
and fields renamed as `spark_user_id`, `recording_id` and `rating`. The ratings are scaled so that they lie between 0 and 1.
This dataframe is joined with recordings_df on recording_id to get the recording mbids which are then sent over the queue.
The same process is done for similar artist candidate set.
"""
import logging
import time
from py4j.protocol import Py4JJavaError
import listenbrainz_spark
from listenbrainz_spark import utils, path
from listenbrainz_spark.exceptions import (PathNotFoundException,
FileNotFetchedException,
SparkSessionNotInitializedException,
RecommendationsNotGeneratedException,
EmptyDataframeExcpetion)
from listenbrainz_spark.recommendations.recording.train_models import get_model_path
from listenbrainz_spark.recommendations.recording.candidate_sets import _is_empty_dataframe
from pyspark.sql import Row
import pyspark.sql.functions as func
from pyspark.sql.window import Window
from pyspark.sql.functions import col, udf, row_number
from pyspark.sql.types import DoubleType
from pyspark.mllib.recommendation import MatrixFactorizationModel
logger = logging.getLogger(__name__)
class RecommendationParams:
def __init__(self, recordings_df, model, top_artist_candidate_set_df, similar_artist_candidate_set_df,
recommendation_top_artist_limit, recommendation_similar_artist_limit):
self.recordings_df = recordings_df
self.model = model
self.top_artist_candidate_set_df = top_artist_candidate_set_df
self.similar_artist_candidate_set_df = similar_artist_candidate_set_df
self.recommendation_top_artist_limit = recommendation_top_artist_limit
self.recommendation_similar_artist_limit = recommendation_similar_artist_limit
def get_most_recent_model_id():
""" Get model id of recently created model.
Returns:
model_id (str): Model identification string.
"""
try:
model_metadata = utils.read_files_from_HDFS(path.RECOMMENDATION_RECORDING_MODEL_METADATA)
except PathNotFoundException as err:
logger.error(str(err), exc_info=True)
raise
except FileNotFetchedException as err:
logger.error(str(err), exc_info=True)
raise
latest_ts = model_metadata.select(func.max('model_created').alias('model_created')).take(1)[0].model_created
model_id = model_metadata.select('model_id') \
.where(col('model_created') == latest_ts).take(1)[0].model_id
return model_id
def load_model():
""" Load model from given path in HDFS.
"""
model_id = get_most_recent_model_id()
dest_path = get_model_path(model_id)
try:
model = MatrixFactorizationModel.load(listenbrainz_spark.context, dest_path)
return model
except Py4JJavaError as err:
logger.error('Unable to load model "{}"\n{}\nAborting...'.format(model_id, str(err.java_exception)),
exc_info=True)
raise
def get_recording_mbids(params: RecommendationParams, recommendation_df, users_df):
""" Get recording mbids corresponding to recommended recording ids sorted on rating.
Args:
params: RecommendationParams class object.
recommendation_df: Dataframe of spark_user_id, recording id and rating.
users_df : user_id and spark_user_id of active users.
Returns:
dataframe of recommended recording mbids and related info.
"""
df = params.recordings_df.join(recommendation_df, 'recording_id', 'inner') \
.select('rating',
'recording_mbid',
'spark_user_id')
recording_mbids_df = df.join(users_df, 'spark_user_id', 'inner')
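    # rank each user's recommendations by descending rating so consumers can take the top N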
window = Window.partitionBy('user_id').orderBy(col('rating').desc())
df = recording_mbids_df.withColumn('rank', row_number().over(window)) \
.select('recording_mbid',
'rank',
'rating',
'spark_user_id',
'user_id')
return df
def filter_recommendations_on_rating(df, limit):
""" Filter top X recommendations for each user on rating where X = limit.
Args:
df: Dataframe of user, product and rating.
limit (int): Number of recommendations to be filtered for each user.
Returns:
recommendation_df: Dataframe of spark_user_id, recording_id and rating.
"""
window = Window.partitionBy('user').orderBy(col('rating').desc())
recommendation_df = df.withColumn('rank', row_number().over(window)) \
.where(col('rank') <= limit) \
.select(col('rating'),
col('product').alias('recording_id'),
col('user').alias('spark_user_id'))
return recommendation_df
def generate_recommendations(candidate_set, params: RecommendationParams, limit):
""" Generate recommendations from the candidate set.
Args:
candidate_set (rdd): RDD of spark_user_id and recording_id.
params: RecommendationParams class object.
limit (int): Number of recommendations to be filtered for each user.
Returns:
recommendation_df: Dataframe of spark_user_id, recording_id and rating.
"""
recommendations = params.model.predictAll(candidate_set)
if recommendations.isEmpty():
raise RecommendationsNotGeneratedException('Recommendations not generated!')
df = listenbrainz_spark.session.createDataFrame(recommendations, schema=None)
recommendation_df = filter_recommendations_on_rating(df, limit)
return recommendation_df
def get_scale_rating_udf(rating):
""" Get user defined function (udf) to scale ratings so that they fall in the
range: 0.0 -> 1.0.
Args:
rating (float): score given to recordings by CF.
Returns:
rating udf.
"""
scaled_rating = (rating / 2.0) + 0.5
    return round(min(max(scaled_rating, 0.0), 1.0), 3)
def scale_rating(df):
""" Scale the ratings column of dataframe so that they fall in the
range: 0.0 -> 1.0.
Args:
df: Dataframe to scale.
Returns:
df: Dataframe with scaled rating.
"""
scaling_udf = udf(get_scale_rating_udf, DoubleType())
df = df.withColumn("scaled_rating", scaling_udf(df.rating)) \
.select(col('recording_id'),
col('spark_user_id'),
col('scaled_rating').alias('rating'))
return df
def get_candidate_set_rdd_for_user(candidate_set_df, users):
""" Get candidate set RDD for a given user.
Args:
candidate_set_df: A dataframe of spark_user_id and recording_id for all users.
users: list of user names to generate recommendations for.
Returns:
        candidate_set_rdd: An RDD of spark_user_id and recording_id for the given users.
"""
if users:
candidate_set_user_df = candidate_set_df.select('spark_user_id', 'recording_id') \
.where(col('user_id').isin(users))
else:
candidate_set_user_df = candidate_set_df.select('spark_user_id', 'recording_id')
if _is_empty_dataframe(candidate_set_user_df):
raise EmptyDataframeExcpetion('Empty Candidate sets!')
candidate_set_rdd = candidate_set_user_df.rdd.map(lambda r: (r['spark_user_id'], r['recording_id']))
return candidate_set_rdd
def get_user_name_and_user_id(params: RecommendationParams, users):
""" Get users from top artist candidate set.
Args:
params: RecommendationParams class object.
        users: list of user names to generate recommendations for.
Returns:
users_df: dataframe of user id and user names.
"""
if len(users) == 0:
users_df = params.top_artist_candidate_set_df.select('spark_user_id', 'user_id').distinct()
else:
users_df = params.top_artist_candidate_set_df.select('spark_user_id', 'user_id') \
.where(params.top_artist_candidate_set_df.user_id.isin(users)) \
.distinct()
if _is_empty_dataframe(users_df):
raise EmptyDataframeExcpetion('No active users found!')
return users_df
def check_for_ratings_beyond_range(top_artist_rec_df, similar_artist_rec_df):
""" Check if rating in top_artist_rec_df and similar_artist_rec_df does not belong to [-1, 1].
Args:
top_artist_rec_df (dataframe): Top artist recommendations for all users.
similar_artist_rec_df (dataframe): Similar artist recommendations for all users.
Returns:
a tuple of booleans (max out of range, min out of range)
"""
max_rating = top_artist_rec_df.select(func.max('rating').alias('rating')).take(1)[0].rating
max_rating = max(similar_artist_rec_df.select(func.max('rating').alias('rating')).take(1)[0].rating, max_rating)
min_rating = top_artist_rec_df.select(func.min('rating').alias('rating')).take(1)[0].rating
min_rating = min(similar_artist_rec_df.select(func.min('rating').alias('rating')).take(1)[0].rating, min_rating)
if max_rating > 1.0:
logger.info('Some ratings are greater than 1 \nMax rating: {}'.format(max_rating))
if min_rating < -1.0:
logger.info('Some ratings are less than -1 \nMin rating: {}'.format(min_rating))
return max_rating > 1.0, min_rating < -1.0
def create_messages(top_artist_rec_mbid_df, similar_artist_rec_mbid_df, active_user_count, total_time,
top_artist_rec_user_count, similar_artist_rec_user_count):
""" Create messages to send the data to the webserver via RabbitMQ.
Args:
top_artist_rec_mbid_df (dataframe): Top artist recommendations.
similar_artist_rec_mbid_df (dataframe): Similar artist recommendations.
active_user_count (int): Number of users active in the last week.
        total_time (float): Time taken (in seconds) to execute the whole script.
top_artist_rec_user_count (int): Number of users for whom top artist recommendations were generated.
similar_artist_rec_user_count (int): Number of users for whom similar artist recommendations were generated.
Returns:
messages: A list of messages to be sent via RabbitMQ
"""
top_artist_rec_itr = top_artist_rec_mbid_df.toLocalIterator()
user_rec = {}
for row in top_artist_rec_itr:
if user_rec.get(row.user_id) is None:
user_rec[row.user_id] = {}
user_rec[row.user_id]['top_artist'] = [
{
"recording_mbid": row.recording_mbid,
"score": row.rating
}
]
user_rec[row.user_id]['similar_artist'] = []
else:
user_rec[row.user_id]['top_artist'].append(
{
"recording_mbid": row.recording_mbid,
"score": row.rating
}
)
similar_artist_rec_itr = similar_artist_rec_mbid_df.toLocalIterator()
for row in similar_artist_rec_itr:
if user_rec.get(row.user_id) is None:
user_rec[row.user_id] = {}
user_rec[row.user_id]['similar_artist'] = [
{
"recording_mbid": row.recording_mbid,
"score": row.rating
}
]
else:
user_rec[row.user_id]['similar_artist'].append(
{
"recording_mbid": row.recording_mbid,
"score": row.rating
}
)
for user_id, data in user_rec.items():
messages = {
'user_id': user_id,
'type': 'cf_recommendations_recording_recommendations',
'recommendations': {
'top_artist': data.get('top_artist', []),
'similar_artist': data.get('similar_artist', [])
}
}
yield messages
yield {
'type': 'cf_recommendations_recording_mail',
'active_user_count': active_user_count,
'top_artist_user_count': top_artist_rec_user_count,
'similar_artist_user_count': similar_artist_rec_user_count,
'total_time': '{:.2f}'.format(total_time / 3600)
}
def get_recommendations_for_all(params: RecommendationParams, users):
""" Get recommendations for all active users.
Args:
params: RecommendationParams class object.
        users: list of user names to generate recommendations for.
Returns:
top_artist_rec_df: Top artist recommendations.
similar_artist_rec_df: Similar artist recommendations.
"""
try:
top_artist_candidate_set_rdd = get_candidate_set_rdd_for_user(params.top_artist_candidate_set_df, users)
except EmptyDataframeExcpetion:
logger.error('Top artist candidate set not found for any user.', exc_info=True)
raise
try:
similar_artist_candidate_set_rdd = get_candidate_set_rdd_for_user(params.similar_artist_candidate_set_df, users)
except EmptyDataframeExcpetion:
logger.error('Similar artist candidate set not found for any user.', exc_info=True)
raise
try:
top_artist_rec_df = generate_recommendations(top_artist_candidate_set_rdd, params,
params.recommendation_top_artist_limit)
except RecommendationsNotGeneratedException:
logger.error('Top artist recommendations not generated for any user', exc_info=True)
raise
try:
similar_artist_rec_df = generate_recommendations(similar_artist_candidate_set_rdd, params,
params.recommendation_similar_artist_limit)
except RecommendationsNotGeneratedException:
logger.error('Similar artist recommendations not generated for any user', exc_info=True)
raise
return top_artist_rec_df, similar_artist_rec_df
def get_user_count(df):
""" Get distinct user count from the given dataframe.
"""
users_df = df.select('spark_user_id').distinct()
return users_df.count()
def main(recommendation_top_artist_limit=None, recommendation_similar_artist_limit=None, users=None):
try:
listenbrainz_spark.init_spark_session('Recommendations')
except SparkSessionNotInitializedException as err:
logger.error(str(err), exc_info=True)
raise
try:
recordings_df = utils.read_files_from_HDFS(path.RECOMMENDATION_RECORDINGS_DATAFRAME)
top_artist_candidate_set_df = utils.read_files_from_HDFS(path.RECOMMENDATION_RECORDING_TOP_ARTIST_CANDIDATE_SET)
similar_artist_candidate_set_df = utils.read_files_from_HDFS(path.RECOMMENDATION_RECORDING_SIMILAR_ARTIST_CANDIDATE_SET)
except PathNotFoundException as err:
logger.error(str(err), exc_info=True)
raise
except FileNotFetchedException as err:
logger.error(str(err), exc_info=True)
raise
logger.info('Loading model...')
model = load_model()
# an action must be called to persist data in memory
recordings_df.count()
recordings_df.persist()
params = RecommendationParams(recordings_df, model, top_artist_candidate_set_df,
similar_artist_candidate_set_df,
recommendation_top_artist_limit,
recommendation_similar_artist_limit)
try:
# timestamp when the script was invoked
ts_initial = time.monotonic()
users_df = get_user_name_and_user_id(params, users)
# Some users are excluded from the top_artist_candidate_set because of the limited data
# in the mapping. Therefore, active_user_count may or may not be equal to number of users
# active in the last week. Ideally, top_artist_candidate_set should give the active user count.
active_user_count = users_df.count()
users_df.persist()
logger.info('Took {:.2f}sec to get active user count'.format(time.monotonic() - ts_initial))
except EmptyDataframeExcpetion as err:
logger.error(str(err), exc_info=True)
raise
logger.info('Generating recommendations...')
ts = time.monotonic()
top_artist_rec_df, similar_artist_rec_df = get_recommendations_for_all(params, users)
logger.info('Recommendations generated!')
logger.info('Took {:.2f}sec to generate recommendations for all active users'.format(time.monotonic() - ts))
ts = time.monotonic()
top_artist_rec_user_count = get_user_count(top_artist_rec_df)
similar_artist_rec_user_count = get_user_count(similar_artist_rec_df)
logger.info('Took {:.2f}sec to get top artist and similar artist user count'.format(time.monotonic() - ts))
ts = time.monotonic()
check_for_ratings_beyond_range(top_artist_rec_df, similar_artist_rec_df)
top_artist_rec_scaled_df = scale_rating(top_artist_rec_df)
similar_artist_rec_scaled_df = scale_rating(similar_artist_rec_df)
logger.info('Took {:.2f}sec to scale the ratings'.format(time.monotonic() - ts))
ts = time.monotonic()
top_artist_rec_mbid_df = get_recording_mbids(params, top_artist_rec_scaled_df, users_df)
similar_artist_rec_mbid_df = get_recording_mbids(params, similar_artist_rec_scaled_df, users_df)
logger.info('Took {:.2f}sec to get mbids corresponding to recording ids'.format(time.monotonic() - ts))
# persisted data must be cleared from memory after usage to avoid OOM
recordings_df.unpersist()
total_time = time.monotonic() - ts_initial
logger.info('Total time: {:.2f}sec'.format(total_time))
result = create_messages(top_artist_rec_mbid_df, similar_artist_rec_mbid_df, active_user_count, total_time,
top_artist_rec_user_count, similar_artist_rec_user_count)
users_df.unpersist()
return result
|
metabrainz/listenbrainz-server
|
listenbrainz_spark/recommendations/recording/recommend.py
|
Python
|
gpl-2.0
| 19,039
|
# Copyright (C) 2008, One Laptop Per Child
# Copyright (C) 2009, Tomeu Vizoso
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from gettext import gettext as _
from gettext import ngettext
import locale
import logging
from gi.repository import GObject
from gi.repository import Gtk
from sugar3.graphics import style
from sugar3.graphics.icon import Icon, CellRendererIcon
from jarabe.controlpanel.sectionview import SectionView
from jarabe.model.update import updater
from jarabe.model import bundleregistry
_DEBUG_VIEW_ALL = True
class ActivityUpdater(SectionView):
def __init__(self, model, alerts):
SectionView.__init__(self)
self._model = updater.get_instance()
self._id_progresss = self._model.connect('progress',
self.__progress_cb)
self._id_updates = self._model.connect('updates-available',
self.__updates_available_cb)
self._id_error = self._model.connect('error',
self.__error_cb)
self._id_finished = self._model.connect('finished',
self.__finished_cb)
self.set_spacing(style.DEFAULT_SPACING)
self.set_border_width(style.DEFAULT_SPACING * 2)
self._top_label = Gtk.Label()
self._top_label.set_line_wrap(True)
self._top_label.set_justify(Gtk.Justification.LEFT)
self._top_label.props.xalign = 0
self.pack_start(self._top_label, False, True, 0)
self._top_label.show()
separator = Gtk.HSeparator()
self.pack_start(separator, False, True, 0)
separator.show()
self._bottom_label = Gtk.Label()
self._bottom_label.set_line_wrap(True)
self._bottom_label.set_justify(Gtk.Justification.LEFT)
self._bottom_label.props.xalign = 0
self._bottom_label.set_markup(
_('Software updates correct errors, eliminate security '
'vulnerabilities, and provide new features.'))
self.pack_start(self._bottom_label, False, True, 0)
self._bottom_label.show()
self._update_box = None
self._progress_pane = None
state = self._model.get_state()
if state in (updater.STATE_IDLE, updater.STATE_CHECKED):
self._refresh()
elif state in (updater.STATE_CHECKING, updater.STATE_DOWNLOADING,
updater.STATE_UPDATING):
self._switch_to_progress_pane()
self._progress_pane.set_message(_('Update in progress...'))
self.connect('destroy', self.__destroy_cb)
def __destroy_cb(self, widget):
self._model.disconnect(self._id_progresss)
self._model.disconnect(self._id_updates)
self._model.disconnect(self._id_error)
self._model.disconnect(self._id_finished)
self._model.clean()
def _switch_to_update_box(self, updates):
if self._update_box in self.get_children():
return
if self._progress_pane in self.get_children():
self.remove(self._progress_pane)
self._progress_pane = None
if self._update_box is None:
self._update_box = UpdateBox(updates)
self._update_box.refresh_button.connect(
'clicked',
self.__refresh_button_clicked_cb)
self._update_box.install_button.connect(
'clicked',
self.__install_button_clicked_cb)
self.pack_start(self._update_box, expand=True, fill=True, padding=0)
self._update_box.show()
def _switch_to_progress_pane(self):
if self._progress_pane in self.get_children():
return
if self._model.get_state() == updater.STATE_CHECKING:
top_message = _('Checking for updates...')
else:
top_message = _('Installing updates...')
self._top_label.set_markup('<big>%s</big>' % top_message)
if self._update_box in self.get_children():
self.remove(self._update_box)
self._update_box = None
if self._progress_pane is None:
self._progress_pane = ProgressPane()
self._progress_pane.cancel_button.connect(
'clicked',
self.__cancel_button_clicked_cb)
self.pack_start(
self._progress_pane, expand=True, fill=False, padding=0)
self._progress_pane.show()
def _clear_center(self):
if self._progress_pane in self.get_children():
self.remove(self._progress_pane)
self._progress_pane = None
if self._update_box in self.get_children():
self.remove(self._update_box)
self._update_box = None
def __progress_cb(self, model, state, bundle_name, progress):
if state == updater.STATE_CHECKING:
if bundle_name:
message = _('Checking %s...') % bundle_name
else:
message = _('Looking for updates...')
elif state == updater.STATE_DOWNLOADING:
message = _('Downloading %s...') % bundle_name
elif state == updater.STATE_UPDATING:
message = _('Updating %s...') % bundle_name
self._switch_to_progress_pane()
self._progress_pane.set_message(message)
self._progress_pane.set_progress(progress)
def __updates_available_cb(self, model, updates):
logging.debug('ActivityUpdater.__updates_available_cb')
available_updates = len(updates)
if not available_updates:
top_message = _('Your software is up-to-date')
else:
top_message = ngettext('You can install %s update',
'You can install %s updates',
available_updates)
top_message = top_message % available_updates
top_message = GObject.markup_escape_text(top_message)
self._top_label.set_markup('<big>%s</big>' % top_message)
if not available_updates:
self._clear_center()
else:
self._switch_to_update_box(updates)
def __error_cb(self, model, updates):
logging.debug('ActivityUpdater.__error_cb')
top_message = _('Can\'t connect to the activity server')
self._top_label.set_markup('<big>%s</big>' % top_message)
self._bottom_label.set_markup(
_('Verify your connection to internet and try again, '
'or try again later'))
self._clear_center()
def __refresh_button_clicked_cb(self, button):
self._refresh()
def _refresh(self):
self._model.check_updates()
def __install_button_clicked_cb(self, button):
self._model.update(self._update_box.get_bundles_to_update())
def __cancel_button_clicked_cb(self, button):
self._model.cancel()
def __finished_cb(self, model, installed_updates, failed_updates,
cancelled):
num_installed = len(installed_updates)
logging.debug('ActivityUpdater.__finished_cb')
top_message = ngettext('%s update was installed',
'%s updates were installed', num_installed)
top_message = top_message % num_installed
top_message = GObject.markup_escape_text(top_message)
self._top_label.set_markup('<big>%s</big>' % top_message)
self._clear_center()
def undo(self):
self._model.cancel()
class ProgressPane(Gtk.VBox):
"""Container which replaces the `ActivityPane` during refresh or
install."""
def __init__(self):
Gtk.VBox.__init__(self)
self.set_spacing(style.DEFAULT_PADDING)
self.set_border_width(style.DEFAULT_SPACING * 2)
self._progress = Gtk.ProgressBar()
self.pack_start(self._progress, True, True, 0)
self._progress.show()
self._label = Gtk.Label()
self._label.set_line_wrap(True)
self._label.set_property('xalign', 0.5)
self._label.modify_fg(Gtk.StateType.NORMAL,
style.COLOR_BUTTON_GREY.get_gdk_color())
self.pack_start(self._label, True, True, 0)
self._label.show()
alignment_box = Gtk.Alignment.new(xalign=0.5, yalign=0.5,
xscale=0, yscale=0)
self.pack_start(alignment_box, True, True, 0)
alignment_box.show()
self.cancel_button = Gtk.Button(stock=Gtk.STOCK_CANCEL)
alignment_box.add(self.cancel_button)
self.cancel_button.show()
def set_message(self, message):
self._label.set_text(message)
def set_progress(self, fraction):
self._progress.props.fraction = fraction
class UpdateBox(Gtk.VBox):
def __init__(self, updates):
Gtk.VBox.__init__(self)
self.set_spacing(style.DEFAULT_PADDING)
scrolled_window = Gtk.ScrolledWindow()
scrolled_window.set_policy(
Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
self.pack_start(scrolled_window, True, True, 0)
scrolled_window.show()
self._update_list = UpdateList(updates)
self._update_list.props.model.connect('row-changed',
self.__row_changed_cb)
scrolled_window.add(self._update_list)
self._update_list.show()
bottom_box = Gtk.HBox()
bottom_box.set_spacing(style.DEFAULT_SPACING)
self.pack_start(bottom_box, False, True, 0)
bottom_box.show()
self._size_label = Gtk.Label()
self._size_label.props.xalign = 0
self._size_label.set_justify(Gtk.Justification.LEFT)
bottom_box.pack_start(self._size_label, True, True, 0)
self._size_label.show()
self.refresh_button = Gtk.Button(stock=Gtk.STOCK_REFRESH)
bottom_box.pack_start(self.refresh_button, False, True, 0)
self.refresh_button.show()
self.install_button = Gtk.Button(_('Install selected'))
self.install_button.props.image = Icon(
icon_name='emblem-downloads',
pixel_size=style.SMALL_ICON_SIZE)
bottom_box.pack_start(self.install_button, False, True, 0)
self.install_button.show()
self._update_total_size_label()
def __row_changed_cb(self, list_model, path, iterator):
self._update_total_size_label()
self._update_install_button()
def _update_total_size_label(self):
total_size = 0
for row in self._update_list.props.model:
if row[UpdateListModel.SELECTED]:
total_size += row[UpdateListModel.SIZE]
markup = _('Download size: %s') % _format_size(total_size)
self._size_label.set_markup(markup)
def _update_install_button(self):
for row in self._update_list.props.model:
if row[UpdateListModel.SELECTED]:
self.install_button.props.sensitive = True
return
self.install_button.props.sensitive = False
def get_bundles_to_update(self):
bundles_to_update = []
for row in self._update_list.props.model:
if row[UpdateListModel.SELECTED]:
bundles_to_update.append(row[UpdateListModel.BUNDLE_ID])
return bundles_to_update
class UpdateList(Gtk.TreeView):
def __init__(self, updates):
list_model = UpdateListModel(updates)
Gtk.TreeView.__init__(self, list_model)
self.set_reorderable(False)
self.set_enable_search(False)
self.set_headers_visible(False)
toggle_renderer = Gtk.CellRendererToggle()
toggle_renderer.props.activatable = True
toggle_renderer.props.xpad = style.DEFAULT_PADDING
toggle_renderer.props.indicator_size = style.zoom(26)
toggle_renderer.connect('toggled', self.__toggled_cb)
toggle_column = Gtk.TreeViewColumn()
toggle_column.pack_start(toggle_renderer, True)
toggle_column.add_attribute(toggle_renderer, 'active',
UpdateListModel.SELECTED)
self.append_column(toggle_column)
icon_renderer = CellRendererIcon(self)
icon_renderer.props.width = style.STANDARD_ICON_SIZE
icon_renderer.props.height = style.STANDARD_ICON_SIZE
icon_renderer.props.size = style.STANDARD_ICON_SIZE
icon_renderer.props.xpad = style.DEFAULT_PADDING
icon_renderer.props.ypad = style.DEFAULT_PADDING
icon_renderer.props.stroke_color = style.COLOR_TOOLBAR_GREY.get_svg()
icon_renderer.props.fill_color = style.COLOR_TRANSPARENT.get_svg()
icon_column = Gtk.TreeViewColumn()
icon_column.pack_start(icon_renderer, True)
icon_column.add_attribute(icon_renderer, 'file-name',
UpdateListModel.ICON_FILE_NAME)
self.append_column(icon_column)
text_renderer = Gtk.CellRendererText()
description_column = Gtk.TreeViewColumn()
description_column.pack_start(text_renderer, True)
description_column.add_attribute(text_renderer, 'markup',
UpdateListModel.DESCRIPTION)
self.append_column(description_column)
def __toggled_cb(self, cell_renderer, path):
row = self.props.model[path]
row[UpdateListModel.SELECTED] = not row[UpdateListModel.SELECTED]
class UpdateListModel(Gtk.ListStore):
BUNDLE_ID = 0
SELECTED = 1
ICON_FILE_NAME = 2
DESCRIPTION = 3
SIZE = 4
def __init__(self, updates):
Gtk.ListStore.__init__(self, str, bool, str, str, int)
registry = bundleregistry.get_registry()
for bundle_update in updates:
installed = registry.get_bundle(bundle_update.bundle_id)
row = [None] * 5
row[self.BUNDLE_ID] = bundle_update.bundle_id
row[self.SELECTED] = True
if installed:
row[self.ICON_FILE_NAME] = installed.get_icon()
else:
if bundle_update.icon_file_name is not None:
row[self.ICON_FILE_NAME] = bundle_update.icon_file_name
if installed:
details = _('From version %(current)s to %(new)s (Size: '
'%(size)s)')
details = details % \
{'current': installed.get_activity_version(),
'new': bundle_update.version,
'size': _format_size(bundle_update.size)}
else:
details = _('Version %(version)s (Size: %(size)s)')
details = details % \
{'version': bundle_update.version,
'size': _format_size(bundle_update.size)}
row[self.DESCRIPTION] = '<b>%s</b>\n%s' % \
(bundle_update.name, details)
row[self.SIZE] = bundle_update.size
self.append(row)
def _format_size(size):
"""Convert a given size in bytes to a nicer better readable unit"""
if size == 0:
# TRANS: download size is 0
return _('None')
elif size < 1024:
# TRANS: download size of very small updates
return _('1 KB')
elif size < 1024 * 1024:
# TRANS: download size of small updates, e.g. '250 KB'
return locale.format_string(_('%.0f KB'), size / 1024.0)
else:
# TRANS: download size of updates, e.g. '2.3 MB'
return locale.format_string(_('%.1f MB'), size / 1024.0 / 1024)
|
rparrapy/sugar
|
extensions/cpsection/updater/view.py
|
Python
|
gpl-2.0
| 16,181
|
#
# Copyright (C) 2010 Norwegian University of Science and Technology
# Copyright (C) 2011-2015 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details. You should have received a copy of the GNU General Public License
# along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""View controller for PortAdmin"""
import ConfigParser
import logging
import json
from operator import or_ as OR
from django.http import HttpResponse, JsonResponse
from django.template import RequestContext, Context
from django.shortcuts import render, render_to_response, get_object_or_404
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.views.decorators.http import require_POST
from nav.auditlog.models import LogEntry
from nav.auditlog.utils import get_auditlog_entries
from nav.django.utils import get_account
from nav.web.utils import create_title
from nav.models.manage import Netbox, Interface
from nav.web.portadmin.utils import (get_and_populate_livedata,
find_and_populate_allowed_vlans,
get_aliastemplate, get_ifaliasformat,
save_to_database,
check_format_on_ifalias,
find_allowed_vlans_for_user_on_netbox,
find_allowed_vlans_for_user,
filter_vlans, fetch_voice_vlans,
should_check_access_rights,
mark_detained_interfaces,
read_config, is_cisco,
add_dot1x_info,
is_restart_interface_enabled,
is_write_mem_enabled)
from nav.Snmp.errors import SnmpError, TimeOutException
from nav.portadmin.snmputils import SNMPFactory, SNMPHandler
from .forms import SearchForm
_logger = logging.getLogger("nav.web.portadmin")
def get_base_context(additional_paths=None, form=None):
"""Returns a base context for portadmin
:type additional_paths: list of tuple
"""
navpath = [('Home', '/'), ('PortAdmin', reverse('portadmin-index'))]
if additional_paths:
navpath += additional_paths
form = form if form else SearchForm()
return {
'navpath': navpath,
'title': create_title(navpath),
'form': form
}
def default_render(request):
"""Default render for errors etc"""
return render(request, 'portadmin/base.html',
get_base_context(form=get_form(request)))
def get_form(request):
"""If we are searching for something, return a bound form with the
search parameter"""
if 'query' in request.GET:
return SearchForm(request.GET)
def index(request):
"""View for showing main page"""
netboxes = []
interfaces = []
form = get_form(request)
if form and form.is_valid():
netboxes, interfaces = search(form.cleaned_data['query'])
if len(netboxes) == 1 and not interfaces:
return search_by_sysname(request, netboxes[0].sysname)
elif len(interfaces) == 1 and not netboxes:
return search_by_interfaceid(request, interfaces[0].id)
else:
form = SearchForm()
context = get_base_context(form=form)
context['netboxes'] = netboxes
context['interfaces'] = interfaces
return render(request, 'portadmin/base.html', context)
def search(query):
"""Search for something in portadmin"""
netbox_filters = [
Q(sysname__icontains=query),
Q(ip=query)
]
netboxes = Netbox.objects.filter(
reduce(OR, netbox_filters)).order_by('sysname')
interfaces = Interface.objects.filter(
ifalias__icontains=query).order_by('netbox__sysname', 'ifname')
return netboxes, interfaces
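# Added note: reduce(OR, netbox_filters) above folds the Q objects into a
# single ORed lookup -- since operator.or_ applied to two Q objects is the
# | operator, it is equivalent to
#   Netbox.objects.filter(Q(sysname__icontains=query) | Q(ip=query))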
def search_by_ip(request, ip):
"""View for showing a search done by ip-address"""
return search_by_kwargs(request, ip=ip)
def search_by_sysname(request, sysname):
"""View for showing a search done by sysname"""
return search_by_kwargs(request, sysname=sysname)
def search_by_kwargs(request, **kwargs):
"""Search by keyword arguments"""
try:
netbox = Netbox.objects.get(**kwargs)
except Netbox.DoesNotExist as do_not_exist_ex:
_logger.error("Netbox %s not found; DoesNotExist = %s",
kwargs.get('sysname') or kwargs.get('ip'),
do_not_exist_ex)
messages.error(request, 'Could not find IP device')
return default_render(request)
else:
if not netbox.type:
messages.error(request, 'IP device found but has no type')
return default_render(request)
interfaces = netbox.get_swports_sorted()
auditlog_entries = get_auditlog_entries(interfaces)
return render(request, 'portadmin/netbox.html',
populate_infodict(request, netbox, interfaces,
auditlog_entries))
def search_by_interfaceid(request, interfaceid):
"""View for showing a search done by interface id"""
try:
interface = Interface.objects.get(id=interfaceid)
except Interface.DoesNotExist as do_not_exist_ex:
_logger.error("Interface %s not found; DoesNotExist = %s",
interfaceid, do_not_exist_ex)
messages.error(request,
'Could not find interface with id %s' %
str(interfaceid))
return default_render(request)
else:
netbox = interface.netbox
if not netbox.type:
messages.error(request, 'IP device found but has no type')
return default_render(request)
interfaces = [interface]
auditlog_entries = get_auditlog_entries(interfaces)
return render(request, 'portadmin/netbox.html',
populate_infodict(request, netbox, interfaces,
auditlog_entries))
def populate_infodict(request, netbox, interfaces, auditlog_entries=None):
"""Populate a dictionary used in every http response"""
allowed_vlans = []
voice_vlan = None
readonly = False
config = read_config()
auditlog_entries = {} if auditlog_entries is None else auditlog_entries
try:
fac = get_and_populate_livedata(netbox, interfaces)
allowed_vlans = find_and_populate_allowed_vlans(
request.account, netbox, interfaces, fac)
voice_vlan = fetch_voice_vlan_for_netbox(request, fac, config)
if voice_vlan:
if is_cisco_voice_enabled(config) and is_cisco(netbox):
set_voice_vlan_attribute_cisco(voice_vlan, interfaces, fac)
else:
set_voice_vlan_attribute(voice_vlan, interfaces)
mark_detained_interfaces(interfaces)
        if is_dot1x_enabled(config):
add_dot1x_info(interfaces, fac)
except TimeOutException:
readonly = True
messages.error(request, "Timeout when contacting %s. Values displayed "
"are from database" % netbox.sysname)
if not netbox.read_only:
messages.error(request, "Read only community not set")
except SnmpError:
readonly = True
messages.error(request, "SNMP error when contacting %s. Values "
"displayed are from database" % netbox.sysname)
if check_read_write(netbox, request):
readonly = True
ifaliasformat = get_ifaliasformat(config)
aliastemplate = ''
if ifaliasformat:
tmpl = get_aliastemplate()
aliastemplate = tmpl.render(Context({'ifaliasformat': ifaliasformat}))
save_to_database(interfaces)
info_dict = get_base_context([(netbox.sysname, )], form=get_form(request))
info_dict.update({'interfaces': interfaces,
'auditmodel': netbox.sysname,
'netbox': netbox,
'voice_vlan': voice_vlan,
'allowed_vlans': allowed_vlans,
'readonly': readonly,
'aliastemplate': aliastemplate,
'auditlog_api_parameters': json.dumps(
{'subsystem': 'portadmin'}),
'auditlog_entries': auditlog_entries,})
return info_dict
def is_dot1x_enabled(config):
"""Checks of dot1x config option is true"""
section = 'general'
option = 'enabledot1x'
try:
return (config.has_option(section, option) and
config.getboolean(section, option))
except ValueError:
pass
return False
def is_cisco_voice_enabled(config):
"""Checks if the Cisco config option is enabled"""
section = 'general'
option = 'cisco_voice_vlan'
if config.has_section(section):
if config.has_option(section, option):
return config.getboolean(section, option)
return False
def fetch_voice_vlan_for_netbox(request, factory, config=None):
"""Fetch the voice vlan for this netbox
There may be multiple voice vlans configured. Pick the one that exists
on this netbox. If multiple vlans exist, we cannot know which one to use.
"""
if config is None:
config = read_config()
voice_vlans = fetch_voice_vlans(config)
if not voice_vlans:
return
voice_vlans_on_netbox = list(set(voice_vlans) &
set(factory.get_available_vlans()))
if not voice_vlans_on_netbox:
# Should this be reported? At the moment I do not think so.
return
if len(voice_vlans_on_netbox) > 1:
messages.error(request, 'Multiple voice vlans configured on this '
'netbox')
return
return voice_vlans_on_netbox[0]
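# Added example (illustrative values): if fetch_voice_vlans(config) returns
# [50, 60] and factory.get_available_vlans() returns [10, 50], the
# intersection is [50] and 50 is returned; an empty intersection returns
# None, and two or more hits return None after flagging an error message.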
def set_voice_vlan_attribute(voice_vlan, interfaces):
"""Set an attribute on the interfaces to indicate voice vlan behavior"""
if voice_vlan:
for interface in interfaces:
if not interface.trunk:
continue
allowed_vlans = interface.swportallowedvlan.get_allowed_vlans()
interface.voice_activated = (len(allowed_vlans) == 1 and
voice_vlan in allowed_vlans)
def set_voice_vlan_attribute_cisco(voice_vlan, interfaces, fac):
"""Set voice vlan attribute for Cisco voice vlan"""
voice_mapping = fac.get_cisco_voice_vlans()
for interface in interfaces:
voice_activated = voice_mapping.get(interface.ifindex) == voice_vlan
interface.voice_activated = voice_activated
def check_read_write(netbox, request):
"""Add a message to user explaining why he can't edit anything
:returns: flag indicating readonly or not
"""
if not netbox.read_write:
messages.error(request,
"Write community not set for this device, "
"changes cannot be saved")
return True
return False
def save_interfaceinfo(request):
"""Set ifalias and/or vlan on netbox
messages: created from the results from the messages framework
interfaceid must be a part of the request
ifalias, vlan and voicevlan are all optional
"""
if request.method == 'POST':
interface = Interface.objects.get(pk=request.POST.get('interfaceid'))
account = get_account(request)
# Skip a lot of queries if access_control is not turned on
if should_check_access_rights(account):
_logger.info('Checking access rights for %s', account)
if interface.vlan in [v.vlan for v in
find_allowed_vlans_for_user_on_netbox(
account, interface.netbox)]:
set_interface_values(account, interface, request)
else:
# Should only happen if user tries to avoid gui restrictions
messages.error(request, 'Not allowed to edit this interface')
else:
set_interface_values(account, interface, request)
else:
messages.error(request, 'Wrong request type')
result = {"messages": build_ajax_messages(request)}
return response_based_on_result(result)
def set_interface_values(account, interface, request):
"""Use snmp to set the values in the request on the netbox"""
fac = get_factory(interface.netbox)
if fac:
# Order is important here, set_voice need to be before set_vlan
set_voice_vlan(fac, interface, request)
set_ifalias(account, fac, interface, request)
set_vlan(account, fac, interface, request)
set_admin_status(fac, interface, request)
save_to_database([interface])
else:
messages.info(request, 'Could not connect to netbox')
def build_ajax_messages(request):
"""Create a structure suitable for converting to json from messages"""
ajax_messages = []
for message in messages.get_messages(request):
ajax_messages.append({
'level': message.level,
'message': message.message,
'extra_tags': message.tags
})
return ajax_messages
def set_ifalias(account, fac, interface, request):
"""Set ifalias on netbox if it is requested"""
if 'ifalias' in request.POST:
ifalias = request.POST.get('ifalias')
if check_format_on_ifalias(ifalias):
try:
fac.set_if_alias(interface.ifindex, ifalias)
interface.ifalias = ifalias
LogEntry.add_log_entry(
account,
u'set-ifalias',
u'{actor}: {object} - ifalias set to "%s"' % ifalias,
subsystem=u'portadmin',
object=interface,
)
_logger.info('%s: %s:%s - ifalias set to "%s"', account.login,
interface.netbox.get_short_sysname(),
interface.ifname, ifalias)
except SnmpError as error:
_logger.error('Error setting ifalias: %s', error)
messages.error(request, "Error setting ifalias: %s" % error)
else:
messages.error(request, "Wrong format on port description")
def set_vlan(account, fac, interface, request):
"""Set vlan on netbox if it is requested"""
if 'vlan' in request.POST:
vlan = int(request.POST.get('vlan'))
try:
if is_cisco(interface.netbox):
# If Cisco and trunk voice vlan (not Cisco voice vlan),
# we have to set native vlan instead of access vlan
config = read_config()
voice_activated = request.POST.get('voice_activated', False)
if not is_cisco_voice_enabled(config) and voice_activated:
fac.set_native_vlan(interface, vlan)
else:
fac.set_vlan(interface.ifindex, vlan)
else:
fac.set_vlan(interface.ifindex, vlan)
interface.vlan = vlan
LogEntry.add_log_entry(
account,
u'set-vlan',
u'{actor}: {object} - vlan set to "%s"' % vlan,
subsystem=u'portadmin',
object=interface,
)
_logger.info('%s: %s:%s - vlan set to %s', account.login,
interface.netbox.get_short_sysname(),
interface.ifname, vlan)
except (SnmpError, TypeError) as error:
_logger.error('Error setting vlan: %s', error)
messages.error(request, "Error setting vlan: %s" % error)
def set_voice_vlan(fac, interface, request):
"""Set voicevlan on interface
A voice vlan is a normal vlan that is defined by the user of NAV as
a vlan that is used only for ip telephone traffic.
To set a voice vlan we have to make sure the interface is configured
to tag both the voicevlan and the "access-vlan".
"""
if 'voicevlan' in request.POST:
config = read_config()
voice_vlan = fetch_voice_vlan_for_netbox(request, fac, config)
use_cisco_voice_vlan = (is_cisco_voice_enabled(config) and
is_cisco(interface.netbox))
# Either the voicevlan is turned off or turned on
turn_on_voice_vlan = request.POST.get('voicevlan') == 'true'
account = get_account(request)
try:
if turn_on_voice_vlan:
if use_cisco_voice_vlan:
fac.set_cisco_voice_vlan(interface, voice_vlan)
else:
fac.set_voice_vlan(interface, voice_vlan)
_logger.info('%s: %s:%s - %s', account.login,
interface.netbox.get_short_sysname(),
interface.ifname, 'voice vlan enabled')
else:
if use_cisco_voice_vlan:
fac.disable_cisco_voice_vlan(interface)
else:
fac.set_access(interface, interface.vlan)
_logger.info('%s: %s:%s - %s', account.login,
interface.netbox.get_short_sysname(),
interface.ifname, 'voice vlan disabled')
except (SnmpError, ValueError, NotImplementedError) as error:
messages.error(request, "Error setting voicevlan: %s" % error)
def set_admin_status(fac, interface, request):
"""Set admin status for the interface
:type fac: nav.portadmin.snmputils.SNMPFactory
:type request: django.http.HttpRequest
"""
status_up = '1'
status_down = '2'
account = request.account
if 'ifadminstatus' in request.POST:
adminstatus = request.POST['ifadminstatus']
try:
if adminstatus == status_up:
LogEntry.add_log_entry(
account,
u'change status to up',
u'change status to up',
subsystem=u'portadmin',
object=interface,
)
_logger.info('%s: Setting ifadminstatus for %s to %s',
account.login, interface, 'up')
fac.set_if_up(interface.ifindex)
elif adminstatus == status_down:
LogEntry.add_log_entry(
account,
u'change status to down',
u'change status to down',
subsystem=u'portadmin',
object=interface,
)
_logger.info('%s: Setting ifadminstatus for %s to %s',
account.login, interface, 'down')
fac.set_if_down(interface.ifindex)
except (SnmpError, ValueError) as error:
messages.error(request, "Error setting ifadminstatus: %s" % error)
def response_based_on_result(result):
"""Return response based on content of result
result: dict containing result and message keys
"""
if result['messages']:
return JsonResponse(result, status=500)
else:
return JsonResponse(result)
def render_trunk_edit(request, interfaceid):
"""Controller for rendering trunk edit view"""
interface = Interface.objects.get(pk=interfaceid)
agent = get_factory(interface.netbox)
if request.method == 'POST':
try:
handle_trunk_edit(request, agent, interface)
except SnmpError as error:
messages.error(request, 'Error editing trunk: %s' % error)
else:
messages.success(request, 'Trunk edit successful')
account = request.account
netbox = interface.netbox
check_read_write(netbox, request)
try:
vlans = agent.get_netbox_vlans() # All vlans on this netbox
native_vlan, trunked_vlans = agent.get_native_and_trunked_vlans(
interface)
except SnmpError:
vlans = native_vlan = trunked_vlans = allowed_vlans = None
messages.error(request, 'Error getting trunk information')
else:
if should_check_access_rights(account):
allowed_vlans = find_allowed_vlans_for_user_on_netbox(
account, interface.netbox, agent)
else:
allowed_vlans = vlans
extra_path = [(netbox.sysname,
reverse('portadmin-sysname',
kwargs={'sysname': netbox.sysname})),
("Trunk %s" % interface,)]
context = get_base_context(extra_path)
context.update({'interface': interface, 'available_vlans': vlans,
'native_vlan': native_vlan, 'trunked_vlans': trunked_vlans,
'allowed_vlans': allowed_vlans})
return render_to_response('portadmin/trunk_edit.html',
context,
RequestContext(request))
def handle_trunk_edit(request, agent, interface):
"""Edit a trunk"""
native_vlan = int(request.POST.get('native_vlan'))
trunked_vlans = [int(vlan) for vlan in request.POST.getlist('trunk_vlans')]
if should_check_access_rights(get_account(request)):
        # A user can avoid the form restrictions by sending a forged post
        # request. Make sure only the allowed vlans are set.
old_native, old_trunked = agent.get_native_and_trunked_vlans(interface)
allowed_vlans = [v.vlan for v in
find_allowed_vlans_for_user(get_account(request))]
trunked_vlans = filter_vlans(trunked_vlans, old_trunked, allowed_vlans)
native_vlan = (native_vlan if native_vlan in allowed_vlans
else old_native)
_logger.info('Interface %s - native: %s, trunk: %s', interface,
native_vlan, trunked_vlans)
LogEntry.add_log_entry(
request.account,
u'set-vlan',
u'{actor}: {object} - native vlan: "%s", trunk vlans: "%s"' % (native_vlan, trunked_vlans),
subsystem=u'portadmin',
object=interface,
)
if trunked_vlans:
agent.set_trunk(interface, native_vlan, trunked_vlans)
else:
agent.set_access(interface, native_vlan)
@require_POST
def restart_interface(request):
"""Restart the interface by setting admin status to down and up"""
if not is_restart_interface_enabled():
_logger.debug('Not doing a restart of interface, it is configured off')
return HttpResponse()
interface = get_object_or_404(
Interface, pk=request.POST.get('interfaceid'))
fac = get_factory(interface.netbox)
if fac:
adminstatus = fac.get_if_admin_status(interface.ifindex)
if adminstatus == SNMPHandler.IF_ADMIN_STATUS_DOWN:
_logger.debug('Not restarting %s as it is down', interface)
return HttpResponse()
_logger.debug('Restarting interface %s', interface)
try:
# Restart interface so that client fetches new address
fac.restart_if(interface.ifindex)
except TimeOutException:
# Swallow this exception as it is not important. Others should
# create an error
pass
return HttpResponse()
else:
return HttpResponse(status=500)
@require_POST
def write_mem(request):
"""Do a write mem on the netbox"""
if not is_write_mem_enabled():
_logger.debug('Not doing a write mem, it is configured off')
return HttpResponse("Write mem is configured to not be done")
interface = get_object_or_404(
Interface, pk=request.POST.get('interfaceid'))
fac = get_factory(interface.netbox)
if fac:
try:
fac.write_mem()
except SnmpError as error:
error_message = 'Error doing write mem on {}: {}'.format(
fac.netbox, error)
_logger.error(error_message)
return HttpResponse(error_message, status=500)
except AttributeError:
error_message = 'Error doing write mem on {}: {}'.format(
fac.netbox, 'Write to memory not supported')
_logger.error(error_message)
return HttpResponse(error_message, status=500)
return HttpResponse()
else:
return HttpResponse(status=500)
def get_factory(netbox):
"""Get a SNMP factory instance"""
config = read_config()
timeout = get_config_value(config, 'general', 'timeout', fallback=3)
retries = get_config_value(config, 'general', 'retries', fallback=3)
try:
return SNMPFactory.get_instance(netbox, timeout=timeout,
retries=retries)
except SnmpError as error:
_logger.error('Error getting snmpfactory instance %s: %s',
netbox, error)
def get_config_value(config, section, key, fallback=None):
"""Get the value of key from a ConfigParser object, with fallback"""
try:
return config.get(section, key)
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
return fallback
|
sigmunau/nav
|
python/nav/web/portadmin/views.py
|
Python
|
gpl-2.0
| 25,624
|
#! /usr/bin/python3
# -*- coding:utf-8 -*-
# Functions and arbitrary parameters
def funcion(**nombres):
print (type(nombres))
for alumno in nombres:
print ("%s es alumno y tiene %d años" % (alumno, nombres[alumno]))
return nombres
#diccionario = {"Adrian":25, "Niño":25, "Roberto":23, "Celina":23}
print (funcion(Adrian = 25, Nino = 25, Roberto = 23, Celina = 23))
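# Equivalent call using dictionary unpacking (illustrative addition): the
# commented-out dictionary above could be passed with ** instead of spelling
# out each keyword argument:
#   datos = {"Adrian": 25, "Nino": 25, "Roberto": 23, "Celina": 23}
#   print(funcion(**datos))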
|
IntelBUAP/Python3
|
codigo27.py
|
Python
|
gpl-2.0
| 388
|
class OptParser:
"""Parses the options in the given file and allowed for the content to be returned.
The file is expected to contain key/value pairs. Empty lines, and lines where first non
white space is a # are ignored.
"""
def __init__(self, filename):
self.filename = filename
def parse(self):
myvars = {}
with open(self.filename) as myfile:
for line in myfile:
if line.strip().startswith('#') or len(line.strip()) == 0:
continue
name, var = line.partition("=")[::2]
myvars[name.strip()] = var.strip()
self.content = myvars
def getContent(self):
return self.content
if __name__ == '__main__':
import sys
parser = OptParser(sys.argv[1])
parser.parse()
print parser.getContent()
|
simonmikkelsen/roughcut
|
lib/optparser.py
|
Python
|
gpl-2.0
| 760
|
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
import ast
import re
import socket
import time
import os
from pathlib2 import Path
import config
import livestatus
import sites
from gui_exceptions import MKGeneralException
import cmk.paths
import cmk.ec.settings
import cmk.ec.export
import cmk.store
import cmk.utils
if cmk.is_managed_edition():
import managed
else:
managed = None
# ASN1 MIB source directory candidates. Non existing dirs are ok.
# Please sync these paths with htdocs/mkeventd.py
mib_dirs = [ ('/usr/share/snmp/mibs', _('System MIBs')) ]
socket_path = cmk.paths.omd_root + "/tmp/run/mkeventd/status"
compiled_mibs_dir = cmk.paths.omd_root + "/local/share/check_mk/compiled_mibs"
# Please sync these paths with htdocs/mkeventd.py
mib_upload_dir = cmk.paths.omd_root + "/local/share/snmp/mibs"
mib_dirs.insert(0, (cmk.paths.omd_root + "/share/snmp/mibs", _('MIBs shipped with Check_MK')))
mib_dirs.insert(0, (mib_upload_dir, _('Custom MIBs')))
syslog_priorities = [
(0, "emerg" ),
(1, "alert" ),
(2, "crit" ),
(3, "err" ),
(4, "warning" ),
(5, "notice" ),
(6, "info" ),
(7, "debug" ),
]
syslog_facilities = [
(0, "kern"),
(1, "user"),
(2, "mail"),
(3, "daemon"),
(4, "auth"),
(5, "syslog"),
(6, "lpr"),
(7, "news"),
(8, "uucp"),
(9, "cron"),
(10, "authpriv"),
(11, "ftp"),
(12, "(12: unused)"),
(13, "(13: unused)"),
(14, "(14: unused)"),
(15, "(15: unused)"),
(16, "local0"),
(17, "local1"),
(18, "local2"),
(19, "local3"),
(20, "local4"),
(21, "local5"),
(22, "local6"),
(23, "local7"),
(31, "snmptrap"),
]
phase_names = {
'counting' : _("counting"),
'delayed' : _("delayed"),
'open' : _("open"),
'ack' : _("acknowledged"),
'closed' : _("closed"),
}
action_whats = {
"ORPHANED" : _("Event deleted in counting state because rule was deleted."),
"NOCOUNT" : _("Event deleted in counting state because rule does not count anymore"),
"DELAYOVER" : _("Event opened because the delay time has elapsed before cancelling event arrived."),
"EXPIRED" : _("Event deleted because its livetime expired"),
"COUNTREACHED" : _("Event deleted because required count had been reached"),
"COUNTFAILED" : _("Event created by required count was not reached in time"),
"UPDATE" : _("Event information updated by user"),
"NEW" : _("New event created"),
"DELETE" : _("Event deleted manually by user"),
"EMAIL" : _("Email sent"),
"SCRIPT" : _("Script executed"),
"CANCELLED" : _("The event was cancelled because the corresponding OK message was received"),
"ARCHIVED" : _("Event was archived because no rule matched and archiving is activated in global settings."),
"AUTODELETE" : _("Event was deleted automatically"),
"CHANGESTATE" : _("State of event changed by user"),
}
def service_levels():
try:
return config.mkeventd_service_levels
except:
return [(0, "(no service level)")]
def action_choices(omit_hidden = False):
# The possible actions are configured in mkeventd.mk,
# not in multisite.mk (like the service levels). That
    # way we have no direct access to them and need
# to load them from the configuration.
return [ ( "@NOTIFY", _("Send monitoring notification")) ] + \
[ (a["id"], a["title"])
for a in eventd_configuration().get("actions", [])
if not omit_hidden or not a.get("hidden") ]
cached_config = None
def eventd_configuration():
global cached_config
if cached_config and cached_config[0] is html:
return cached_config[1]
settings = cmk.ec.settings.settings('',
Path(cmk.paths.omd_root),
Path(cmk.paths.default_config_dir),
[''])
config = cmk.ec.export.load_config(settings)
cached_config = (html, config)
return config
def daemon_running():
return os.path.exists(socket_path)
# Note: in order to be able to simulate an original IP address
# we put hostname|ipaddress into the host name field. The EC
# recognizes this and unpacks the data correctly.
def send_event(event):
# "<%PRI%>@%TIMESTAMP%;%SL% %HOSTNAME% %syslogtag% %msg%\n"
prio = (event["facility"] << 3) + event["priority"]
rfc = [
"<%d>@%d" % (prio, int(time.time())),
"%d %s|%s %s: %s\n" % (event["sl"], event["host"],
event["ipaddress"], event["application"], event["text"]),
]
execute_command("CREATE", map(cmk.utils.make_utf8, rfc), site=event["site"])
return ";".join(rfc)
def get_local_ec_status():
response = livestatus.LocalConnection().query("GET eventconsolestatus")
return dict(zip(response[0], response[1]))
def replication_mode():
try:
status = get_local_ec_status()
return status["status_replication_slavemode"]
except livestatus.MKLivestatusSocketError:
return "stopped"
# Only use this for master/slave replication. For status queries use livestatus
def query_ec_directly(query):
try:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
timeout = config.mkeventd_connect_timeout
except:
timeout = 10
sock.settimeout(timeout)
sock.connect(socket_path)
sock.sendall(query)
sock.shutdown(socket.SHUT_WR)
response_text = ""
while True:
chunk = sock.recv(8192)
response_text += chunk
if not chunk:
break
return ast.literal_eval(response_text)
except SyntaxError, e:
raise MKGeneralException(_("Invalid response from event daemon: "
"<pre>%s</pre>") % response_text)
except Exception, e:
raise MKGeneralException(_("Cannot connect to event daemon via %s: %s") %
(socket_path, e))
def execute_command(name, args=None, site=None):
if args:
formated_args = ";" + ";".join(args)
else:
formated_args = ""
query = "[%d] EC_%s%s" % (int(time.time()), name, formated_args)
sites.live().command(query, site)
def get_total_stats(only_sites):
stats_keys = [
"status_average_message_rate",
"status_average_rule_trie_rate",
"status_average_rule_hit_rate",
"status_average_event_rate",
"status_average_connect_rate",
"status_average_overflow_rate",
"status_average_rule_trie_rate",
"status_average_rule_hit_rate",
"status_average_processing_time",
"status_average_request_time",
"status_average_sync_time",
]
stats_per_site = list(get_stats_per_site(only_sites, stats_keys))
# First simply add rates. Times must then be averaged
# weighted by message rate or connect rate
total_stats = {}
for row in stats_per_site:
for key, value in row.items():
if key.endswith("rate"):
total_stats.setdefault(key, 0.0)
total_stats[key] += value
if not total_stats:
if only_sites is None:
raise MKGeneralException(_("Got no data from any site"))
else:
raise MKGeneralException(_("Got no data from this site"))
for row in stats_per_site:
for time_key, in_relation_to in [
( "status_average_processing_time", "status_average_message_rate" ),
( "status_average_request_time", "status_average_connect_rate" ),
]:
total_stats.setdefault(time_key, 0.0)
if total_stats[in_relation_to]: # avoid division by zero
my_weight = row[in_relation_to] / total_stats[in_relation_to]
total_stats[time_key] += my_weight * row[time_key]
total_sync_time = 0.0
count = 0
for row in stats_per_site:
if row["status_average_sync_time"] > 0.0:
count += 1
total_sync_time += row["status_average_sync_time"]
if count > 0:
total_stats["status_average_sync_time"] = total_sync_time / count
return total_stats
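# Worked example for the weighting above (illustrative numbers): if site A
# handles 10 msg/s with a 2 ms processing time and site B 30 msg/s at 4 ms,
# the summed rate is 40 msg/s and the combined processing time becomes
# 10/40 * 2 + 30/40 * 4 = 3.5 ms -- rates simply add, while times are
# averaged weighted by the rate they relate to.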
def get_stats_per_site(only_sites, stats_keys):
try:
sites.live().set_only_sites(only_sites)
for list_row in sites.live().query("GET eventconsolestatus\nColumns: %s" % " ".join(stats_keys)):
yield dict(zip(stats_keys, list_row))
finally:
sites.live().set_only_sites(None)
# Rule matching for simulation. Yes - there is some hateful code duplication
# here. But it does not make sense to query the live eventd here since it
# does not know anything about the currently configured but not yet activated
# rules. And also we do not want to have shared code.
def event_rule_matches(rule_pack, rule, event):
result = event_rule_matches_non_inverted(rule_pack, rule, event)
if rule.get("invert_matching"):
if type(result) == tuple:
return _("The rule would match, but matching is inverted.")
else:
return False, ()
else:
return result
def event_rule_matches_non_inverted(rule_pack, rule, event):
if False == match_ipv4_network(rule.get("match_ipaddress", "0.0.0.0/0"), event["ipaddress"]):
return _("The source IP address does not match.")
if False == match(rule.get("match_host"), event["host"], complete=True):
return _("The host name does not match.")
if False == match(rule.get("match_application"), event["application"], complete=False):
return _("The application (syslog tag) does not match")
if "match_facility" in rule and event["facility"] != rule["match_facility"]:
return _("The syslog facility does not match")
# First try cancelling rules
if "match_ok" in rule or "cancel_priority" in rule:
if "cancel_priority" in rule:
up, lo = rule["cancel_priority"]
cp = event["priority"] >= lo and event["priority"] <= up
else:
cp = True
match_groups = match(rule.get("match_ok", ""), event["text"], complete = False)
if match_groups != False and cp:
if match_groups == True:
match_groups = ()
return True, match_groups
try:
match_groups = match(rule.get("match"), event["text"], complete = False)
except Exception, e:
return _("Invalid regular expression: %s") % e
if match_groups == False:
return _("The message text does not match the required pattern.")
if "match_priority" in rule:
prio_from, prio_to = rule["match_priority"]
if prio_from > prio_to:
prio_to, prio_from = prio_from, prio_to
p = event["priority"]
if p < prio_from or p > prio_to:
return _("The syslog priority is not in the required range.")
if "match_sl" in rule:
sl_from, sl_to = rule["match_sl"]
if sl_from > sl_to:
sl_to, sl_from = sl_from, sl_to
p = event.get("sl")
if p == None:
return _("No service level is set in event")
if p < sl_from or p > sl_to:
return _("Wrong service level %d (need %d..%d)") % (p, sl_from, sl_to)
if "match_timeperiod" in rule:
reason = check_timeperiod(rule["match_timeperiod"])
if reason:
return reason
if cmk.is_managed_edition():
import managed
if "customer" in rule_pack:
rule_customer_id = rule_pack["customer"]
else:
rule_customer_id = rule.get("customer", managed.SCOPE_GLOBAL)
site_customer_id = managed.get_customer_id(config.sites[event["site"]])
if rule_customer_id != managed.SCOPE_GLOBAL and site_customer_id != rule_customer_id:
return _("Wrong customer")
if match_groups == True:
match_groups = () # no matching groups
return False, match_groups
def check_timeperiod(tpname):
try:
livesock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
livesock.connect(cmk.paths.livestatus_unix_socket)
livesock.send("GET timeperiods\nFilter: name = %s\nColumns: in\n" % tpname)
livesock.shutdown(socket.SHUT_WR)
answer = livesock.recv(100).strip()
if answer == "":
return _("The timeperiod %s is not known to the local monitoring core") % tpname
elif int(answer) == 0:
return _("The timeperiod %s is currently not active") % tpname
except Exception, e:
if config.debug:
raise
return _("Cannot update timeperiod information for %s: %s") % (tpname, e)
def match(pattern, text, complete = True):
if pattern == None:
return True
else:
if complete:
if not pattern.endswith("$"):
pattern += '$'
m = re.compile(pattern, re.IGNORECASE).match(text)
else:
m = re.compile(pattern, re.IGNORECASE).search(text)
if m:
return m.groups()
else:
return False
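# Added note on the helper above: with complete=True a "$" is appended (if
# missing) and re.match() already anchors at the start, so the text must
# match in full. A successful match returns m.groups(), which is the empty
# tuple () for patterns without groups -- callers therefore compare against
# False explicitly instead of testing truthiness.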
def match_ipv4_network(pattern, ipaddress_text):
network, network_bits = parse_ipv4_network(pattern) # is validated by valuespec
if network_bits == 0:
        return True # even if ipaddress is empty
try:
ipaddress = parse_ipv4_address(ipaddress_text)
except:
return False # invalid address never matches
# first network_bits of network and ipaddress must be
# identical. Create a bitmask.
bitmask = 0
for n in range(32):
bitmask = bitmask << 1
if n < network_bits:
bit = 1
else:
bit = 0
bitmask += bit
return (network & bitmask) == (ipaddress & bitmask)
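# Illustrative example for the mask built above: "10.1.0.0/16" gives
# network_bits = 16, so the loop produces bitmask = 0xffff0000 and only the
# top 16 bits of both addresses are compared:
#   match_ipv4_network("10.1.0.0/16", "10.1.200.7")  # -> True
#   match_ipv4_network("10.1.0.0/16", "10.2.0.1")    # -> False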
def parse_ipv4_address(text):
parts = map(int, text.split("."))
return (parts[0] << 24) + (parts[1] << 16) + (parts[2] << 8) + parts[3]
def parse_ipv4_network(text):
if "/" not in text:
return parse_ipv4_address(text), 32
network_text, bits_text = text.split("/")
return parse_ipv4_address(network_text), int(bits_text)
|
huiyiqun/check_mk
|
web/htdocs/mkeventd.py
|
Python
|
gpl-2.0
| 15,616
|
import re
class Aunt:
name = ''
def __init__(self, name):
self.name = name
self.count = {'children':-1,
'cats' : -1,
'samoyeds' : -1,
'pomeranians' : -1,
'akitas' : -1,
'vizslas' : -1,
'goldfish' : -1,
'trees' : -1,
'cars' : -1,
'perfumes' : -1}
def setCount(self, key, value):
self.count[key.strip()] = value
def filterKey(self, key, value):
if self.count[key] == -1:
return True
else:
return self.count[key] == value
def filterCalib(self, key, value):
if self.count[key] == -1:
return True
else:
if key in ('cats', 'trees'):
return self.count[key] > value
if key in ('pomeranians','goldfish'):
return self.count[key] < value
return self.count[key] == value
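    # Added note: filterCalib encodes part two of the puzzle -- the detection
    # machine reads cats and trees as "greater than" readings and pomeranians
    # and goldfish as "fewer than", while every other attribute must match
    # exactly (filterKey is the plain exact-match variant from part one).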
def filterRule(AuList, key, val):
RetAu = []
for Aun in AuList:
if Aun.filterCalib(key, val):
RetAu.append(Aun)
return RetAu
AuntList = []
fh = open('puzzle16.txt','r')
match_rule = re.compile('(?P<Name>Sue [0-9]{1,3}): (?P<attrib>.+)$')
for line in fh:
matches = match_rule.match(line)
a = Aunt(matches.group('Name'))
attrib_list = matches.group('attrib').split(',')
for attri in attrib_list:
key = attri.split(': ')[0]
val = int(attri.split(': ')[1])
a.setCount(key, val)
AuntList.append(a)
a = None
print len(AuntList)
F1 = filterRule(AuntList, 'children', 3)
print len(F1)
F2 = filterRule(F1, 'cats', 7)
print len(F2)
F3 = filterRule(F2, 'samoyeds', 2)
print len(F3)
F4 = filterRule(F3, 'pomeranians', 3)
print len(F4)
F5 = filterRule(F4, 'akitas', 0)
print len(F5)
F6 = filterRule(F5, 'vizslas', 0)
print len(F6)
F7 = filterRule(F6, 'goldfish', 5)
print len(F7)
F8 = filterRule(F7, 'trees', 3)
print len(F8)
F9 = filterRule(F8, 'cars', 2)
print len(F9)
F10 = filterRule(F9, 'perfumes', 1)
print len(F10)
|
hasteur/advent_of_code
|
2015/puzzle16.py
|
Python
|
gpl-2.0
| 2,024
|
"""
Example Directory
.. automodule:: pyatb.examples.get_current_price
"""
|
hsonntag/yatb
|
pyatb/examples/__init__.py
|
Python
|
gpl-2.0
| 77
|
# This file is part of BurnMan - a thermoelastic and thermodynamic toolkit
# for the Earth and Planetary Sciences
# Copyright (C) 2012 - 2021 by the BurnMan team, released under the GNU
# GPL v2 or later.
# This module provides higher level chemistry-related functions.
from __future__ import absolute_import
import numpy as np
from scipy.optimize import fsolve
from .. import constants
# Import common lower level functions for backwards compatibility
from ..utils.chemistry import dictionarize_formula, formula_mass
from ..utils.chemistry import formula_to_string, site_occupancies_to_strings
def fugacity(standard_material, assemblage):
"""
Parameters
----------
standard_material: burnman.Material object
set_method and set_state should already have been used
material must have a formula as a dictionary parameter
assemblage: burnman.Composite object
set_method and set_state should already have been used
Returns
-------
fugacity : float
Value of the fugacity of the component with respect to
the standard material
"""
component_formula = standard_material.params['formula']
chemical_potential = assemblage.chemical_potential([component_formula])[0]
fugacity = np.exp((chemical_potential - standard_material.gibbs)
/ (constants.gas_constant * assemblage.temperature))
return fugacity
def relative_fugacity(component_formula, assemblage, reference_assemblage):
"""
Parameters
----------
component_formula: dictionary
Chemical formula for which to compute the relative fugacity.
assemblage: burnman.Composite object
set_method and set_state should already have been used.
reference_assemblage: burnman.Composite object
set_method and set_state should already have been used.
Returns
-------
relative_fugacity : float
Value of the fugacity of the component in the assemblage
with respect to the reference_assemblage.
"""
chemical_potential = assemblage.chemical_potential([component_formula])[0]
reference_chemical_potential = reference_assemblage.chemical_potential([component_formula])[0]
relative_fugacity = np.exp((chemical_potential
- reference_chemical_potential)
/ (constants.gas_constant
* assemblage.temperature))
return relative_fugacity
def equilibrium_pressure(minerals, stoichiometry, temperature,
pressure_initial_guess=1.e5):
"""
Given a list of minerals, their reaction stoichiometries
and a temperature of interest, compute the
equilibrium pressure of the reaction.
Parameters
----------
minerals : list of minerals
List of minerals involved in the reaction.
stoichiometry : list of floats
Reaction stoichiometry for the minerals provided.
Reactants and products should have the opposite signs [mol]
temperature : float
Temperature of interest [K]
pressure_initial_guess : optional float
Initial pressure guess [Pa]
Returns
-------
pressure : float
The equilibrium pressure of the reaction [Pa]
"""
def eqm(P, T):
gibbs = 0.
for i, mineral in enumerate(minerals):
mineral.set_state(P[0], T)
gibbs = gibbs + mineral.gibbs * stoichiometry[i]
return gibbs
pressure = fsolve(eqm, [pressure_initial_guess], args=(temperature))[0]
return pressure
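# Hypothetical usage sketch (the mineral objects are placeholders, not
# defined in this module): the pressure at which one mole of phase `low`
# converts to one mole of phase `high` at 1500 K could be found with
#   P = equilibrium_pressure([low, high], [-1.0, 1.0], 1500.)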
def equilibrium_temperature(minerals, stoichiometry, pressure, temperature_initial_guess=1000.):
"""
Given a list of minerals, their reaction stoichiometries
and a pressure of interest, compute the
equilibrium temperature of the reaction.
Parameters
----------
minerals : list of minerals
List of minerals involved in the reaction.
stoichiometry : list of floats
Reaction stoichiometry for the minerals provided.
Reactants and products should have the opposite signs [mol]
pressure : float
Pressure of interest [Pa]
temperature_initial_guess : optional float
Initial temperature guess [K]
Returns
-------
temperature : float
The equilibrium temperature of the reaction [K]
"""
def eqm(T, P):
gibbs = 0.
for i, mineral in enumerate(minerals):
mineral.set_state(P, T[0])
gibbs = gibbs + mineral.gibbs * stoichiometry[i]
return gibbs
temperature = fsolve(eqm, [temperature_initial_guess], args=(pressure))[0]
return temperature
def invariant_point(minerals_r1, stoichiometry_r1,
minerals_r2, stoichiometry_r2,
pressure_temperature_initial_guess=[1.e9, 1000.]):
"""
    Given two reactions, each specified by a list of minerals and their
    reaction stoichiometries, compute the invariant point, i.e. the
    pressure and temperature at which both reactions are in equilibrium.
    Parameters
    ----------
    minerals_r1, minerals_r2 : lists of minerals
        Lists of minerals involved in the first and second reaction.
    stoichiometry_r1, stoichiometry_r2 : lists of floats
        Reaction stoichiometries for the minerals provided.
        Reactants and products should have the opposite signs [mol]
    pressure_temperature_initial_guess : optional list of two floats
        Initial pressure [Pa] and temperature [K] guess
    Returns
    -------
    pressure, temperature : floats
        The pressure [Pa] and temperature [K] of the invariant point
"""
def eqm(PT):
P, T = PT
gibbs_r1 = 0.
for i, mineral in enumerate(minerals_r1):
mineral.set_state(P, T)
gibbs_r1 = gibbs_r1 + mineral.gibbs * stoichiometry_r1[i]
gibbs_r2 = 0.
for i, mineral in enumerate(minerals_r2):
mineral.set_state(P, T)
gibbs_r2 = gibbs_r2 + mineral.gibbs * stoichiometry_r2[i]
return [gibbs_r1, gibbs_r2]
pressure, temperature = fsolve(eqm, pressure_temperature_initial_guess)
return pressure, temperature
def hugoniot(mineral, P_ref, T_ref, pressures, reference_mineral=None):
"""
Calculates the temperatures (and volumes) along a Hugoniot
as a function of pressure according to the Hugoniot equation
    U2 - U1 = 0.5*(P2 + P1)*(V1 - V2), where U and V are the
internal energies and volumes (mass or molar) and U = F + TS
Parameters
----------
mineral : mineral
Mineral for which the Hugoniot is to be calculated.
P_ref : float
Reference pressure [Pa]
T_ref : float
Reference temperature [K]
pressures : numpy array of floats
Set of pressures [Pa] for which the Hugoniot temperature
and volume should be calculated
reference_mineral : mineral
Mineral which is stable at the reference conditions
Provides an alternative U_0 and V_0 when the reference
mineral transforms to the mineral of interest at some
(unspecified) pressure.
Returns
-------
temperatures : numpy array of floats
The Hugoniot temperatures at pressure
volumes : numpy array of floats
The Hugoniot volumes at pressure
"""
def Ediff(T, mineral, P, P_ref, U_ref, V_ref):
mineral.set_state(P, T[0])
U = mineral.helmholtz + T[0] * mineral.S
V = mineral.V
        return (U - U_ref) - 0.5 * (P + P_ref) * (V_ref - V)
if reference_mineral is None:
reference_mineral = mineral
reference_mineral.set_state(P_ref, T_ref)
U_ref = reference_mineral.helmholtz + T_ref * reference_mineral.S
V_ref = reference_mineral.V
temperatures = np.empty_like(pressures)
volumes = np.empty_like(pressures)
for i, P in enumerate(pressures):
temperatures[i] = fsolve(
Ediff, [T_ref], args=(mineral, P, P_ref, U_ref, V_ref))[0]
volumes[i] = mineral.V
return temperatures, volumes
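# Hypothetical usage sketch (the mineral object `phase` is a placeholder):
# a Hugoniot from ambient conditions up to 100 GPa could be computed as
#   pressures = np.linspace(1.e5, 100.e9, 101)
#   temperatures, volumes = hugoniot(phase, 1.e5, 300., pressures)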
|
geodynamics/burnman
|
burnman/tools/chemistry.py
|
Python
|
gpl-2.0
| 8,056
|
# Copyright (C) 2013-2015 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Fox Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from time import time
from datetime import timedelta
from helpers.command import Command
@Command('uptime', ['handler'])
def cmd(send, _, args):
"""Shows the bot's uptime.
Syntax: {command}
"""
curr = time()
uptime = args['handler'].uptime
starttime = curr - uptime['start']
reloaded = curr - uptime['reloaded']
send("Time since start: %s" % timedelta(seconds=starttime))
send("Time since reload: %s" % timedelta(seconds=reloaded))
|
Polarcraft/KbveBot
|
commands/uptime.py
|
Python
|
gpl-2.0
| 1,314
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='pyapi-emergence',
url='',
author='Neil Newman, Jonathan Marini',
author_email='nnewman2@albany.edu, jmarini@ieee.org',
packages=['emergence'],
install_requires=['requests'],
)
|
wbg-optronix-lab/pyapi-emergence
|
setup.py
|
Python
|
gpl-2.0
| 357
|
# pylint:disable=R0201
from OpenOrange import *
from Document import Document
from Label import Label
from SQLTools import codeOrder, monthCode
from datetime import datetime
class AlotmentDoc(Document):
classattr = "classattr"
def getRecorda(self):
class newObj(object):
Status = 1
RootLabel = "100"
SerNr = "SerNr"
Labels = "100,200"
TransDate = datetime.now().date()
def name(self):
return "Alotment"
return newObj()
def getExtra(self, val1, val2="2", val3="3", val4=4):
specs = self.getRecorda()
sql = "WHERE?AND [al].{%s} IN ('%s')\n" % ("SerNr", "','".join([val1, val3, val2]))
sql += "WHERE?AND [al].{SerNr} = i|%i|\n" % specs.Status
sql += "WHERE?AND [al].TransDate < d|%s|\n" % specs.TransDate
sql += "WHERE?AND SerNr = "
if specs.Status == 1:
sql += "%s" % val4
if 1 in [0, 0]:
pass
else:
sql += ""
return sql
def getExtra2(self, test):
parent = self
specs = self.getRecorda()
mydict = {1:1, 2:2}
mylist = [1, 2]
listcomp = "listcomp," + "extra"
if test > 0:
return specs.Status
x = "'%s' as test_date\n, " % date("")
x += "'%s' as test_time\n, " % time("")
x += "'%i' as test_len\n, " % len(specs.RootLabel)
x += "'%s' as test_map\n, " % "','".join(map(str, mylist))
x += "'%s' as test_keys\n, " % "','".join(mydict.keys())
x += "'%s' as test_subscript\n," % ["SerNr","RoomType"][specs.Status]
#x += "'%s' as test_classattr\n, " % self.classattr
x += '"%s" as test_dic\n, ' % mydict
x += "'%s' as test_parentattr\n, " % parent.record #Parent None attribute
x += '"%s" as test_binoplist\n, ' % mylist #+ mylist
x += '"%s" as test_listcomp1\n, ' % "".join([a.strip() for a in listcomp.split(',')])
x += '"%s" as test_listcomp2\n, ' % "".join([d for d in listcomp])
x += '"%s" as test_listcomp3\n, ' % "".join([str(b) for b in listcomp])
x += '"%s" as test_listcomp4\n,' % "".join([c.strip() for c in listcomp])
x += '"%s" as test_listcomp5\n,' % [('s|%s|') % (z) for z in mylist]
x += '"%s" as test_listcomp6\n,' % "".join([y for y in ("a", "b")])
# pylint:disable=E1101
x += '"%s" as inferenceErr\n,' % self.non.existant
x += '"%s" as indexErr\n' % mylist[2]
return x
def getExtra3(self):
specs = self.getRecorda()
subquery = Query()
subquery.sql = "SerNr"
return "ORDER BY %s, %s" % (specs.SerNr, subquery.sql)
def getExtra4(self):
specs = self.getRecorda()
labels = None
if specs.Labels:
lis = []
labs = specs.Labels.split(",")
for lb in labs:
lis.append("','".join(Label.getTreeLeaves(lb)))
labels = "','".join(lis)
return "WHERE?AND SerNr IN ('%s') " % labels
def getExtra5(self, txt):
txt = txt.replace(":1","RoomType IS NULL\n")
return txt
def getExtra6(self):
txt = ""
q = {}
q["one"] = Query()
q["one"].sql = "WHERE?AND SerNr IS NULL\n"
q["two"] = Query()
q["two"].sql = "WHERE?AND SerNr IS NOT NULL\n"
slist = ["one", "two"]
for index in slist:
txt += q[index].sql
return txt
def getExtra7(self):
specs = self.getRecorda()
factor = 0.0
if 1 > 0:
factor = (float(specs.Status) / float(specs.Status))
txt = "WHERE?AND (%s / 1) * %s > 0\n" % (1, factor)
return txt
def run(self):
specs = self.getRecorda()
leaves = Label.getTreeLeaves(specs.RootLabel)
query7 = Query()
query7.sql = "SELECT SerNr, %s,\n" % codeOrder("SerNr", leaves)
query7.sql += monthCode("[al].TransDate")
query7.sql += "\n, %s, \n" % self.getExtra2(test=1)
query7.sql += self.getExtra2(0)
query7.sql += "\nFROM %s al\n" % specs.name()
query7.sql += self.getExtra("1", "2", val3="33")
query7.sql += self.getExtra4()
query7.sql += self.getExtra5("WHERE?AND :1")
query7.sql += self.getExtra6()
query7.sql += self.getExtra7()
method = getattr(self, "getExtra3____"[:-4])
query7.sql += method()
query7.open()
self.run2([100, 200])
def run2(self, extraList):
query2 = Query()
query2.sql = self.getMore(extraList)
query2.open()
def getMore(self, moreList):
return "SELECT * FROM Alotment WHERE SerNr IN ('%s')" % "','".join(moreList)
|
ancho85/pylint-playero-plugin
|
tests/input/func_noerror_query_getattr.py
|
Python
|
gpl-2.0
| 4,814
|
# -*- coding: utf-8 -*-
#
# test_connect_all_to_all.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
import unittest
import numpy as np
import scipy.stats
import test_connect_helpers as hf
from test_connect_parameters import TestParams
@hf.nest.ll_api.check_stack
class TestAllToAll(TestParams):
# specify connection pattern
rule = 'all_to_all'
conn_dict = {'rule': rule}
# sizes of populations
N1 = 6
N2 = 7
N1_array = 500
N2_array = 10
def testConnectivity(self):
self.setUpNetwork(self.conn_dict)
# make sure all connections do exist
M = hf.get_connectivity_matrix(self.pop1, self.pop2)
M_all = np.ones((len(self.pop2), len(self.pop1)))
hf.mpi_assert(M, M_all, self)
# make sure no connections were drawn from the target to the source
# population
M = hf.get_connectivity_matrix(self.pop2, self.pop1)
M_none = np.zeros((len(self.pop1), len(self.pop2)))
hf.mpi_assert(M, M_none, self)
def testInputArray(self):
for label in ['weight', 'delay']:
syn_params = {}
if label == 'weight':
self.param_array = np.arange(
self.N1_array * self.N2_array, dtype=float
).reshape(self.N2_array, self.N1_array)
elif label == 'delay':
self.param_array = np.arange(
1, self.N1_array * self.N2_array + 1
).reshape(self.N2_array, self.N1_array) * 0.1
syn_params[label] = self.param_array
hf.nest.ResetKernel()
self.setUpNetwork(self.conn_dict, syn_params,
N1=self.N1_array, N2=self.N2_array)
M_nest = hf.get_weighted_connectivity_matrix(
self.pop1, self.pop2, label)
hf.mpi_assert(M_nest, self.param_array, self)
def testInputArrayWithoutAutapses(self):
self.conn_dict['allow_autapses'] = False
for label in ['weight', 'delay']:
syn_params = {}
if label == 'weight':
self.param_array = np.arange(
self.N1 * self.N1, dtype=float).reshape(self.N1, self.N1)
elif label == 'delay':
self.param_array = np.arange(
1, self.N1 * self.N1 + 1).reshape(self.N1, self.N1) * 0.1
syn_params[label] = self.param_array
self.setUpNetworkOnePop(self.conn_dict, syn_params)
M_nest = hf.get_weighted_connectivity_matrix(
self.pop, self.pop, label)
np.fill_diagonal(self.param_array, 0)
hf.mpi_assert(M_nest, self.param_array, self)
def testInputArrayRPort(self):
syn_params = {}
neuron_model = 'iaf_psc_exp_multisynapse'
neuron_dict = {'tau_syn': [0.1 + i for i in range(self.N2)]}
self.pop1 = hf.nest.Create(neuron_model, self.N1)
self.pop2 = hf.nest.Create(neuron_model, self.N2, neuron_dict)
self.param_array = np.transpose(np.asarray(
[np.arange(1, self.N2 + 1) for i in range(self.N1)]))
syn_params['receptor_type'] = self.param_array
hf.nest.Connect(self.pop1, self.pop2, self.conn_dict, syn_params)
M = hf.get_weighted_connectivity_matrix(
self.pop1, self.pop2, 'receptor')
hf.mpi_assert(M, self.param_array, self)
def testInputArrayToStdpSynapse(self):
params = ['Wmax', 'alpha', 'lambda', 'mu_minus', 'mu_plus', 'tau_plus']
syn_params = {'synapse_model': 'stdp_synapse'}
values = [
np.arange(self.N1 * self.N2, dtype=float).reshape(self.N2, self.N1)
for i in range(6)
]
for i, param in enumerate(params):
syn_params[param] = values[i]
self.setUpNetwork(self.conn_dict, syn_params)
for i, param in enumerate(params):
a = hf.get_weighted_connectivity_matrix(
self.pop1, self.pop2, param)
hf.mpi_assert(a, values[i], self)
# test single threaded for now
def testRPortDistribution(self):
n_rport = 10
nr_neurons = 100
hf.nest.ResetKernel() # To reset local_num_threads
neuron_model = 'iaf_psc_exp_multisynapse'
neuron_dict = {'tau_syn': [0.1 + i for i in range(n_rport)]}
self.pop1 = hf.nest.Create(neuron_model, nr_neurons, neuron_dict)
self.pop2 = hf.nest.Create(neuron_model, nr_neurons, neuron_dict)
syn_params = {'synapse_model': 'static_synapse'}
syn_params['receptor_type'] = 1 + hf.nest.random.uniform_int(n_rport)
hf.nest.Connect(self.pop1, self.pop2, self.conn_dict, syn_params)
M = hf.get_weighted_connectivity_matrix(
self.pop1, self.pop2, 'receptor')
M = hf.gather_data(M)
if M is not None:
M = M.flatten()
frequencies = scipy.stats.itemfreq(M)
self.assertTrue(np.array_equal(frequencies[:, 0], np.arange(
1, n_rport + 1)), 'Missing or invalid rports')
chi, p = scipy.stats.chisquare(frequencies[:, 1])
self.assertGreater(p, self.pval)
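            # Added note: the chi-square test above asks whether the observed
            # rport counts are compatible with a uniform draw over 1..n_rport;
            # a p-value at or below self.pval would flag a significantly
            # non-uniform distribution and fail the assertion.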
def suite():
suite = unittest.TestLoader().loadTestsFromTestCase(TestAllToAll)
return suite
def run():
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
if __name__ == '__main__':
run()
|
lekshmideepu/nest-simulator
|
testsuite/pytests/test_connect_all_to_all.py
|
Python
|
gpl-2.0
| 6,019
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib
import urllib2
import cookielib
import base64
import re
import json
import hashlib
'''该登录程序是参考网上写的'''
cj = cookielib.LWPCookieJar()
cookie_support = urllib2.HTTPCookieProcessor(cj)
opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(opener)
postdata = {
'entry': 'weibo',
'gateway': '1',
'from': '',
'savestate': '7',
'userticket': '1',
'ssosimplelogin': '1',
'vsnf': '1',
'vsnval': '',
'su': '',
'service': 'miniblog',
'servertime': '',
'nonce': '',
'pwencode': 'wsse',
'sp': '',
'encoding': 'UTF-8',
'url': 'http://weibo.com/ajaxlogin.php?framelogin=1&callback=parent.sinaSSOController.feedBackUrlCallBack',
'returntype': 'META'
}
def get_servertime():
url = 'http://login.sina.com.cn/sso/prelogin.php?entry=weibo&callback=sinaSSOController.preloginCallBack&su=dW5kZWZpbmVk&client=ssologin.js(v1.3.18)&_=1329806375939'
data = urllib2.urlopen(url).read()
p = re.compile('\((.*)\)')
try:
json_data = p.search(data).group(1)
data = json.loads(json_data)
servertime = str(data['servertime'])
nonce = data['nonce']
return servertime, nonce
except:
print 'Get severtime error!'
return None
def get_pwd(pwd, servertime, nonce):
pwd1 = hashlib.sha1(pwd).hexdigest()
pwd2 = hashlib.sha1(pwd1).hexdigest()
pwd3_ = pwd2 + servertime + nonce
pwd3 = hashlib.sha1(pwd3_).hexdigest()
return pwd3
def get_user(username):
username_ = urllib.quote(username)
username = base64.encodestring(username_)[:-1]
return username
def enableCookie():
cookiejar = cookielib.LWPCookieJar()
cookie_support = urllib2.HTTPCookieProcessor(cookiejar)
opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(opener)
def login( username, pwd ):
url = 'http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.3.18)'
#enableCookie()
try:
servertime, nonce = get_servertime()
except:
return
global postdata
postdata['servertime'] = servertime
postdata['nonce'] = nonce
postdata['su'] = get_user(username)
postdata['sp'] = get_pwd(pwd, servertime, nonce)
postdata = urllib.urlencode(postdata)
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:8.0) Gecko/20100101 Firefox/8.0'}
req = urllib2.Request(
url = url,
data = postdata,
headers = headers
)
result = urllib2.urlopen(req)
text = result.read()
p = re.compile('location\.replace\(\'(.*?)\'\)')
try:
login_url = p.search(text).group(1)
#print login_url
urllib2.urlopen(login_url)
print "Login success!"
return True
except:
print 'Login error!'
return False
|
wangtaoking1/found_website
|
项目代码/Login.py
|
Python
|
gpl-2.0
| 2,910
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Common functions
"""
# Import Local Modules
from marvin.cloudstackAPI import (listConfigurations,
listPhysicalNetworks,
listRegions,
addNetworkServiceProvider,
updateNetworkServiceProvider,
listDomains,
listZones,
listPods,
listOsTypes,
listTemplates,
updateResourceLimit,
listRouters,
listNetworks,
listClusters,
listSystemVms,
listStoragePools,
listVirtualMachines,
listLoadBalancerRuleInstances,
listFirewallRules,
listVolumes,
listIsos,
listAccounts,
listSnapshotPolicies,
listDiskOfferings,
listVlanIpRanges,
listUsageRecords,
listNetworkServiceProviders,
listHosts,
listPublicIpAddresses,
listPortForwardingRules,
listLoadBalancerRules,
listSnapshots,
listUsers,
listEvents,
listServiceOfferings,
listVirtualRouterElements,
listNetworkOfferings,
listResourceLimits,
listVPCOfferings,
migrateSystemVm)
from marvin.sshClient import SshClient
from marvin.codes import (PASS, FAILED, ISOLATED_NETWORK, VPC_NETWORK,
BASIC_ZONE, FAIL, NAT_RULE, STATIC_NAT_RULE,
RESOURCE_PRIMARY_STORAGE, RESOURCE_SECONDARY_STORAGE,
RESOURCE_CPU, RESOURCE_MEMORY, PUBLIC_TRAFFIC,
GUEST_TRAFFIC, MANAGEMENT_TRAFFIC, STORAGE_TRAFFIC,
VMWAREDVS)
from marvin.lib.utils import (validateList,
xsplit,
get_process_status,
random_gen,
format_volume_to_ext3)
from marvin.lib.base import (PhysicalNetwork,
PublicIPAddress,
NetworkOffering,
NATRule,
StaticNATRule,
Volume,
Account,
Project,
Snapshot,
NetScaler,
VirtualMachine,
FireWallRule,
Template,
Network,
Host,
Resources,
Configurations,
Router,
PublicIpRange,
StorageNetworkIpRange,
TrafficType)
from marvin.lib.vcenter import Vcenter
from netaddr import IPAddress
import random
import re
import itertools
import random
import hashlib
# Import System modules
import time
def is_config_suitable(apiclient, name, value):
"""
    Check whether the deployment has the expected `value` for the global setting `name`
@return: true if value is set, else false
"""
configs = Configurations.list(apiclient, name=name)
assert(
configs is not None and isinstance(
configs,
list) and len(
configs) > 0)
return configs[0].value == value
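# Hypothetical usage sketch (setting name and value are illustrative only):
#   if not is_config_suitable(apiclient, "expunge.delay", "60"):
#       raise unittest.SkipTest("expunge.delay is not set to 60")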
def wait_for_cleanup(apiclient, configs=None):
"""Sleeps till the cleanup configs passed"""
# Configs list consists of the list of global configs
if not isinstance(configs, list):
return
for config in configs:
cmd = listConfigurations.listConfigurationsCmd()
cmd.name = config
cmd.listall = True
try:
config_descs = apiclient.listConfigurations(cmd)
except Exception as e:
raise Exception("Failed to fetch configurations: %s" % e)
if not isinstance(config_descs, list):
raise Exception("List configs didn't returned a valid data")
config_desc = config_descs[0]
# Sleep for the config_desc.value time
time.sleep(int(config_desc.value))
return
def add_netscaler(apiclient, zoneid, NSservice):
""" Adds Netscaler device and enables NS provider"""
cmd = listPhysicalNetworks.listPhysicalNetworksCmd()
cmd.zoneid = zoneid
physical_networks = apiclient.listPhysicalNetworks(cmd)
if isinstance(physical_networks, list):
physical_network = physical_networks[0]
cmd = listNetworkServiceProviders.listNetworkServiceProvidersCmd()
cmd.name = 'Netscaler'
cmd.physicalnetworkid = physical_network.id
nw_service_providers = apiclient.listNetworkServiceProviders(cmd)
if isinstance(nw_service_providers, list):
netscaler_provider = nw_service_providers[0]
else:
cmd1 = addNetworkServiceProvider.addNetworkServiceProviderCmd()
cmd1.name = 'Netscaler'
cmd1.physicalnetworkid = physical_network.id
netscaler_provider = apiclient.addNetworkServiceProvider(cmd1)
netscaler = NetScaler.add(
apiclient,
NSservice,
physicalnetworkid=physical_network.id
)
if netscaler_provider.state != 'Enabled':
cmd = updateNetworkServiceProvider.updateNetworkServiceProviderCmd()
cmd.id = netscaler_provider.id
cmd.state = 'Enabled'
apiclient.updateNetworkServiceProvider(cmd)
return netscaler
def get_region(apiclient, region_id=None, region_name=None):
'''
@name : get_region
@Desc : Returns the Region Information for a given region id or region name
@Input : region_name: Name of the Region
region_id : Id of the region
@Output : 1. Region Information for the passed inputs else first Region
2. FAILED In case the cmd failed
'''
cmd = listRegions.listRegionsCmd()
if region_name is not None:
cmd.name = region_name
if region_id is not None:
cmd.id = region_id
cmd_out = apiclient.listRegions(cmd)
return FAILED if validateList(cmd_out)[0] != PASS else cmd_out[0]
def get_domain(apiclient, domain_id=None, domain_name=None):
'''
@name : get_domain
@Desc : Returns the Domain Information for a given domain id or domain name
@Input : domain id : Id of the Domain
domain_name : Name of the Domain
@Output : 1. Domain Information for the passed inputs else first Domain
2. FAILED In case the cmd failed
'''
cmd = listDomains.listDomainsCmd()
if domain_name is not None:
cmd.name = domain_name
if domain_id is not None:
cmd.id = domain_id
cmd_out = apiclient.listDomains(cmd)
if validateList(cmd_out)[0] != PASS:
return FAILED
return cmd_out[0]
def find_storage_pool_type(apiclient, storagetype='NetworkFileSystem'):
"""
@name : find_storage_pool_type
@Desc : Returns true if the given storage pool type exists
@Input : type : type of the storage pool[NFS, RBD, etc.,]
@Output : True : if the type of storage is found
False : if the type of storage is not found
FAILED In case the cmd failed
"""
cmd = listStoragePools.listStoragePoolsCmd()
cmd_out = apiclient.listStoragePools(cmd)
if validateList(cmd_out)[0] != PASS:
return FAILED
for storage_pool in cmd_out:
        if storage_pool.type.lower() == storagetype.lower():
return True
return False
def get_zone(apiclient, zone_name=None, zone_id=None):
'''
@name : get_zone
@Desc :Returns the Zone Information for a given zone id or Zone Name
@Input : zone_name: Name of the Zone
zone_id : Id of the zone
@Output : 1. Zone Information for the passed inputs else first zone
2. FAILED In case the cmd failed
'''
cmd = listZones.listZonesCmd()
if zone_name is not None:
cmd.name = zone_name
if zone_id is not None:
cmd.id = zone_id
cmd_out = apiclient.listZones(cmd)
if validateList(cmd_out)[0] != PASS:
return FAILED
'''
Check if input zone name and zone id is None,
then return first element of List Zones command
'''
return cmd_out[0]
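# Illustrative usage (editor's sketch): the get_* lookups in this module
# return the FAILED sentinel instead of raising, so callers must check it.
# The zone name "zone1" is hypothetical.
def _example_lookup_zone(apiclient):
    zone = get_zone(apiclient, zone_name="zone1")
    if zone == FAILED:
        raise Exception("get_zone() failed for zone1")
    return zone.id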
def get_physical_networks(apiclient, zoneid):
'''
@name : get_physical_networks
@Desc :Returns A list of the Physical Networks in the given Zone
@Input : zoneid: The Zone ID
@Output : 1. A list containing the Physical Networks
'''
cmd = listPhysicalNetworks.listPhysicalNetworksCmd()
cmd.zoneid = zoneid
physical_networks = apiclient.listPhysicalNetworks(cmd)
return physical_networks
def get_pod(apiclient, zone_id=None, pod_id=None, pod_name=None):
'''
@name : get_pod
@Desc : Returns the Pod Information for a given zone id or Zone Name
@Input : zone_id: Id of the Zone
pod_name : Name of the Pod
pod_id : Id of the Pod
@Output : 1. Pod Information for the pod
2. FAILED In case the cmd failed
'''
cmd = listPods.listPodsCmd()
if pod_name is not None:
cmd.name = pod_name
if pod_id is not None:
cmd.id = pod_id
if zone_id is not None:
cmd.zoneid = zone_id
cmd_out = apiclient.listPods(cmd)
if validateList(cmd_out)[0] != PASS:
return FAILED
return cmd_out[0]
def get_template(
apiclient, zone_id=None, ostype_desc=None, template_filter="featured", template_type='BUILTIN',
template_id=None, template_name=None, account=None, domain_id=None, project_id=None,
hypervisor=None):
'''
@Name : get_template
@Desc : Retrieves the template Information based upon inputs provided
Template is retrieved based upon either of the inputs matched
condition
    @Input : the filter criteria passed via the keyword arguments
@Output : FAILED in case of any failure
template Information matching the inputs
'''
cmd = listTemplates.listTemplatesCmd()
cmd.templatefilter = template_filter
if domain_id is not None:
cmd.domainid = domain_id
if zone_id is not None:
cmd.zoneid = zone_id
if template_id is not None:
cmd.id = template_id
if template_name is not None:
cmd.name = template_name
if hypervisor is not None:
cmd.hypervisor = hypervisor
if project_id is not None:
cmd.projectid = project_id
if account is not None:
cmd.account = account
'''
Get the Templates pertaining to the inputs provided
'''
list_templatesout = apiclient.listTemplates(cmd)
if validateList(list_templatesout)[0] != PASS:
return FAILED
for template in list_templatesout:
if template.isready and template.templatetype == template_type:
return template
'''
Return default first template, if no template matched
'''
return list_templatesout[0]
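# Illustrative usage (editor's sketch): fetch a ready BUILTIN template for a
# zone; note that get_template() falls back to the first listed template when
# nothing matches the requested type.
def _example_pick_builtin_template(apiclient, zone):
    template = get_template(apiclient, zone_id=zone.id,
                            template_filter="featured",
                            template_type="BUILTIN")
    if template == FAILED:
        raise Exception("No featured templates found in zone %s" % zone.id)
    return template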
def get_windows_template(
apiclient, zone_id=None, ostype_desc=None, template_filter="featured", template_type='USER',
template_id=None, template_name=None, account=None, domain_id=None, project_id=None,
hypervisor=None):
'''
    @Name : get_windows_template
@Desc : Retrieves the template Information based upon inputs provided
Template is retrieved based upon either of the inputs matched
condition
    @Input : the filter criteria passed via the keyword arguments
@Output : FAILED in case of any failure
template Information matching the inputs
'''
cmd = listTemplates.listTemplatesCmd()
cmd.templatefilter = template_filter
if domain_id is not None:
cmd.domainid = domain_id
if zone_id is not None:
cmd.zoneid = zone_id
if template_id is not None:
cmd.id = template_id
if template_name is not None:
cmd.name = template_name
if hypervisor is not None:
cmd.hypervisor = hypervisor
if project_id is not None:
cmd.projectid = project_id
if account is not None:
cmd.account = account
'''
Get the Templates pertaining to the inputs provided
'''
list_templatesout = apiclient.listTemplates(cmd)
#print("template result is %s"%(list_templatesout))
if list_templatesout is None:
return FAILED
    if validateList(list_templatesout)[0] == FAIL:
return FAILED
for template in list_templatesout:
        if template.isready and template.templatetype == template_type and template.ostypename == ostype_desc:
return template
'''
Return default first template, if no template matched
'''
return FAILED
def download_systemplates_sec_storage(server, services):
"""Download System templates on sec storage"""
try:
# Login to management server
ssh = SshClient(
server["ipaddress"],
server["port"],
server["username"],
server["password"]
)
except Exception:
raise Exception("SSH access failed for server with IP address: %s" %
server["ipaddess"])
# Mount Secondary Storage on Management Server
cmds = [
"mkdir -p %s" % services["mnt_dir"],
"mount -t nfs %s:/%s %s" % (
services["sec_storage"],
services["path"],
services["mnt_dir"]
),
"%s -m %s -u %s -h %s -F" % (
services["command"],
services["mnt_dir"],
services["download_url"],
services["hypervisor"]
)
]
for c in cmds:
result = ssh.execute(c)
res = str(result)
# Unmount the Secondary storage
ssh.execute("umount %s" % (services["mnt_dir"]))
if res.count("Successfully installed system VM template") == 1:
return
else:
raise Exception("Failed to download System Templates on Sec Storage")
def wait_for_ssvms(apiclient, zoneid, podid, interval=60):
"""After setup wait for SSVMs to come Up"""
time.sleep(interval)
timeout = 40
while True:
list_ssvm_response = list_ssvms(
apiclient,
systemvmtype='secondarystoragevm',
zoneid=zoneid,
podid=podid
)
ssvm = list_ssvm_response[0]
        if ssvm.state == 'Running':
            break
        if timeout == 0:
            raise Exception("SSVM failed to come up")
        # Sleep to ensure SSVMs are Up and Running
        time.sleep(interval)
        timeout = timeout - 1
timeout = 40
while True:
list_ssvm_response = list_ssvms(
apiclient,
systemvmtype='consoleproxy',
zoneid=zoneid,
podid=podid
)
cpvm = list_ssvm_response[0]
        if cpvm.state == 'Running':
            break
        if timeout == 0:
            raise Exception("CPVM failed to come up")
        # Sleep to ensure CPVM is Up and Running
        time.sleep(interval)
        timeout = timeout - 1
return
def get_builtin_template_info(apiclient, zoneid):
"""Returns hypervisor specific infor for templates"""
list_template_response = Template.list(
apiclient,
templatefilter='featured',
zoneid=zoneid,
)
    for b_template in list_template_response:
        if b_template.templatetype == 'BUILTIN':
            break
    else:
        raise Exception("No BUILTIN template found in zone %s" % zoneid)
extract_response = Template.extract(apiclient,
b_template.id,
'HTTP_DOWNLOAD',
zoneid)
return extract_response.url, b_template.hypervisor, b_template.format
def download_builtin_templates(apiclient, zoneid, hypervisor, host,
linklocalip, interval=60):
"""After setup wait till builtin templates are downloaded"""
# Change IPTABLES Rules
get_process_status(
host["ipaddress"],
host["port"],
host["username"],
host["password"],
linklocalip,
"iptables -P INPUT ACCEPT"
)
time.sleep(interval)
# Find the BUILTIN Templates for given Zone, Hypervisor
list_template_response = list_templates(
apiclient,
hypervisor=hypervisor,
zoneid=zoneid,
templatefilter='self'
)
if not isinstance(list_template_response, list):
raise Exception("Failed to download BUILTIN templates")
# Ensure all BUILTIN templates are downloaded
templateid = None
for template in list_template_response:
if template.templatetype == "BUILTIN":
templateid = template.id
# Sleep to ensure that template is in downloading state after adding
# Sec storage
time.sleep(interval)
while True:
template_response = list_templates(
apiclient,
id=templateid,
zoneid=zoneid,
templatefilter='self'
)
template = template_response[0]
# If template is ready,
# template.status = Download Complete
# Downloading - x% Downloaded
# Error - Any other string
if template.status == 'Download Complete':
break
elif 'Downloaded' in template.status:
time.sleep(interval)
elif 'Installing' not in template.status:
raise Exception("ErrorInDownload")
return
def update_resource_limit(apiclient, resourcetype, account=None,
domainid=None, max=None, projectid=None):
"""Updates the resource limit to 'max' for given account"""
cmd = updateResourceLimit.updateResourceLimitCmd()
cmd.resourcetype = resourcetype
if account:
cmd.account = account
if domainid:
cmd.domainid = domainid
if max:
cmd.max = max
if projectid:
cmd.projectid = projectid
apiclient.updateResourceLimit(cmd)
return
def list_os_types(apiclient, **kwargs):
"""List all os types matching criteria"""
cmd = listOsTypes.listOsTypesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listOsTypes(cmd))
def list_routers(apiclient, **kwargs):
"""List all Routers matching criteria"""
cmd = listRouters.listRoutersCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listRouters(cmd))
def list_zones(apiclient, **kwargs):
"""List all Zones matching criteria"""
cmd = listZones.listZonesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listZones(cmd))
def list_networks(apiclient, **kwargs):
"""List all Networks matching criteria"""
cmd = listNetworks.listNetworksCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listNetworks(cmd))
def list_clusters(apiclient, **kwargs):
"""List all Clusters matching criteria"""
cmd = listClusters.listClustersCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listClusters(cmd))
def list_ssvms(apiclient, **kwargs):
"""List all SSVMs matching criteria"""
cmd = listSystemVms.listSystemVmsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listSystemVms(cmd))
def list_storage_pools(apiclient, **kwargs):
"""List all storage pools matching criteria"""
cmd = listStoragePools.listStoragePoolsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listStoragePools(cmd))
def list_virtual_machines(apiclient, **kwargs):
"""List all VMs matching criteria"""
cmd = listVirtualMachines.listVirtualMachinesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listVirtualMachines(cmd))
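# Editor's note: every list_* wrapper below follows the same pattern -- the
# keyword arguments are copied verbatim onto the API command object, and
# listall is forced on when both 'account' and 'domainid' are supplied.
# Illustrative call (sketch only):
def _example_list_running_vms(apiclient, account):
    return list_virtual_machines(apiclient, account=account.name,
                                 domainid=account.domainid, state="Running")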
def list_hosts(apiclient, **kwargs):
"""List all Hosts matching criteria"""
cmd = listHosts.listHostsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listHosts(cmd))
def list_configurations(apiclient, **kwargs):
"""List configuration with specified name"""
cmd = listConfigurations.listConfigurationsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listConfigurations(cmd))
def list_publicIP(apiclient, **kwargs):
"""List all Public IPs matching criteria"""
cmd = listPublicIpAddresses.listPublicIpAddressesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listPublicIpAddresses(cmd))
def list_nat_rules(apiclient, **kwargs):
"""List all NAT rules matching criteria"""
cmd = listPortForwardingRules.listPortForwardingRulesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listPortForwardingRules(cmd))
def list_lb_rules(apiclient, **kwargs):
"""List all Load balancing rules matching criteria"""
cmd = listLoadBalancerRules.listLoadBalancerRulesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listLoadBalancerRules(cmd))
def list_lb_instances(apiclient, **kwargs):
"""List all Load balancing instances matching criteria"""
cmd = listLoadBalancerRuleInstances.listLoadBalancerRuleInstancesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listLoadBalancerRuleInstances(cmd))
def list_firewall_rules(apiclient, **kwargs):
"""List all Firewall Rules matching criteria"""
cmd = listFirewallRules.listFirewallRulesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listFirewallRules(cmd))
def list_volumes(apiclient, **kwargs):
"""List all volumes matching criteria"""
cmd = listVolumes.listVolumesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listVolumes(cmd))
def list_isos(apiclient, **kwargs):
"""Lists all available ISO files."""
cmd = listIsos.listIsosCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listIsos(cmd))
def list_snapshots(apiclient, **kwargs):
"""List all snapshots matching criteria"""
cmd = listSnapshots.listSnapshotsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listSnapshots(cmd))
def list_templates(apiclient, **kwargs):
"""List all templates matching criteria"""
cmd = listTemplates.listTemplatesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listTemplates(cmd))
def list_domains(apiclient, **kwargs):
"""Lists domains"""
cmd = listDomains.listDomainsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listDomains(cmd))
def list_accounts(apiclient, **kwargs):
"""Lists accounts and provides detailed account information for
listed accounts"""
cmd = listAccounts.listAccountsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listAccounts(cmd))
def list_users(apiclient, **kwargs):
"""Lists users and provides detailed account information for
listed users"""
cmd = listUsers.listUsersCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listUsers(cmd))
def list_snapshot_policy(apiclient, **kwargs):
"""Lists snapshot policies."""
cmd = listSnapshotPolicies.listSnapshotPoliciesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listSnapshotPolicies(cmd))
def list_events(apiclient, **kwargs):
"""Lists events"""
cmd = listEvents.listEventsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listEvents(cmd))
def list_disk_offering(apiclient, **kwargs):
"""Lists all available disk offerings."""
cmd = listDiskOfferings.listDiskOfferingsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listDiskOfferings(cmd))
def list_service_offering(apiclient, **kwargs):
"""Lists all available service offerings."""
cmd = listServiceOfferings.listServiceOfferingsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listServiceOfferings(cmd))
def list_vlan_ipranges(apiclient, **kwargs):
"""Lists all VLAN IP ranges."""
cmd = listVlanIpRanges.listVlanIpRangesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listVlanIpRanges(cmd))
def list_usage_records(apiclient, **kwargs):
"""Lists usage records for accounts"""
cmd = listUsageRecords.listUsageRecordsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listUsageRecords(cmd))
def list_nw_service_providers(apiclient, **kwargs):
"""Lists Network service providers"""
cmd = listNetworkServiceProviders.listNetworkServiceProvidersCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listNetworkServiceProviders(cmd))
def list_virtual_router_elements(apiclient, **kwargs):
"""Lists Virtual Router elements"""
cmd = listVirtualRouterElements.listVirtualRouterElementsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listVirtualRouterElements(cmd))
def list_network_offerings(apiclient, **kwargs):
"""Lists network offerings"""
cmd = listNetworkOfferings.listNetworkOfferingsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listNetworkOfferings(cmd))
def list_resource_limits(apiclient, **kwargs):
"""Lists resource limits"""
cmd = listResourceLimits.listResourceLimitsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listResourceLimits(cmd))
def list_vpc_offerings(apiclient, **kwargs):
""" Lists VPC offerings """
cmd = listVPCOfferings.listVPCOfferingsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd.listall=True
return(apiclient.listVPCOfferings(cmd))
def update_resource_count(apiclient, domainid, accountid=None,
projectid=None, rtype=None):
"""updates the resource count
0 - VM
1 - Public IP
2 - Volume
3 - Snapshot
4 - Template
5 - Projects
6 - Network
7 - VPC
8 - CPUs
9 - RAM
10 - Primary (shared) storage (Volumes)
11 - Secondary storage (Snapshots, Templates & ISOs)
"""
Resources.updateCount(apiclient,
domainid=domainid,
account=accountid if accountid else None,
projectid=projectid if projectid else None,
resourcetype=rtype if rtype else None
)
return
def findSuitableHostForMigration(apiclient, vmid):
"""Returns a suitable host for VM migration"""
suitableHost = None
try:
hosts = Host.listForMigration(apiclient, virtualmachineid=vmid,
)
except Exception as e:
raise Exception("Exception while getting hosts list suitable for migration: %s" % e)
suitablehosts = []
if isinstance(hosts, list) and len(hosts) > 0:
suitablehosts = [host for host in hosts if (str(host.resourcestate).lower() == "enabled"\
and str(host.state).lower() == "up")]
if len(suitablehosts)>0:
suitableHost = suitablehosts[0]
return suitableHost
def get_resource_type(resource_id):
"""Returns resource type"""
lookup = {0: "VM",
1: "Public IP",
2: "Volume",
3: "Snapshot",
4: "Template",
5: "Projects",
6: "Network",
7: "VPC",
8: "CPUs",
9: "RAM",
10: "Primary (shared) storage (Volumes)",
11: "Secondary storage (Snapshots, Templates & ISOs)"
}
return lookup[resource_id]
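# Illustrative usage (editor's sketch): the numeric ids accepted by
# update_resource_count() pair with the labels from get_resource_type(),
# e.g. rtype=8 updates the CPU count for a domain.
def _example_refresh_cpu_count(apiclient, domainid):
    update_resource_count(apiclient, domainid=domainid, rtype=8)
    return "updated resource count for %s" % get_resource_type(8)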
def get_free_vlan(apiclient, zoneid):
"""
Find an unallocated VLAN outside the range allocated to the physical network.
@note: This does not guarantee that the VLAN is available for use in
the deployment's network gear
@return: physical_network, shared_vlan_tag
"""
list_physical_networks_response = PhysicalNetwork.list(
apiclient,
zoneid=zoneid
)
assert isinstance(list_physical_networks_response, list)
assert len(
list_physical_networks_response) > 0, "No physical networks found in zone %s" % zoneid
physical_network = list_physical_networks_response[0]
networks = list_networks(apiclient, zoneid=zoneid)
usedVlanIds = []
if isinstance(networks, list) and len(networks) > 0:
usedVlanIds = [int(nw.vlan)
for nw in networks if (nw.vlan and str(nw.vlan).lower() != "untagged")]
if not hasattr(physical_network, "vlan"):
while True:
shared_ntwk_vlan = random.randrange(1, 4095)
if shared_ntwk_vlan in usedVlanIds:
continue
else:
break
else:
vlans = xsplit(physical_network.vlan, ['-', ','])
assert len(vlans) > 0
assert int(vlans[0]) < int(
vlans[-1]), "VLAN range %s was improperly split" % physical_network.vlan
        # Assuming the random function gives a different integer each time
retriesCount = 20
shared_ntwk_vlan = None
while True:
if retriesCount == 0:
break
free_vlan = int(vlans[-1]) + random.randrange(1, 20)
if free_vlan > 4095:
free_vlan = int(vlans[0]) - random.randrange(1, 20)
if free_vlan < 0 or (free_vlan in usedVlanIds):
retriesCount -= 1
continue
else:
shared_ntwk_vlan = free_vlan
break
return physical_network, shared_ntwk_vlan
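# Illustrative usage (editor's sketch): pick a VLAN outside the physical
# network's allocated range before creating a shared network in a test.
# The helper cannot verify the VLAN is usable on the actual switches.
def _example_shared_network_vlan(apiclient, zone):
    physical_network, vlan = get_free_vlan(apiclient, zone.id)
    if vlan is None:
        raise Exception("Could not find a free VLAN in zone %s" % zone.id)
    return physical_network, vlan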
def setNonContiguousVlanIds(apiclient, zoneid):
"""
Form the non contiguous ranges based on currently assigned range in physical network
"""
NonContigVlanIdsAcquired = False
list_physical_networks_response = PhysicalNetwork.list(
apiclient,
zoneid=zoneid
)
assert isinstance(list_physical_networks_response, list)
assert len(
list_physical_networks_response) > 0, "No physical networks found in zone %s" % zoneid
for physical_network in list_physical_networks_response:
vlans = xsplit(physical_network.vlan, ['-', ','])
assert len(vlans) > 0
assert int(vlans[0]) < int(
vlans[-1]), "VLAN range %s was improperly split" % physical_network.vlan
# Keep some gap between existing vlan and the new vlans which we are going to add
# So that they are non contiguous
non_contig_end_vlan_id = int(vlans[-1]) + 6
non_contig_start_vlan_id = int(vlans[0]) - 6
        # Form ranges that are close to the existing range but not immediately
        # contiguous with it; there should be a gap between the existing range
        # and the new non-contiguous range.
        # If a range cannot be added after the existing range because it would
        # cross 4095, select VLAN ids before the existing range (keeping them
        # greater than 0) and add the non-contiguous range there.
vlan = {"partial_range": ["", ""], "full_range": ""}
if non_contig_end_vlan_id < 4095:
vlan["partial_range"][0] = str(
non_contig_end_vlan_id - 4) + '-' + str(non_contig_end_vlan_id - 3)
vlan["partial_range"][1] = str(
non_contig_end_vlan_id - 1) + '-' + str(non_contig_end_vlan_id)
vlan["full_range"] = str(
non_contig_end_vlan_id - 4) + '-' + str(non_contig_end_vlan_id)
NonContigVlanIdsAcquired = True
elif non_contig_start_vlan_id > 0:
vlan["partial_range"][0] = str(
non_contig_start_vlan_id) + '-' + str(non_contig_start_vlan_id + 1)
vlan["partial_range"][1] = str(
non_contig_start_vlan_id + 3) + '-' + str(non_contig_start_vlan_id + 4)
vlan["full_range"] = str(
non_contig_start_vlan_id) + '-' + str(non_contig_start_vlan_id + 4)
NonContigVlanIdsAcquired = True
else:
NonContigVlanIdsAcquired = False
        # If we failed to get suitable VLAN ids, continue to the next physical
        # network; otherwise break out of the loop, as we have got the
        # non-contiguous VLAN ids needed for the test
if not NonContigVlanIdsAcquired:
continue
else:
break
    # If, even after looping through all existing physical networks, we failed
    # to get suitable non-contiguous VLAN ids, then fail the test case
if not NonContigVlanIdsAcquired:
return None, None
return physical_network, vlan
def isIpInDesiredState(apiclient, ipaddressid, state):
""" Check if the given IP is in the correct state (given)
and return True/False accordingly"""
retriesCount = 10
ipInDesiredState = False
exceptionOccured = False
exceptionMessage = ""
try:
while retriesCount >= 0:
portableips = PublicIPAddress.list(apiclient, id=ipaddressid)
assert validateList(
portableips)[0] == PASS, "IPs list validation failed"
if str(portableips[0].state).lower() == state:
ipInDesiredState = True
break
retriesCount -= 1
time.sleep(60)
except Exception as e:
exceptionOccured = True
exceptionMessage = e
        return [exceptionOccured, ipInDesiredState, exceptionMessage]
if not ipInDesiredState:
exceptionMessage = "Ip should be in %s state, it is in %s" %\
(state, portableips[0].state)
    return [exceptionOccured, ipInDesiredState, exceptionMessage]
def setSharedNetworkParams(networkServices, range=20):
"""Fill up the services dictionary for shared network using random subnet"""
# @range: range decides the endip. Pass the range as "x" if you want the difference between the startip
# and endip as "x"
# Set the subnet number of shared networks randomly prior to execution
# of each test case to avoid overlapping of ip addresses
shared_network_subnet_number = random.randrange(1,254)
networkServices["gateway"] = "172.16."+str(shared_network_subnet_number)+".1"
networkServices["startip"] = "172.16."+str(shared_network_subnet_number)+".2"
networkServices["endip"] = "172.16."+str(shared_network_subnet_number)+"."+str(range+1)
networkServices["netmask"] = "255.255.255.0"
return networkServices
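# Illustrative usage (editor's sketch): randomize the shared-network subnet
# before each test to avoid overlapping address ranges between runs. The
# "shared_network" services key is an assumption about the test's data.
def _example_prepare_shared_network(services):
    return setSharedNetworkParams(services["shared_network"], range=20)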
def createEnabledNetworkOffering(apiclient, networkServices):
"""Create and enable network offering according to the type
@output: List, containing [ Result,Network Offering,Reason ]
Ist Argument('Result') : FAIL : If exception or assertion error occurs
PASS : If network offering
is created and enabled successfully
IInd Argument(Net Off) : Enabled network offering
In case of exception or
assertion error, it will be None
IIIrd Argument(Reason) : Reason for failure,
default to None
"""
try:
resultSet = [FAIL, None, None]
# Create network offering
network_offering = NetworkOffering.create(apiclient, networkServices, conservemode=False)
# Update network offering state from disabled to enabled.
NetworkOffering.update(network_offering, apiclient, id=network_offering.id,
state="enabled")
except Exception as e:
resultSet[2] = e
return resultSet
return [PASS, network_offering, None]
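# Illustrative usage (editor's sketch): unpack the [result, offering, reason]
# triple returned above. The "network_offering" services key is hypothetical.
def _example_enabled_offering(apiclient, services):
    result, offering, reason = createEnabledNetworkOffering(
        apiclient, services["network_offering"])
    if result != PASS:
        raise Exception("Network offering creation failed: %s" % reason)
    return offering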
def shouldTestBeSkipped(networkType, zoneType):
"""Decide which test to skip, according to type of network and zone type"""
# If network type is isolated or vpc and zone type is basic, then test should be skipped
skipIt = False
if ((networkType.lower() == str(ISOLATED_NETWORK).lower() or networkType.lower() == str(VPC_NETWORK).lower())
and (zoneType.lower() == BASIC_ZONE)):
skipIt = True
return skipIt
def verifyNetworkState(apiclient, networkid, state, listall=True):
"""List networks and check if the network state matches the given state"""
retriesCount = 10
isNetworkInDesiredState = False
exceptionOccured = False
exceptionMessage = ""
try:
while retriesCount >= 0:
networks = Network.list(apiclient, id=networkid, listall=listall)
assert validateList(
networks)[0] == PASS, "Networks list validation failed"
if str(networks[0].state).lower() == state:
isNetworkInDesiredState = True
break
retriesCount -= 1
time.sleep(60)
if not isNetworkInDesiredState:
exceptionMessage = "Network state should be %s, it is %s" %\
(state, networks[0].state)
except Exception as e:
exceptionOccured = True
exceptionMessage = e
return [exceptionOccured, isNetworkInDesiredState, exceptionMessage]
return [exceptionOccured, isNetworkInDesiredState, exceptionMessage]
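# Illustrative usage (editor's sketch): poll until a network reports the
# "implemented" state; verifyNetworkState() already retries internally
# (10 attempts, 60 seconds apart).
def _example_wait_until_implemented(apiclient, networkid):
    exc_occurred, in_state, message = verifyNetworkState(
        apiclient, networkid, "implemented")
    assert not exc_occurred and in_state, message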
def verifyComputeOfferingCreation(apiclient, computeofferingid):
"""List Compute offerings by ID and verify that the offering exists"""
cmd = listServiceOfferings.listServiceOfferingsCmd()
cmd.id = computeofferingid
serviceOfferings = None
try:
serviceOfferings = apiclient.listServiceOfferings(cmd)
except Exception:
return FAIL
if not (isinstance(serviceOfferings, list) and len(serviceOfferings) > 0):
return FAIL
return PASS
def createNetworkRulesForVM(apiclient, virtualmachine, ruletype,
account, networkruledata):
"""Acquire IP, create Firewall and NAT/StaticNAT rule
(associating it with given vm) for that IP"""
try:
public_ip = PublicIPAddress.create(
apiclient,accountid=account.name,
zoneid=virtualmachine.zoneid,domainid=account.domainid,
networkid=virtualmachine.nic[0].networkid)
FireWallRule.create(
apiclient,ipaddressid=public_ip.ipaddress.id,
protocol='TCP', cidrlist=[networkruledata["fwrule"]["cidr"]],
startport=networkruledata["fwrule"]["startport"],
endport=networkruledata["fwrule"]["endport"]
)
if ruletype == NAT_RULE:
# Create NAT rule
NATRule.create(apiclient, virtualmachine,
networkruledata["natrule"],ipaddressid=public_ip.ipaddress.id,
networkid=virtualmachine.nic[0].networkid)
elif ruletype == STATIC_NAT_RULE:
# Enable Static NAT for VM
StaticNATRule.enable(apiclient,public_ip.ipaddress.id,
virtualmachine.id, networkid=virtualmachine.nic[0].networkid)
except Exception as e:
        return [FAIL, e]
return [PASS, public_ip]
def getPortableIpRangeServices(config):
""" Reads config values related to portable ip and fills up
services accordingly"""
services = {}
attributeError = False
if config.portableIpRange.startip:
services["startip"] = config.portableIpRange.startip
else:
attributeError = True
if config.portableIpRange.endip:
services["endip"] = config.portableIpRange.endip
else:
attributeError = True
if config.portableIpRange.netmask:
services["netmask"] = config.portableIpRange.netmask
else:
attributeError = True
if config.portableIpRange.gateway:
services["gateway"] = config.portableIpRange.gateway
else:
attributeError = True
if config.portableIpRange.vlan:
services["vlan"] = config.portableIpRange.vlan
if attributeError:
services = FAILED
return services
def uploadVolume(apiclient, zoneid, account, services):
try:
# Upload the volume
volume = Volume.upload(apiclient, services["volume"],
zoneid=zoneid, account=account.name,
domainid=account.domainid, url=services["url"])
volume.wait_for_upload(apiclient)
# Check List Volume response for newly created volume
volumes = Volume.list(apiclient, id=volume.id,
zoneid=zoneid, listall=True)
validationresult = validateList(volumes)
assert validationresult[0] == PASS,\
"volumes list validation failed: %s" % validationresult[2]
assert str(volumes[0].state).lower() == "uploaded",\
"Volume state should be 'uploaded' but it is %s" % volumes[0].state
except Exception as e:
return [FAIL, e]
return [PASS, volume]
def matchResourceCount(apiclient, expectedCount, resourceType,
accountid=None, projectid=None):
"""Match the resource count of account/project with the expected
resource count"""
try:
resourceholderlist = None
if accountid:
resourceholderlist = Account.list(apiclient, id=accountid)
elif projectid:
resourceholderlist = Project.list(apiclient, id=projectid, listall=True)
validationresult = validateList(resourceholderlist)
assert validationresult[0] == PASS,\
"accounts list validation failed"
if resourceType == RESOURCE_PRIMARY_STORAGE:
resourceCount = resourceholderlist[0].primarystoragetotal
elif resourceType == RESOURCE_SECONDARY_STORAGE:
resourceCount = resourceholderlist[0].secondarystoragetotal
elif resourceType == RESOURCE_CPU:
resourceCount = resourceholderlist[0].cputotal
elif resourceType == RESOURCE_MEMORY:
resourceCount = resourceholderlist[0].memorytotal
assert str(resourceCount) == str(expectedCount),\
"Resource count %s should match with the expected resource count %s" %\
(resourceCount, expectedCount)
except Exception as e:
return [FAIL, e]
return [PASS, None]
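# Illustrative usage (editor's sketch): assert that an account's CPU total
# matches an expected value after a deployment.
def _example_assert_cpu_count(apiclient, account, expected):
    result = matchResourceCount(apiclient, expected, RESOURCE_CPU,
                                accountid=account.id)
    assert result[0] == PASS, result[1]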
def createSnapshotFromVirtualMachineVolume(apiclient, account, vmid):
"""Create snapshot from volume"""
try:
volumes = Volume.list(apiclient, account=account.name,
domainid=account.domainid, virtualmachineid=vmid)
validationresult = validateList(volumes)
assert validateList(volumes)[0] == PASS,\
"List volumes should return a valid response"
snapshot = Snapshot.create(apiclient, volume_id=volumes[0].id,
account=account.name, domainid=account.domainid)
snapshots = Snapshot.list(apiclient, id=snapshot.id,
listall=True)
validationresult = validateList(snapshots)
assert validationresult[0] == PASS,\
"List snapshot should return a valid list"
except Exception as e:
return[FAIL, e]
return [PASS, snapshot]
def isVmExpunged(apiclient, vmid, projectid=None, timeout=600):
"""Verify if VM is expunged or not"""
    vmExpunged = False
    while timeout >= 0:
try:
vms = VirtualMachine.list(apiclient, id=vmid, projectid=projectid)
if vms is None:
vmExpunged = True
break
timeout -= 60
time.sleep(60)
except Exception:
vmExpunged = True
break
#end while
return vmExpunged
def isDomainResourceCountEqualToExpectedCount(apiclient, domainid, expectedcount,
resourcetype):
"""Get the resource count of specific domain and match
it with the expected count
Return list [isExceptionOccured, reasonForException, isResourceCountEqual]"""
isResourceCountEqual = False
isExceptionOccured = False
reasonForException = None
try:
response = Resources.updateCount(apiclient, domainid=domainid,
resourcetype=resourcetype)
except Exception as e:
reasonForException = "Failed while updating resource count: %s" % e
isExceptionOccured = True
return [isExceptionOccured, reasonForException, isResourceCountEqual]
resourcecount = (response[0].resourcecount / (1024**3))
if resourcecount == expectedcount:
isResourceCountEqual = True
return [isExceptionOccured, reasonForException, isResourceCountEqual]
def isNetworkDeleted(apiclient, networkid, timeout=600):
""" List the network and check that the list is empty or not"""
networkDeleted = False
while timeout >= 0:
networks = Network.list(apiclient, id=networkid)
if networks is None:
networkDeleted = True
break
timeout -= 60
time.sleep(60)
#end while
return networkDeleted
def createChecksum(service=None,
virtual_machine=None,
disk=None,
disk_type=None):
""" Calculate the MD5 checksum of the disk by writing \
data on the disk where disk_type is either root disk or data disk
@return: returns the calculated checksum"""
random_data_0 = random_gen(size=100)
# creating checksum(MD5)
m = hashlib.md5()
m.update(random_data_0)
    checksum_random_data_0 = m.hexdigest()
try:
ssh_client = SshClient(
virtual_machine.ssh_ip,
virtual_machine.ssh_port,
virtual_machine.username,
virtual_machine.password
)
except Exception:
raise Exception("SSH access failed for server with IP address: %s" %
virtual_machine.ssh_ip)
# Format partition using ext3
format_volume_to_ext3(
ssh_client,
service["volume_write_path"][
virtual_machine.hypervisor.lower()][disk_type]
)
cmds = ["fdisk -l",
"mkdir -p %s" % service["data_write_paths"]["mount_dir"],
"mount -t ext3 %s1 %s" % (
service["volume_write_path"][
virtual_machine.hypervisor.lower()][disk_type],
service["data_write_paths"]["mount_dir"]
),
"mkdir -p %s/%s/%s " % (
service["data_write_paths"]["mount_dir"],
service["data_write_paths"]["sub_dir"],
service["data_write_paths"]["sub_lvl_dir1"],
),
"echo %s > %s/%s/%s/%s" % (
random_data_0,
service["data_write_paths"]["mount_dir"],
service["data_write_paths"]["sub_dir"],
service["data_write_paths"]["sub_lvl_dir1"],
service["data_write_paths"]["random_data"]
),
"cat %s/%s/%s/%s" % (
service["data_write_paths"]["mount_dir"],
service["data_write_paths"]["sub_dir"],
service["data_write_paths"]["sub_lvl_dir1"],
service["data_write_paths"]["random_data"]
)
]
for c in cmds:
ssh_client.execute(c)
# Unmount the storage
cmds = [
"umount %s" % (service["data_write_paths"]["mount_dir"]),
]
for c in cmds:
ssh_client.execute(c)
    return checksum_random_data_0
def compareChecksum(
apiclient,
service=None,
original_checksum=None,
disk_type=None,
virt_machine=None
):
"""
Create md5 checksum of the data present on the disk and compare
it with the given checksum
"""
if virt_machine.state != "Running":
virt_machine.start(apiclient)
try:
# Login to VM to verify test directories and files
ssh = SshClient(
virt_machine.ssh_ip,
virt_machine.ssh_port,
virt_machine.username,
virt_machine.password
)
except Exception:
raise Exception("SSH access failed for server with IP address: %s" %
virt_machine.ssh_ip)
# Mount datadiskdevice_1 because this is the first data disk of the new
# virtual machine
cmds = ["blkid",
"fdisk -l",
"mkdir -p %s" % service["data_write_paths"]["mount_dir"],
"mount -t ext3 %s1 %s" % (
service["volume_write_path"][
virt_machine.hypervisor.lower()][disk_type],
service["data_write_paths"]["mount_dir"]
),
]
for c in cmds:
ssh.execute(c)
returned_data_0 = ssh.execute(
"cat %s/%s/%s/%s" % (
service["data_write_paths"]["mount_dir"],
service["data_write_paths"]["sub_dir"],
service["data_write_paths"]["sub_lvl_dir1"],
service["data_write_paths"]["random_data"]
))
    n = hashlib.md5()
    n.update(returned_data_0[0])
    checksum_returned_data_0 = n.hexdigest()
    # Verify returned data
    assert original_checksum == checksum_returned_data_0, \
        "Checksum does not match the checksum of the original data"
# Unmount the Sec Storage
cmds = [
"umount %s" % (service["data_write_paths"]["mount_dir"]),
]
for c in cmds:
ssh.execute(c)
return
def verifyRouterState(apiclient, routerid, state, listall=True):
"""List router and check if the router state matches the given state"""
retriesCount = 10
isRouterInDesiredState = False
exceptionOccured = False
exceptionMessage = ""
try:
while retriesCount >= 0:
routers = Router.list(apiclient, id=routerid, listall=listall)
assert validateList(
routers)[0] == PASS, "Routers list validation failed"
if str(routers[0].state).lower() == state:
isRouterInDesiredState = True
break
retriesCount -= 1
time.sleep(60)
if not isRouterInDesiredState:
exceptionMessage = "Router state should be %s, it is %s" %\
(state, routers[0].state)
except Exception as e:
exceptionOccured = True
exceptionMessage = e
return [exceptionOccured, isRouterInDesiredState, exceptionMessage]
return [exceptionOccured, isRouterInDesiredState, exceptionMessage]
def isIpRangeInUse(api_client, publicIpRange):
''' Check that if any Ip in the IP Range is in use
currently
'''
vmList = VirtualMachine.list(api_client,
zoneid=publicIpRange.zoneid,
listall=True)
if not vmList:
return False
for vm in vmList:
for nic in vm.nic:
publicIpAddresses = PublicIPAddress.list(api_client,
associatednetworkid=nic.networkid,
listall=True)
if validateList(publicIpAddresses)[0] == PASS:
for ipaddress in publicIpAddresses:
if IPAddress(publicIpRange.startip) <=\
IPAddress(ipaddress.ipaddress) <=\
IPAddress(publicIpRange.endip):
return True
return False
def verifyGuestTrafficPortGroups(api_client, config, setup_zone):
""" This function matches the given zone with
the zone in config file used to deploy the setup and
retrieves the corresponding vcenter details and forms
the vcenter connection object. It makes call to
verify the guest traffic for given zone """
try:
zoneDetailsInConfig = [zone for zone in config.zones
if zone.name == setup_zone.name][0]
vcenterusername = zoneDetailsInConfig.vmwaredc.username
vcenterpassword = zoneDetailsInConfig.vmwaredc.password
vcenterip = zoneDetailsInConfig.vmwaredc.vcenter
vcenterObj = Vcenter(
vcenterip,
vcenterusername,
vcenterpassword)
response = verifyVCenterPortGroups(
api_client,
vcenterObj,
traffic_types_to_validate=[
GUEST_TRAFFIC],
zoneid=setup_zone.id,
switchTypes=[VMWAREDVS])
assert response[0] == PASS, response[1]
except Exception as e:
return [FAIL, e]
return [PASS, None]
def analyzeTrafficType(trafficTypes, trafficTypeToFilter, switchTypes=None):
""" Analyze traffic types for given type and return
switch name and vlan Id from the
vmwarenetworklabel string of trafficTypeToFilter
"""
try:
filteredList = [trafficType for trafficType in trafficTypes
if trafficType.traffictype.lower() ==
trafficTypeToFilter]
if not filteredList:
return [PASS, filteredList, None, None]
# Split string with , so as to extract the switch Name and
# vlan ID
splitString = str(
filteredList[0].vmwarenetworklabel).split(",")
switchName = splitString[0]
vlanSpecified = splitString[1]
availableSwitchType = splitString[2]
if switchTypes and availableSwitchType.lower() not in switchTypes:
return [PASS, None, None, None]
return [PASS, filteredList, switchName, vlanSpecified]
except Exception as e:
return [FAIL, e, None, None]
def getExpectedPortGroupNames(
api_client,
physical_network,
network_rate,
switch_name,
traffic_types,
switch_dict,
vcenter_conn,
specified_vlan,
traffic_type):
""" Return names of expected port groups that should be
present in vcenter
Parameters:
@physical_network: Physical Network of the @traffic_type
@network_rate: as defined by network.throttling.rate
@switch_name: Name of the switch used by the traffic in
vcenter
@traffic_types: List of all traffic types present in the physical
network
@switch_dict: Dictionary containing switch information in vcenter
@vcenter_conn: vcenter connection object used to fetch information
from vcenter
@specified_vlan: The vlan for @traffic_type
@traffic_type: Traffic type for which the port names are to be
returned
Return value:
[PASS/FAIL, exception object if FAIL else expected port group names
for @traffic_type]
"""
try:
expectedDVPortGroupNames = []
if traffic_type == PUBLIC_TRAFFIC:
publicIpRanges = PublicIpRange.list(
api_client,
physicalnetworkid=physical_network.id
)
if publicIpRanges is not None:
for publicIpRange in publicIpRanges:
vlanInIpRange = re.findall(
                        r'\d+',
str(publicIpRange.vlan))
vlanId = "untagged"
if len(vlanInIpRange) > 0:
vlanId = vlanInIpRange[0]
ipRangeInUse = isIpRangeInUse(api_client, publicIpRange)
if ipRangeInUse:
expectedDVPortGroupName = "cloud" + "." + \
PUBLIC_TRAFFIC + "." + vlanId + "." + \
network_rate + "." + "1" + "-" + \
switch_name
expectedDVPortGroupNames.append(
expectedDVPortGroupName)
expectedDVPortGroupName = "cloud" + "." + PUBLIC_TRAFFIC + "." + \
vlanId + "." + "0" + "." + "1" + "-" + switch_name
expectedDVPortGroupNames.append(expectedDVPortGroupName)
if traffic_type == GUEST_TRAFFIC:
networks = Network.list(
api_client,
physicalnetworkid=physical_network.id,
listall=True
)
if networks is not None:
for network in networks:
networkVlan = re.findall(
                    r'\d+', str(network.vlan))
if len(networkVlan) > 0:
vlanId = networkVlan[0]
expectedDVPortGroupName = "cloud" + "." + GUEST_TRAFFIC + "." + \
vlanId + "." + network_rate + "." + "1" + "-" + \
switch_name
expectedDVPortGroupNames.append(
expectedDVPortGroupName)
if traffic_type == STORAGE_TRAFFIC:
vlanId = ""
storageIpRanges = StorageNetworkIpRange.list(
api_client,
zoneid=physical_network.zoneid
)
if storageIpRanges is not None:
for storageIpRange in storageIpRanges:
vlanInIpRange = re.findall(
                        r'\d+',
str(storageIpRange.vlan))
if len(vlanInIpRange) > 0:
vlanId = vlanInIpRange[0]
else:
vlanId = "untagged"
expectedDVPortGroupName = "cloud" + "." + STORAGE_TRAFFIC + \
"." + vlanId + "." + "0" + "." + "1" + "-" + \
switch_name
expectedDVPortGroupNames.append(
expectedDVPortGroupName)
else:
response = analyzeTrafficType(
traffic_types, MANAGEMENT_TRAFFIC)
assert response[0] == PASS, response[1]
filteredList, switchName, vlanSpecified =\
response[1], response[2], response[3]
if not filteredList:
raise Exception("No Management traffic present and\
Storage traffic does not have any IP range,\
Invalid zone setting")
if switchName not in switch_dict:
dvswitches = vcenter_conn.get_dvswitches(
name=switchName)
switch_dict[switchName] = dvswitches[0][
'dvswitch']['portgroupNameList']
if vlanSpecified:
vlanId = vlanSpecified
else:
vlanId = "untagged"
expectedDVPortGroupName = "cloud" + "." + STORAGE_TRAFFIC + \
"." + vlanId + "." + "0" + "." + "1" + "-" + switchName
expectedDVPortGroupNames.append(expectedDVPortGroupName)
if traffic_type == MANAGEMENT_TRAFFIC:
vlanId = "untagged"
if specified_vlan:
vlanId = specified_vlan
expectedDVPortGroupName = "cloud" + "." + "private" + "." + \
vlanId + "." + "0" + "." + "1" + "-" + switch_name
expectedDVPortGroupNames.append(expectedDVPortGroupName)
except Exception as e:
return [FAIL, e]
return [PASS, expectedDVPortGroupNames]
def verifyVCenterPortGroups(
api_client,
vcenter_conn,
zoneid,
traffic_types_to_validate,
switchTypes):
""" Generate expected port groups for given traffic types and
verify they are present in the vcenter
Parameters:
@api_client: API client of root admin account
@vcenter_conn: connection object for vcenter used to fetch data
using vcenterAPI
@zone_id: Zone for which port groups are to be verified
@traffic_types_to_validate:
Traffic types (public, guest, management, storage) for
which verification is to be done
@switchTypes: The switch types for which port groups
are to be verified e.g vmwaredvs
Return value:
[PASS/FAIL, exception message if FAIL else None]
"""
try:
expectedDVPortGroupNames = []
vcenterPortGroups = []
config = Configurations.list(
api_client,
name="network.throttling.rate"
)
networkRate = config[0].value
switchDict = {}
physicalNetworks = PhysicalNetwork.list(
api_client,
zoneid=zoneid
)
# If there are no physical networks in zone, return as PASS
# as there are no validations to make
if validateList(physicalNetworks)[0] != PASS:
return [PASS, None]
for physicalNetwork in physicalNetworks:
trafficTypes = TrafficType.list(
api_client,
physicalnetworkid=physicalNetwork.id)
for trafficType in traffic_types_to_validate:
response = analyzeTrafficType(
trafficTypes, trafficType, switchTypes)
assert response[0] == PASS, response[1]
filteredList, switchName, vlanSpecified=\
response[1], response[2], response[3]
if not filteredList:
continue
if switchName not in switchDict:
dvswitches = vcenter_conn.get_dvswitches(
name=switchName)
switchDict[switchName] = dvswitches[0][
'dvswitch']['portgroupNameList']
response = getExpectedPortGroupNames(
api_client,
physicalNetwork,
networkRate,
switchName,
trafficTypes,
switchDict,
vcenter_conn,
vlanSpecified,
trafficType)
assert response[0] == PASS, response[1]
dvPortGroups = response[1]
expectedDVPortGroupNames.extend(dvPortGroups)
vcenterPortGroups = list(itertools.chain(*(switchDict.values())))
for expectedDVPortGroupName in expectedDVPortGroupNames:
assert expectedDVPortGroupName in vcenterPortGroups,\
"Port group %s not present in VCenter DataCenter" %\
expectedDVPortGroupName
except Exception as e:
return [FAIL, e]
return [PASS, None]
def migrate_router(apiclient, router_id, host_id):
cmd = migrateSystemVm.migrateSystemVmCmd()
cmd.hostid = host_id
cmd.virtualmachineid = router_id
apiclient.migrateSystemVm(cmd)
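# Illustrative usage (editor's sketch): a typical caller pairs
# migrate_router() with findSuitableHostForMigration() defined earlier
# in this module.
def _example_migrate_router(apiclient, router):
    host = findSuitableHostForMigration(apiclient, router.id)
    if host is None:
        raise Exception("No suitable host found for router migration")
    migrate_router(apiclient, router.id, host.id)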
|
ikoula/cloudstack
|
tools/marvin/marvin/lib/common.py
|
Python
|
gpl-2.0
| 67,873
|
import PyQtExtras
from PyQt5.QtWidgets import QFrame, QApplication
import sys
def main(args):
    app = QApplication(args)
main_frame = QFrame()
list_view = PyQtExtras.ListScrollArea(main_frame)
list_view.add_item_by_string('Item 1')
list_view.add_item_by_string('Item 2')
list_view.add_item_by_string('Item 3')
list_view.remove_item_by_string('Item 1')
main_frame.show()
app.exec_()
if __name__ == '__main__':
main(sys.argv)
|
jhavstad/model_runner
|
src/ScrollListViewTest.py
|
Python
|
gpl-2.0
| 469
|
#!/usr/bin/env python3
number = 23
guess = int(input('Enter an integer : '))
if guess == number:
    # A new block starts here
    print('Congratulations, you guessed it.')
    print('(but you do not win any pizzas!)')
    # The new block ends here
elif guess < number:
    # Another block
    print('No, it is a little higher than that')
    # You can do whatever you want inside this block
else:
    print('No, it is a little lower than that')
    # You must have guessed a number greater than (>) the set number to get here
print('Done')
# This last statement is executed
# after the if statement has finished.
|
pam-phy/python-notes
|
byte-of-python/if.py
|
Python
|
gpl-2.0
| 600
|
from django.test import TestCase
from django import forms
from django.forms.models import ModelForm
import unittest
from employee.forms import *
from django.test import Client
class TestBasic(unittest.TestCase):
"Basic tests"
def test_basic(self):
a = 1
self.assertEqual(1, a)
class Modelforms_test(TestCase):
def test_report(self):
        form = DailyReportForm(data={'employee': 'ravi@mp.com', 'project': 'ravi', 'report': 'Sample report'})
self.assertTrue(form.is_valid())
class Views_test(TestCase):
def test_employee_report(self):
c = Client()
resp = c.get('/portal/staff/')
self.assertEqual(resp.status_code, 302)
resp = c.get('/portal/staff/reports/new/')
self.assertEqual(resp.status_code, 302)
resp = c.get('/portal/staff/reports/edit/1/')
self.assertEqual(resp.status_code, 302)
resp = c.get('/portal/staff/reports/delete/1/')
self.assertEqual(resp.status_code, 302)
|
nikhila05/MicroSite
|
employee/tests.py
|
Python
|
gpl-2.0
| 946
|
import os
import re
import oeqa.utils.ftools as ftools
from oeqa.selftest.base import oeSelfTest
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
from oeqa.utils.decorators import testcase
class BitbakeTests(oeSelfTest):
def getline(self, res, line):
for l in res.output.split('\n'):
if line in l:
return l
@testcase(789)
def test_run_bitbake_from_dir_1(self):
os.chdir(os.path.join(self.builddir, 'conf'))
self.assertEqual(bitbake('-e').status, 0, msg = "bitbake couldn't run from \"conf\" dir")
@testcase(790)
def test_run_bitbake_from_dir_2(self):
my_env = os.environ.copy()
my_env['BBPATH'] = my_env['BUILDDIR']
os.chdir(os.path.dirname(os.environ['BUILDDIR']))
self.assertEqual(bitbake('-e', env=my_env).status, 0, msg = "bitbake couldn't run from builddir")
@testcase(806)
def test_event_handler(self):
self.write_config("INHERIT += \"test_events\"")
result = bitbake('m4-native')
        find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing RunQueue Tasks", result.output)
        find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output)
self.assertTrue(find_build_completed, msg = "Match failed in:\n%s" % result.output)
self.assertFalse('Test for bb.event.InvalidEvent' in result.output, msg = "\"Test for bb.event.InvalidEvent\" message found during bitbake process. bitbake output: %s" % result.output)
@testcase(103)
def test_local_sstate(self):
bitbake('m4-native')
bitbake('m4-native -cclean')
result = bitbake('m4-native')
find_setscene = re.search("m4-native.*do_.*_setscene", result.output)
self.assertTrue(find_setscene, msg = "No \"m4-native.*do_.*_setscene\" message found during bitbake m4-native. bitbake output: %s" % result.output )
@testcase(105)
def test_bitbake_invalid_recipe(self):
result = bitbake('-b asdf', ignore_status=True)
self.assertTrue("ERROR: Unable to find any recipe file matching 'asdf'" in result.output, msg = "Though asdf recipe doesn't exist, bitbake didn't output any err. message. bitbake output: %s" % result.output)
@testcase(107)
def test_bitbake_invalid_target(self):
result = bitbake('asdf', ignore_status=True)
self.assertTrue("ERROR: Nothing PROVIDES 'asdf'" in result.output, msg = "Though no 'asdf' target exists, bitbake didn't output any err. message. bitbake output: %s" % result.output)
@testcase(106)
def test_warnings_errors(self):
result = bitbake('-b asdf', ignore_status=True)
find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages* shown", result.output)
find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages* shown", result.output)
        self.assertTrue(find_warnings, msg="Did not find the number of warnings at the end of the build:\n" + result.output)
        self.assertTrue(find_errors, msg="Did not find the number of errors at the end of the build:\n" + result.output)
@testcase(108)
def test_invalid_patch(self):
# This patch already exists in SRC_URI so adding it again will cause the
# patch to fail.
self.write_recipeinc('man', 'SRC_URI += "file://man-1.5h1-make.patch"')
self.write_config("INHERIT_remove = \"report-error\"")
result = bitbake('man -c patch', ignore_status=True)
self.delete_recipeinc('man')
bitbake('-cclean man')
line = self.getline(result, "Function failed: patch_do_patch")
self.assertTrue(line and line.startswith("ERROR:"), msg = "Repeated patch application didn't fail. bitbake output: %s" % result.output)
@testcase(1354)
def test_force_task_1(self):
# test 1 from bug 5875
test_recipe = 'zlib'
test_data = "Microsoft Made No Profit From Anyone's Zunes Yo"
bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe)
image_dir = bb_vars['D']
pkgsplit_dir = bb_vars['PKGDEST']
man_dir = bb_vars['mandir']
bitbake('-c clean %s' % test_recipe)
bitbake('-c package -f %s' % test_recipe)
self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
man_file = os.path.join(image_dir + man_dir, 'man3/zlib.3')
ftools.append_file(man_file, test_data)
bitbake('-c package -f %s' % test_recipe)
man_split_file = os.path.join(pkgsplit_dir, 'zlib-doc' + man_dir, 'man3/zlib.3')
man_split_content = ftools.read_file(man_split_file)
self.assertIn(test_data, man_split_content, 'The man file has not changed in packages-split.')
ret = bitbake(test_recipe)
        self.assertIn('task do_package_write_rpm:', ret.output, 'Task do_package_write_rpm did not re-execute.')
@testcase(163)
def test_force_task_2(self):
# test 2 from bug 5875
test_recipe = 'zlib'
bitbake(test_recipe)
self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
result = bitbake('-C compile %s' % test_recipe)
look_for_tasks = ['do_compile:', 'do_install:', 'do_populate_sysroot:', 'do_package:']
for task in look_for_tasks:
            self.assertIn(task, result.output, msg="Couldn't find %s task." % task)
@testcase(167)
def test_bitbake_g(self):
result = bitbake('-g core-image-minimal')
for f in ['pn-buildlist', 'recipe-depends.dot', 'task-depends.dot']:
self.addCleanup(os.remove, f)
self.assertTrue('Task dependencies saved to \'task-depends.dot\'' in result.output, msg = "No task dependency \"task-depends.dot\" file was generated for the given task target. bitbake output: %s" % result.output)
self.assertTrue('busybox' in ftools.read_file(os.path.join(self.builddir, 'task-depends.dot')), msg = "No \"busybox\" dependency found in task-depends.dot file.")
@testcase(899)
def test_image_manifest(self):
bitbake('core-image-minimal')
bb_vars = get_bb_vars(["DEPLOY_DIR_IMAGE", "IMAGE_LINK_NAME"], "core-image-minimal")
deploydir = bb_vars["DEPLOY_DIR_IMAGE"]
imagename = bb_vars["IMAGE_LINK_NAME"]
manifest = os.path.join(deploydir, imagename + ".manifest")
self.assertTrue(os.path.islink(manifest), msg="No manifest file created for image. It should have been created in %s" % manifest)
@testcase(168)
def test_invalid_recipe_src_uri(self):
data = 'SRC_URI = "file://invalid"'
self.write_recipeinc('man', data)
self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
SSTATE_DIR = \"${TOPDIR}/download-selftest\"
INHERIT_remove = \"report-error\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
bitbake('-ccleanall man')
result = bitbake('-c fetch man', ignore_status=True)
bitbake('-ccleanall man')
self.delete_recipeinc('man')
        self.assertEqual(result.status, 1, msg="Command succeeded when it should have failed. bitbake output: %s" % result.output)
self.assertTrue('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:' in result.output, msg = "\"invalid\" file \
doesn't exist, yet no error message encountered. bitbake output: %s" % result.output)
line = self.getline(result, 'Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.')
self.assertTrue(line and line.startswith("ERROR:"), msg = "\"invalid\" file \
doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output)
@testcase(171)
def test_rename_downloaded_file(self):
self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
SSTATE_DIR = \"${TOPDIR}/download-selftest\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
data = 'SRC_URI_append = ";downloadfilename=test-aspell.tar.gz"'
self.write_recipeinc('aspell', data)
bitbake('-ccleanall aspell')
result = bitbake('-c fetch aspell', ignore_status=True)
self.delete_recipeinc('aspell')
self.assertEqual(result.status, 0, msg = "Couldn't fetch aspell. %s" % result.output)
dl_dir = get_bb_var("DL_DIR")
self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz')), msg = "File rename failed. No corresponding test-aspell.tar.gz file found under %s" % dl_dir)
self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz.done')), "File rename failed. No corresponding test-aspell.tar.gz.done file found under %s" % dl_dir)
@testcase(1028)
def test_environment(self):
self.write_config("TEST_ENV=\"localconf\"")
result = runCmd('bitbake -e | grep TEST_ENV=')
self.assertTrue('localconf' in result.output, msg = "bitbake didn't report any value for TEST_ENV variable. To test, run 'bitbake -e | grep TEST_ENV='")
@testcase(1029)
def test_dry_run(self):
result = runCmd('bitbake -n m4-native')
self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output)
@testcase(1030)
def test_just_parse(self):
result = runCmd('bitbake -p')
self.assertEqual(0, result.status, "errors encountered when parsing recipes. %s" % result.output)
@testcase(1031)
def test_version(self):
result = runCmd('bitbake -s | grep wget')
find = re.search(r"wget *:([0-9a-zA-Z\.\-]+)", result.output)
self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
@testcase(1032)
def test_prefile(self):
preconf = os.path.join(self.builddir, 'conf/prefile.conf')
self.track_for_cleanup(preconf)
ftools.write_file(preconf, "TEST_PREFILE=\"prefile\"")
result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
self.assertTrue('prefile' in result.output, "Preconfigure file \"prefile.conf\" was not taken into consideration.")
self.write_config("TEST_PREFILE=\"localconf\"")
result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
self.assertTrue('localconf' in result.output, "Preconfigure file \"prefile.conf\"was not taken into consideration.")
@testcase(1033)
def test_postfile(self):
postconf = os.path.join(self.builddir, 'conf/postfile.conf')
self.track_for_cleanup(postconf)
ftools.write_file(postconf, "TEST_POSTFILE=\"postfile\"")
self.write_config("TEST_POSTFILE=\"localconf\"")
result = runCmd('bitbake -R conf/postfile.conf -e | grep TEST_POSTFILE=')
self.assertTrue('postfile' in result.output, "Postconfigure file \"postfile.conf\" was not taken into consideration.")
@testcase(1034)
def test_checkuri(self):
result = runCmd('bitbake -c checkuri m4')
self.assertEqual(0, result.status, msg = "\"checkuri\" task was not executed. bitbake output: %s" % result.output)
@testcase(1035)
def test_continue(self):
self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
SSTATE_DIR = \"${TOPDIR}/download-selftest\"
INHERIT_remove = \"report-error\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
self.write_recipeinc('man',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" )
runCmd('bitbake -c cleanall man xcursor-transparent-theme')
result = runCmd('bitbake -c unpack -k man xcursor-transparent-theme', ignore_status=True)
errorpos = result.output.find('ERROR: Function failed: do_fail_task')
manver = re.search("NOTE: recipe xcursor-transparent-theme-(.*?): task do_unpack: Started", result.output)
continuepos = result.output.find('NOTE: recipe xcursor-transparent-theme-%s: task do_unpack: Started' % manver.group(1))
self.assertLess(errorpos, continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output)
@testcase(1119)
def test_non_gplv3(self):
self.write_config('INCOMPATIBLE_LICENSE = "GPLv3"')
result = bitbake('selftest-ed', ignore_status=True)
self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
lic_dir = get_bb_var('LICENSE_DIRECTORY')
self.assertFalse(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv3')))
self.assertTrue(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv2')))
@testcase(1422)
def test_setscene_only(self):
""" Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)"""
test_recipe = 'ed'
bitbake(test_recipe)
bitbake('-c clean %s' % test_recipe)
ret = bitbake('--setscene-only %s' % test_recipe)
tasks = re.findall(r'task\s+(do_\S+):', ret.output)
for task in tasks:
self.assertIn('_setscene', task, 'A task different from _setscene ran: %s.\n'
'Executed tasks were: %s' % (task, str(tasks)))
@testcase(1425)
def test_bbappend_order(self):
""" Bitbake should bbappend to recipe in a predictable order """
test_recipe = 'ed'
bb_vars = get_bb_vars(['SUMMARY', 'PV'], test_recipe)
test_recipe_summary_before = bb_vars['SUMMARY']
test_recipe_pv = bb_vars['PV']
recipe_append_file = test_recipe + '_' + test_recipe_pv + '.bbappend'
expected_recipe_summary = test_recipe_summary_before
for i in range(5):
recipe_append_dir = test_recipe + '_test_' + str(i)
recipe_append_path = os.path.join(self.testlayer_path, 'recipes-test', recipe_append_dir, recipe_append_file)
os.mkdir(os.path.join(self.testlayer_path, 'recipes-test', recipe_append_dir))
feature = 'SUMMARY += "%s"\n' % i
ftools.write_file(recipe_append_path, feature)
expected_recipe_summary += ' %s' % i
self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, 'recipes-test',
test_recipe + '_test_*'))
test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe)
self.assertEqual(expected_recipe_summary, test_recipe_summary_after)
|
schleichdi2/OPENNFR-6.1-CORE
|
opennfr-openembedded-core/meta/lib/oeqa/selftest/bbtests.py
|
Python
|
gpl-2.0
| 14,655
|
callbacks = []
def startupNotification(callback):
callbacks.append(callback)
return callback
def notify():
for callback in callbacks:
callback()
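# A minimal usage sketch (not part of the original module; the handler name
# below is a hypothetical example). startupNotification returns the callback,
# so it also works as a decorator:
#
# @startupNotification
# def on_startup():
#     print("guppy started")
#
# notify()  # invokes every registered callback in registration order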
|
xfire/guppy
|
guppy/startup.py
|
Python
|
gpl-2.0
| 172
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2009 Douglas S. Blank <doug.blank@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# $Id$
register(TOOL,
id = 'SetAttribute',
name = _("Set Attribute"),
description = _("Set an attribute to a given value."),
version = '0.0.32',
gramps_target_version = "5.1",
status = STABLE, # not yet tested with python 3
fname = 'SetAttributeTool.py',
authors = ["Douglas S. Blank"],
authors_email = ["doug.blank@gmail.com"],
category = TOOL_DBPROC,
toolclass = 'SetAttributeWindow',
optionclass = 'SetAttributeOptions',
tool_modes = [TOOL_MODE_GUI],
)
|
gramps-project/addons-source
|
SetAttributeTool/SetAttributeTool.gpr.py
|
Python
|
gpl-2.0
| 1,411
|
#!/usr/bin/env python
#-*- coding: utf8 -*-
# Copyright 2009-2012 Kamil Winczek <kwinczek@gmail.com>
#
# This file is part of series.py.
#
# series.py is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# series.py is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with series.py. If not, see http://www.gnu.org/licenses/.
import contextlib
import sys
import lxml.etree as etree
import shelve
import subprocess
try:
import urllib2
except ImportError:
import urllib.request as urllib2  # Python 3: urlopen lives in urllib.request
import time
import tvs.show
# Spinner implementation.
@contextlib.contextmanager
def spinning_distraction(spin):
if spin:
global p
p = subprocess.Popen(['tvs_spin.py'])
yield
p.terminate()
sys.stdout.write("\r")
sys.stdout.flush()
else:
yield
# --------------------------------------------------------------------- #
# #
# Class Cache #
# #
# --------------------------------------------------------------------- #
class Cache(object):
"""
Cache implementation.
Cache is a wrapper class for the Show class.
It is capable of retrieving and storing data from tvrage.com.
ttl contains the date up to which the object is valid.
"""
def __init__(self, keyword, options):
self.keyword = keyword
self.show = None
self.options = options
self.now = time.time()
if self.options.cache:
self.c = shelve.open(self.options.cachefile)
self.i = shelve.open(self.options.cacheids)
self.url_search = "http://services.tvrage.com/feeds/search.php?show=%s" % self.keyword
self.showid = self.__get_show_id()
self.url_full_show = "http://services.tvrage.com/feeds/full_show_info.php?sid=%s" % self.showid
self.show = self.__get_show()
if self.options.debug:
print("Search URL: %s" % self.url_search)
print("Shows full URL: %s" % self.url_full_show)
def __del__(self):
"""If cache was used all files need to be closed."""
if self.options.cache:
self.c.close()
self.i.close()
def __save_id_to_cache(self, showid):
"""Saves retrieved show's id to cache"""
self.i[self.keyword] = showid
def __save_show_to_cache(self, show):
if not show:
return False
# Set TTL: add 12 h (43200 s) to the current time.
self.c[str(self.showid)] = (self.now+43200, show)
return True
def __get_id_from_cache(self):
try:
return self.i[self.keyword]
except KeyError:
return None
def __get_id_from_tvrage(self):
try:
with spinning_distraction(spin=self.options.spinner):
return etree.fromstring(urllib2.urlopen(self.url_search).read()).xpath('//Results/show/showid')[0].text
except KeyboardInterrupt:
raise
except Exception:
return None
def __get_show_from_cache(self):
try:
return self.c[str(self.showid)]
except KeyError:
return (None, None)
def __get_show_from_tvrage(self):
try:
with spinning_distraction(spin=self.options.spinner):
return tvs.show.Show(etree.fromstring(urllib2.urlopen(self.url_full_show).read()), self.options)
except KeyboardInterrupt:
raise
except Exception:
return None
def __get_show_id(self):
"""Returns the first id found in the search results."""
# Try the id cache first, unless a refresh was requested; fall back
# to querying tvrage and store the result (only when a cache is in use).
if self.options.cache and not self.options.refresh:
showid = self.__get_id_from_cache()
if showid:
return showid
showid = self.__get_id_from_tvrage()
if showid and self.options.cache:
self.__save_id_to_cache(showid)
return showid
def __get_show(self):
"""Returns a show instance with data from tvrage."""
if self.showid is None: # Show id was previously not found
return None
if self.options.cache and not self.options.refresh:
ttl, show = self.__get_show_from_cache()
# Refetch when there is no cached entry or it has expired.
if not ttl or ttl < self.now:
show = self.__get_show_from_tvrage()
self.__save_show_to_cache(show)
elif self.options.refresh:
show = self.__get_show_from_tvrage()
self.__save_show_to_cache(show)
# If no cache is to be used.
else:
show = self.__get_show_from_tvrage()
return show
def get_show(self):
return self.show
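# A minimal usage sketch (an assumption, not part of the original module).
# `options` normally comes from the series.py option parser; SimpleNamespace
# merely stands in for it here with the attributes Cache actually reads.
#
# from types import SimpleNamespace
# options = SimpleNamespace(cache=False, refresh=False, debug=False,
#                           spinner=False, cachefile='', cacheids='')
# show = Cache('Some Show', options).get_show()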
|
kwinczek/tvseries
|
tvs/cache.py
|
Python
|
gpl-2.0
| 5,742
|
"""
Read a snakefood dependencies file and output the list of all files.
"""
# This file is part of the Snakefood open source package.
# See http://furius.ca/snakefood/ for licensing details.
import sys
from os.path import join
from snakefood.depends import read_depends, flatten_depends
def main():
import optparse
parser = optparse.OptionParser(__doc__.strip())
opts, args = parser.parse_args()
depends = read_depends(sys.stdin)
for droot, drel in flatten_depends(depends):
print(join(droot, drel))
if __name__ == '__main__':
main()
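# Typical pipeline usage (hedged: the console-script name depends on how
# snakefood was installed; it is commonly exposed as ``sfood-flatten``):
#
#   sfood /path/to/project | sfood-flatten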
|
jd23/py-deps
|
lib/python/snakefood/flatten.py
|
Python
|
gpl-2.0
| 535
|
# -*- coding: utf-8 -*-
#############################################################
# This file was automatically generated on 2022-01-18. #
# #
# Python Bindings Version 2.1.29 #
# #
# If you have a bugfix for this file and want to commit it, #
# please fix the bug in the generator. You can find a link #
# to the generators git repository on tinkerforge.com #
#############################################################
from collections import namedtuple
try:
from .ip_connection import Device, IPConnection, Error, create_char, create_char_list, create_string, create_chunk_data
except (ValueError, ImportError):
from ip_connection import Device, IPConnection, Error, create_char, create_char_list, create_string, create_chunk_data
GetSpeedRamping = namedtuple('SpeedRamping', ['acceleration', 'deacceleration'])
GetStepConfiguration = namedtuple('StepConfiguration', ['step_resolution', 'interpolation'])
GetBasicConfiguration = namedtuple('BasicConfiguration', ['standstill_current', 'motor_run_current', 'standstill_delay_time', 'power_down_time', 'stealth_threshold', 'coolstep_threshold', 'classic_threshold', 'high_velocity_chopper_mode'])
GetSpreadcycleConfiguration = namedtuple('SpreadcycleConfiguration', ['slow_decay_duration', 'enable_random_slow_decay', 'fast_decay_duration', 'hysteresis_start_value', 'hysteresis_end_value', 'sine_wave_offset', 'chopper_mode', 'comparator_blank_time', 'fast_decay_without_comparator'])
GetStealthConfiguration = namedtuple('StealthConfiguration', ['enable_stealth', 'amplitude', 'gradient', 'enable_autoscale', 'force_symmetric', 'freewheel_mode'])
GetCoolstepConfiguration = namedtuple('CoolstepConfiguration', ['minimum_stallguard_value', 'maximum_stallguard_value', 'current_up_step_width', 'current_down_step_width', 'minimum_current', 'stallguard_threshold_value', 'stallguard_mode'])
GetMiscConfiguration = namedtuple('MiscConfiguration', ['disable_short_to_ground_protection', 'synchronize_phase_frequency'])
GetDriverStatus = namedtuple('DriverStatus', ['open_load', 'short_to_ground', 'over_temperature', 'motor_stalled', 'actual_motor_current', 'full_step_active', 'stallguard_result', 'stealth_voltage_amplitude'])
GetAllData = namedtuple('AllData', ['current_velocity', 'current_position', 'remaining_steps', 'input_voltage', 'current_consumption'])
GetGPIOConfiguration = namedtuple('GPIOConfiguration', ['debounce', 'stop_deceleration'])
GetSPITFPErrorCount = namedtuple('SPITFPErrorCount', ['error_count_ack_checksum', 'error_count_message_checksum', 'error_count_frame', 'error_count_overflow'])
GetIdentity = namedtuple('Identity', ['uid', 'connected_uid', 'position', 'hardware_version', 'firmware_version', 'device_identifier'])
class BrickletSilentStepperV2(Device):
"""
Silently drives one bipolar stepper motor with up to 46V and 1.6A per phase
"""
DEVICE_IDENTIFIER = 2166
DEVICE_DISPLAY_NAME = 'Silent Stepper Bricklet 2.0'
DEVICE_URL_PART = 'silent_stepper_v2' # internal
CALLBACK_UNDER_VOLTAGE = 41
CALLBACK_POSITION_REACHED = 42
CALLBACK_ALL_DATA = 53
CALLBACK_NEW_STATE = 54
CALLBACK_GPIO_STATE = 55
FUNCTION_SET_MAX_VELOCITY = 1
FUNCTION_GET_MAX_VELOCITY = 2
FUNCTION_GET_CURRENT_VELOCITY = 3
FUNCTION_SET_SPEED_RAMPING = 4
FUNCTION_GET_SPEED_RAMPING = 5
FUNCTION_FULL_BRAKE = 6
FUNCTION_SET_CURRENT_POSITION = 7
FUNCTION_GET_CURRENT_POSITION = 8
FUNCTION_SET_TARGET_POSITION = 9
FUNCTION_GET_TARGET_POSITION = 10
FUNCTION_SET_STEPS = 11
FUNCTION_GET_STEPS = 12
FUNCTION_GET_REMAINING_STEPS = 13
FUNCTION_SET_STEP_CONFIGURATION = 14
FUNCTION_GET_STEP_CONFIGURATION = 15
FUNCTION_DRIVE_FORWARD = 16
FUNCTION_DRIVE_BACKWARD = 17
FUNCTION_STOP = 18
FUNCTION_GET_INPUT_VOLTAGE = 19
FUNCTION_SET_MOTOR_CURRENT = 22
FUNCTION_GET_MOTOR_CURRENT = 23
FUNCTION_SET_ENABLED = 24
FUNCTION_GET_ENABLED = 25
FUNCTION_SET_BASIC_CONFIGURATION = 26
FUNCTION_GET_BASIC_CONFIGURATION = 27
FUNCTION_SET_SPREADCYCLE_CONFIGURATION = 28
FUNCTION_GET_SPREADCYCLE_CONFIGURATION = 29
FUNCTION_SET_STEALTH_CONFIGURATION = 30
FUNCTION_GET_STEALTH_CONFIGURATION = 31
FUNCTION_SET_COOLSTEP_CONFIGURATION = 32
FUNCTION_GET_COOLSTEP_CONFIGURATION = 33
FUNCTION_SET_MISC_CONFIGURATION = 34
FUNCTION_GET_MISC_CONFIGURATION = 35
FUNCTION_SET_ERROR_LED_CONFIG = 36
FUNCTION_GET_ERROR_LED_CONFIG = 37
FUNCTION_GET_DRIVER_STATUS = 38
FUNCTION_SET_MINIMUM_VOLTAGE = 39
FUNCTION_GET_MINIMUM_VOLTAGE = 40
FUNCTION_SET_TIME_BASE = 43
FUNCTION_GET_TIME_BASE = 44
FUNCTION_GET_ALL_DATA = 45
FUNCTION_SET_ALL_CALLBACK_CONFIGURATION = 46
FUNCTION_GET_ALL_DATA_CALLBACK_CONFIGURATON = 47
FUNCTION_SET_GPIO_CONFIGURATION = 48
FUNCTION_GET_GPIO_CONFIGURATION = 49
FUNCTION_SET_GPIO_ACTION = 50
FUNCTION_GET_GPIO_ACTION = 51
FUNCTION_GET_GPIO_STATE = 52
FUNCTION_GET_SPITFP_ERROR_COUNT = 234
FUNCTION_SET_BOOTLOADER_MODE = 235
FUNCTION_GET_BOOTLOADER_MODE = 236
FUNCTION_SET_WRITE_FIRMWARE_POINTER = 237
FUNCTION_WRITE_FIRMWARE = 238
FUNCTION_SET_STATUS_LED_CONFIG = 239
FUNCTION_GET_STATUS_LED_CONFIG = 240
FUNCTION_GET_CHIP_TEMPERATURE = 242
FUNCTION_RESET = 243
FUNCTION_WRITE_UID = 248
FUNCTION_READ_UID = 249
FUNCTION_GET_IDENTITY = 255
STEP_RESOLUTION_1 = 8
STEP_RESOLUTION_2 = 7
STEP_RESOLUTION_4 = 6
STEP_RESOLUTION_8 = 5
STEP_RESOLUTION_16 = 4
STEP_RESOLUTION_32 = 3
STEP_RESOLUTION_64 = 2
STEP_RESOLUTION_128 = 1
STEP_RESOLUTION_256 = 0
CHOPPER_MODE_SPREAD_CYCLE = 0
CHOPPER_MODE_FAST_DECAY = 1
FREEWHEEL_MODE_NORMAL = 0
FREEWHEEL_MODE_FREEWHEELING = 1
FREEWHEEL_MODE_COIL_SHORT_LS = 2
FREEWHEEL_MODE_COIL_SHORT_HS = 3
CURRENT_UP_STEP_INCREMENT_1 = 0
CURRENT_UP_STEP_INCREMENT_2 = 1
CURRENT_UP_STEP_INCREMENT_4 = 2
CURRENT_UP_STEP_INCREMENT_8 = 3
CURRENT_DOWN_STEP_DECREMENT_1 = 0
CURRENT_DOWN_STEP_DECREMENT_2 = 1
CURRENT_DOWN_STEP_DECREMENT_8 = 2
CURRENT_DOWN_STEP_DECREMENT_32 = 3
MINIMUM_CURRENT_HALF = 0
MINIMUM_CURRENT_QUARTER = 1
STALLGUARD_MODE_STANDARD = 0
STALLGUARD_MODE_FILTERED = 1
OPEN_LOAD_NONE = 0
OPEN_LOAD_PHASE_A = 1
OPEN_LOAD_PHASE_B = 2
OPEN_LOAD_PHASE_AB = 3
SHORT_TO_GROUND_NONE = 0
SHORT_TO_GROUND_PHASE_A = 1
SHORT_TO_GROUND_PHASE_B = 2
SHORT_TO_GROUND_PHASE_AB = 3
OVER_TEMPERATURE_NONE = 0
OVER_TEMPERATURE_WARNING = 1
OVER_TEMPERATURE_LIMIT = 2
STATE_STOP = 1
STATE_ACCELERATION = 2
STATE_RUN = 3
STATE_DEACCELERATION = 4
STATE_DIRECTION_CHANGE_TO_FORWARD = 5
STATE_DIRECTION_CHANGE_TO_BACKWARD = 6
GPIO_ACTION_NONE = 0
GPIO_ACTION_NORMAL_STOP_RISING_EDGE = 1
GPIO_ACTION_NORMAL_STOP_FALLING_EDGE = 2
GPIO_ACTION_FULL_BRAKE_RISING_EDGE = 4
GPIO_ACTION_FULL_BRAKE_FALLING_EDGE = 8
GPIO_ACTION_CALLBACK_RISING_EDGE = 16
GPIO_ACTION_CALLBACK_FALLING_EDGE = 32
ERROR_LED_CONFIG_OFF = 0
ERROR_LED_CONFIG_ON = 1
ERROR_LED_CONFIG_SHOW_HEARTBEAT = 2
ERROR_LED_CONFIG_SHOW_ERROR = 3
BOOTLOADER_MODE_BOOTLOADER = 0
BOOTLOADER_MODE_FIRMWARE = 1
BOOTLOADER_MODE_BOOTLOADER_WAIT_FOR_REBOOT = 2
BOOTLOADER_MODE_FIRMWARE_WAIT_FOR_REBOOT = 3
BOOTLOADER_MODE_FIRMWARE_WAIT_FOR_ERASE_AND_REBOOT = 4
BOOTLOADER_STATUS_OK = 0
BOOTLOADER_STATUS_INVALID_MODE = 1
BOOTLOADER_STATUS_NO_CHANGE = 2
BOOTLOADER_STATUS_ENTRY_FUNCTION_NOT_PRESENT = 3
BOOTLOADER_STATUS_DEVICE_IDENTIFIER_INCORRECT = 4
BOOTLOADER_STATUS_CRC_MISMATCH = 5
STATUS_LED_CONFIG_OFF = 0
STATUS_LED_CONFIG_ON = 1
STATUS_LED_CONFIG_SHOW_HEARTBEAT = 2
STATUS_LED_CONFIG_SHOW_STATUS = 3
def __init__(self, uid, ipcon):
"""
Creates an object with the unique device ID *uid* and adds it to
the IP Connection *ipcon*.
"""
Device.__init__(self, uid, ipcon, BrickletSilentStepperV2.DEVICE_IDENTIFIER, BrickletSilentStepperV2.DEVICE_DISPLAY_NAME)
self.api_version = (2, 0, 0)
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_MAX_VELOCITY] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_MAX_VELOCITY] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_CURRENT_VELOCITY] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_SPEED_RAMPING] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_SPEED_RAMPING] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_FULL_BRAKE] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_CURRENT_POSITION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_CURRENT_POSITION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_TARGET_POSITION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_TARGET_POSITION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_STEPS] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_STEPS] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_REMAINING_STEPS] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_STEP_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_STEP_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_DRIVE_FORWARD] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_DRIVE_BACKWARD] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_STOP] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_INPUT_VOLTAGE] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_MOTOR_CURRENT] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_MOTOR_CURRENT] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_ENABLED] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_ENABLED] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_BASIC_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_BASIC_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_SPREADCYCLE_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_SPREADCYCLE_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_STEALTH_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_STEALTH_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_COOLSTEP_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_COOLSTEP_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_MISC_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_MISC_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_ERROR_LED_CONFIG] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_ERROR_LED_CONFIG] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_DRIVER_STATUS] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_MINIMUM_VOLTAGE] = BrickletSilentStepperV2.RESPONSE_EXPECTED_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_MINIMUM_VOLTAGE] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_TIME_BASE] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_TIME_BASE] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_ALL_DATA] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_ALL_CALLBACK_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_ALL_DATA_CALLBACK_CONFIGURATON] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_GPIO_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_GPIO_CONFIGURATION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_GPIO_ACTION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_GPIO_ACTION] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_GPIO_STATE] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_SPITFP_ERROR_COUNT] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_BOOTLOADER_MODE] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_BOOTLOADER_MODE] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_WRITE_FIRMWARE_POINTER] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_WRITE_FIRMWARE] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_SET_STATUS_LED_CONFIG] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_STATUS_LED_CONFIG] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_CHIP_TEMPERATURE] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_RESET] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_WRITE_UID] = BrickletSilentStepperV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletSilentStepperV2.FUNCTION_READ_UID] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletSilentStepperV2.FUNCTION_GET_IDENTITY] = BrickletSilentStepperV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.callback_formats[BrickletSilentStepperV2.CALLBACK_UNDER_VOLTAGE] = (10, 'H')
self.callback_formats[BrickletSilentStepperV2.CALLBACK_POSITION_REACHED] = (12, 'i')
self.callback_formats[BrickletSilentStepperV2.CALLBACK_ALL_DATA] = (22, 'H i i H H')
self.callback_formats[BrickletSilentStepperV2.CALLBACK_NEW_STATE] = (10, 'B B')
self.callback_formats[BrickletSilentStepperV2.CALLBACK_GPIO_STATE] = (9, '2!')
ipcon.add_device(self)
def set_max_velocity(self, velocity):
"""
Sets the maximum velocity of the stepper motor.
This function does *not* start the motor, it merely sets the maximum
velocity the stepper motor is accelerated to. To get the motor running use
either :func:`Set Target Position`, :func:`Set Steps`, :func:`Drive Forward` or
:func:`Drive Backward`.
"""
self.check_validity()
velocity = int(velocity)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_MAX_VELOCITY, (velocity,), 'H', 0, '')
def get_max_velocity(self):
"""
Returns the velocity as set by :func:`Set Max Velocity`.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_MAX_VELOCITY, (), '', 10, 'H')
def get_current_velocity(self):
"""
Returns the *current* velocity of the stepper motor.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_CURRENT_VELOCITY, (), '', 10, 'H')
def set_speed_ramping(self, acceleration, deacceleration):
"""
Sets the acceleration and deacceleration of the stepper motor.
An acceleration of 1000 means that the velocity is increased by
1000 *steps/s* every second.
For example: If the current velocity is 0 and you want to accelerate to a
velocity of 8000 *steps/s* in 10 seconds, you should set an acceleration
of 800 *steps/s²*.
An acceleration/deacceleration of 0 means instantaneous
acceleration/deacceleration (not recommended).
"""
self.check_validity()
acceleration = int(acceleration)
deacceleration = int(deacceleration)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_SPEED_RAMPING, (acceleration, deacceleration), 'H H', 0, '')
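# Worked example for the ramping arithmetic above (a sketch; ``ss`` is a
# hypothetical, already-connected BrickletSilentStepperV2 instance):
# accelerating from 0 to 8000 steps/s in 10 s needs 8000/10 = 800 steps/s².
#
# ss.set_speed_ramping(800, 800)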
def get_speed_ramping(self):
"""
Returns the acceleration and deacceleration as set by
:func:`Set Speed Ramping`.
"""
self.check_validity()
return GetSpeedRamping(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_SPEED_RAMPING, (), '', 12, 'H H'))
def full_brake(self):
"""
Executes an active full brake.
.. warning::
This function is for emergency purposes,
where an immediate brake is necessary. Depending on the current velocity and
the strength of the motor, a full brake can be quite violent.
Call :func:`Stop` if you just want to stop the motor.
"""
self.check_validity()
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_FULL_BRAKE, (), '', 0, '')
def set_current_position(self, position):
"""
Sets the current steps of the internal step counter. This can be used to
set the current position to 0 when some kind of starting position
is reached (e.g. when a CNC machine reaches a corner).
"""
self.check_validity()
position = int(position)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_CURRENT_POSITION, (position,), 'i', 0, '')
def get_current_position(self):
"""
Returns the current position of the stepper motor in steps. On startup
the position is 0. The steps are counted with all possible driving
functions (:func:`Set Target Position`, :func:`Set Steps`, :func:`Drive Forward` or
:func:`Drive Backward`). It also is possible to reset the steps to 0 or
set them to any other desired value with :func:`Set Current Position`.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_CURRENT_POSITION, (), '', 12, 'i')
def set_target_position(self, position):
"""
Sets the target position of the stepper motor in steps. For example,
if the current position of the motor is 500 and :func:`Set Target Position` is
called with 1000, the stepper motor will drive 500 steps forward. It will
use the velocity, acceleration and deacceleration as set by
:func:`Set Max Velocity` and :func:`Set Speed Ramping`.
A call of :func:`Set Target Position` with the parameter *x* is equivalent to
a call of :func:`Set Steps` with the parameter
(*x* - :func:`Get Current Position`).
"""
self.check_validity()
position = int(position)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_TARGET_POSITION, (position,), 'i', 0, '')
def get_target_position(self):
"""
Returns the last target position as set by :func:`Set Target Position`.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_TARGET_POSITION, (), '', 12, 'i')
def set_steps(self, steps):
"""
Sets the number of steps the stepper motor should run. Positive values
will drive the motor forward and negative values backward.
The velocity, acceleration and deacceleration as set by
:func:`Set Max Velocity` and :func:`Set Speed Ramping` will be used.
"""
self.check_validity()
steps = int(steps)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_STEPS, (steps,), 'i', 0, '')
def get_steps(self):
"""
Returns the last steps as set by :func:`Set Steps`.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_STEPS, (), '', 12, 'i')
def get_remaining_steps(self):
"""
Returns the remaining steps of the last call of :func:`Set Steps`.
For example, if :func:`Set Steps` is called with 2000 and
:func:`Get Remaining Steps` is called after the motor has run for 500 steps,
it will return 1500.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_REMAINING_STEPS, (), '', 12, 'i')
def set_step_configuration(self, step_resolution, interpolation):
"""
Sets the step resolution from full-step up to 1/256-step.
If interpolation is turned on, the Silent Stepper Bricklet 2.0 will always interpolate
your step inputs as 1/256-step. If you use full-step mode with interpolation, each
step will generate 256 1/256 steps.
For maximum torque use full-step without interpolation. For maximum resolution use
1/256-step. Turn interpolation on to make the stepper drive less noisy.
If you often change the speed with high acceleration you should turn the
interpolation off.
"""
self.check_validity()
step_resolution = int(step_resolution)
interpolation = bool(interpolation)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_STEP_CONFIGURATION, (step_resolution, interpolation), 'B !', 0, '')
def get_step_configuration(self):
"""
Returns the step mode as set by :func:`Set Step Configuration`.
"""
self.check_validity()
return GetStepConfiguration(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_STEP_CONFIGURATION, (), '', 10, 'B !'))
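# Illustrative configuration choices (an assumption about typical use, based
# on the docstring above; ``ss`` is a hypothetical connected instance):
#
# ss.set_step_configuration(BrickletSilentStepperV2.STEP_RESOLUTION_1, False)   # maximum torque
# ss.set_step_configuration(BrickletSilentStepperV2.STEP_RESOLUTION_256, True)  # maximum resolution, quiet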
def drive_forward(self):
"""
Drives the stepper motor forward until :func:`Drive Backward` or
:func:`Stop` is called. The velocity, acceleration and deacceleration as
set by :func:`Set Max Velocity` and :func:`Set Speed Ramping` will be used.
"""
self.check_validity()
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_DRIVE_FORWARD, (), '', 0, '')
def drive_backward(self):
"""
Drives the stepper motor backward until :func:`Drive Forward` or
:func:`Stop` is triggered. The velocity, acceleration and deacceleration as
set by :func:`Set Max Velocity` and :func:`Set Speed Ramping` will be used.
"""
self.check_validity()
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_DRIVE_BACKWARD, (), '', 0, '')
def stop(self):
"""
Stops the stepper motor with the deacceleration as set by
:func:`Set Speed Ramping`.
"""
self.check_validity()
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_STOP, (), '', 0, '')
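# A minimal driving sketch combining the calls above, following the usual
# Tinkerforge pattern of creating an IPConnection, binding the device and
# connecting. HOST, PORT and the UID 'XYZ' are placeholder assumptions;
# error handling is omitted.
#
# ipcon = IPConnection()
# ss = BrickletSilentStepperV2('XYZ', ipcon)
# ipcon.connect('localhost', 4223)
# ss.set_motor_current(800)        # mA; stay within the motor's rating
# ss.set_max_velocity(2000)        # steps/s
# ss.set_speed_ramping(500, 5000)  # slow ramp up, fast ramp down
# ss.set_enabled(True)
# ss.drive_forward()
# ...
# ss.stop()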
def get_input_voltage(self):
"""
Returns the external input voltage. The external input voltage is
given via the black power input connector on the Silent Stepper Bricklet 2.0.
If there is an external input voltage and a stack input voltage, the motor
will be driven by the external input voltage. If there is only a stack
voltage present, the motor will be driven by this voltage.
.. warning::
This means, if you have a high stack voltage and a low external voltage,
the motor will be driven with the low external voltage. If you then remove
the external connection, it will immediately be driven by the high
stack voltage
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_INPUT_VOLTAGE, (), '', 10, 'H')
def set_motor_current(self, current):
"""
Sets the current with which the motor will be driven.
.. warning::
Do not set this value above the specifications of your stepper motor.
Otherwise it may damage your motor.
"""
self.check_validity()
current = int(current)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_MOTOR_CURRENT, (current,), 'H', 0, '')
def get_motor_current(self):
"""
Returns the current as set by :func:`Set Motor Current`.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_MOTOR_CURRENT, (), '', 10, 'H')
def set_enabled(self, enabled):
"""
Enables/Disables the driver chip. The driver parameters can be configured (maximum velocity,
acceleration, etc) before it is enabled.
.. warning::
Disabling the driver chip while the motor is still turning can damage the
driver chip. The motor should be stopped by calling the :func:`Stop` function
before disabling the motor power. The :func:`Stop` function will **not**
wait until the motor is actually stopped. You have to explicitly wait for the
appropriate time after calling the :func:`Stop` function before calling
:func:`Set Enabled` with *false*.
"""
self.check_validity()
enabled = bool(enabled)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_ENABLED, (enabled,), '!', 0, '')
def get_enabled(self):
"""
Returns *true* if the stepper driver is enabled, *false* otherwise.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_ENABLED, (), '', 9, '!')
def set_basic_configuration(self, standstill_current, motor_run_current, standstill_delay_time, power_down_time, stealth_threshold, coolstep_threshold, classic_threshold, high_velocity_chopper_mode):
"""
Sets the basic configuration parameters for the different modes (Stealth, Coolstep, Classic).
* Standstill Current: This value can be used to lower the current during stand still. This might
be reasonable to reduce the heating of the motor and the Bricklet 2.0. When the motor is in standstill
the configured motor phase current will be driven until the configured
Power Down Time has elapsed. After that the phase current will be reduced to the standstill
current. The elapsed time for this reduction can be configured with the Standstill Delay Time.
The maximum allowed value is the configured maximum motor current
(see :func:`Set Motor Current`).
* Motor Run Current: The value sets the motor current when the motor is running.
Use a value of at least one half of the global maximum motor current for a good
microstep performance. The maximum allowed value is the current
motor current. The API maps the entered value to 1/32 ... 32/32 of the maximum
motor current. This value should be used to change the motor current during motor movement,
whereas the global maximum motor current should not be changed while the motor is moving
(see :func:`Set Motor Current`).
* Standstill Delay Time: Controls the duration for motor power down after a motion
as soon as standstill is detected and the Power Down Time is expired. A high Standstill Delay
Time results in a smooth transition that avoids motor jerk during power down.
* Power Down Time: Sets the delay time after a stand still.
* Stealth Threshold: Sets the upper threshold for Stealth mode.
If the velocity of the motor goes above this value, Stealth mode is turned
off. Otherwise it is turned on. In Stealth mode the torque declines with high speed.
* Coolstep Threshold: Sets the lower threshold for Coolstep mode.
The Coolstep Threshold needs to be above the Stealth Threshold.
* Classic Threshold: Sets the lower threshold for classic mode.
In classic mode the stepper becomes more noisy, but the torque is maximized.
* High Velocity Chopper Mode: If High Velocity Chopper Mode is enabled, the stepper control
is optimized to run the stepper motors at high velocities.
If you want to use all three thresholds make sure that
Stealth Threshold < Coolstep Threshold < Classic Threshold.
"""
self.check_validity()
standstill_current = int(standstill_current)
motor_run_current = int(motor_run_current)
standstill_delay_time = int(standstill_delay_time)
power_down_time = int(power_down_time)
stealth_threshold = int(stealth_threshold)
coolstep_threshold = int(coolstep_threshold)
classic_threshold = int(classic_threshold)
high_velocity_chopper_mode = bool(high_velocity_chopper_mode)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_BASIC_CONFIGURATION, (standstill_current, motor_run_current, standstill_delay_time, power_down_time, stealth_threshold, coolstep_threshold, classic_threshold, high_velocity_chopper_mode), 'H H H H H H H !', 0, '')
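# Illustrative call respecting the threshold ordering from the docstring
# above (Stealth < Coolstep < Classic); all values are made-up examples:
#
# ss.set_basic_configuration(200, 800, 0, 0, 500, 1000, 2000, False)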
def get_basic_configuration(self):
"""
Returns the configuration as set by :func:`Set Basic Configuration`.
"""
self.check_validity()
return GetBasicConfiguration(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_BASIC_CONFIGURATION, (), '', 23, 'H H H H H H H !'))
def set_spreadcycle_configuration(self, slow_decay_duration, enable_random_slow_decay, fast_decay_duration, hysteresis_start_value, hysteresis_end_value, sine_wave_offset, chopper_mode, comparator_blank_time, fast_decay_without_comparator):
"""
Note: If you don't know what any of this means you can very likely keep all of
the values as default!
Sets the Spreadcycle configuration parameters. Spreadcycle is a chopper algorithm which actively
controls the motor current flow. More information can be found in the TMC2130 datasheet on page
47 (7 spreadCycle and Classic Chopper).
* Slow Decay Duration: Controls duration of off time setting of slow decay phase.
0 = driver disabled, all bridges off. Use 1 only with Comparator Blank time >= 2.
* Enable Random Slow Decay: Set to false to fix chopper off time as set by Slow Decay Duration.
If you set it to true, Decay Duration is randomly modulated.
* Fast Decay Duration: Sets the fast decay duration. This parameter is
only used if the Chopper Mode is set to Fast Decay.
* Hysteresis Start Value: Sets the hysteresis start value. This parameter is
only used if the Chopper Mode is set to Spread Cycle.
* Hysteresis End Value: Sets the hysteresis end value. This parameter is
only used if the Chopper Mode is set to Spread Cycle.
* Sine Wave Offset: Sets the sine wave offset. This parameter is
only used if the Chopper Mode is set to Fast Decay. 1/512 of the value is added to the absolute
value of the sine wave.
* Chopper Mode: 0 = Spread Cycle, 1 = Fast Decay.
* Comparator Blank Time: Sets the blank time of the comparator. Available values are
* 0 = 16 clocks,
* 1 = 24 clocks,
* 2 = 36 clocks and
* 3 = 54 clocks.
A value of 1 or 2 is recommended for most applications.
* Fast Decay Without Comparator: If set to true the current comparator usage for termination of the
fast decay cycle is disabled.
"""
self.check_validity()
slow_decay_duration = int(slow_decay_duration)
enable_random_slow_decay = bool(enable_random_slow_decay)
fast_decay_duration = int(fast_decay_duration)
hysteresis_start_value = int(hysteresis_start_value)
hysteresis_end_value = int(hysteresis_end_value)
sine_wave_offset = int(sine_wave_offset)
chopper_mode = int(chopper_mode)
comparator_blank_time = int(comparator_blank_time)
fast_decay_without_comparator = bool(fast_decay_without_comparator)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_SPREADCYCLE_CONFIGURATION, (slow_decay_duration, enable_random_slow_decay, fast_decay_duration, hysteresis_start_value, hysteresis_end_value, sine_wave_offset, chopper_mode, comparator_blank_time, fast_decay_without_comparator), 'B ! B B b b B B !', 0, '')
def get_spreadcycle_configuration(self):
"""
Returns the configuration as set by :func:`Set Spreadcycle Configuration`.
"""
self.check_validity()
return GetSpreadcycleConfiguration(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_SPREADCYCLE_CONFIGURATION, (), '', 17, 'B ! B B b b B B !'))
def set_stealth_configuration(self, enable_stealth, amplitude, gradient, enable_autoscale, force_symmetric, freewheel_mode):
"""
Note: If you don't know what any of this means you can very likely keep all of
the values as default!
Sets the configuration relevant for Stealth mode.
* Enable Stealth: If set to true the stealth mode is enabled, if set to false the
stealth mode is disabled, even if the speed is below the threshold set in :func:`Set Basic Configuration`.
* Amplitude: If autoscale is disabled, the PWM amplitude is scaled by this value. If autoscale is enabled,
this value defines the maximum PWM amplitude change per half wave.
* Gradient: If autoscale is disabled, the PWM gradient is scaled by this value. If autoscale is enabled,
this value defines the maximum PWM gradient. With autoscale a value above 64 is recommended,
otherwise the regulation might not be able to measure the current.
* Enable Autoscale: If set to true, automatic current control is used. Otherwise the user defined
amplitude and gradient are used.
* Force Symmetric: If true, a symmetric PWM cycle is enforced. Otherwise the PWM value may change within each
PWM cycle.
* Freewheel Mode: The freewheel mode defines the behavior in stand still if the Standstill Current
(see :func:`Set Basic Configuration`) is set to 0.
"""
self.check_validity()
enable_stealth = bool(enable_stealth)
amplitude = int(amplitude)
gradient = int(gradient)
enable_autoscale = bool(enable_autoscale)
force_symmetric = bool(force_symmetric)
freewheel_mode = int(freewheel_mode)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_STEALTH_CONFIGURATION, (enable_stealth, amplitude, gradient, enable_autoscale, force_symmetric, freewheel_mode), '! B B ! ! B', 0, '')
def get_stealth_configuration(self):
"""
Returns the configuration as set by :func:`Set Stealth Configuration`.
"""
self.check_validity()
return GetStealthConfiguration(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_STEALTH_CONFIGURATION, (), '', 14, '! B B ! ! B'))
def set_coolstep_configuration(self, minimum_stallguard_value, maximum_stallguard_value, current_up_step_width, current_down_step_width, minimum_current, stallguard_threshold_value, stallguard_mode):
"""
Note: If you don't know what any of this means you can very likely keep all of
the values as default!
Sets the configuration relevant for Coolstep.
* Minimum Stallguard Value: If the Stallguard result falls below this value*32, the motor current
is increased to reduce motor load angle. A value of 0 turns Coolstep off.
* Maximum Stallguard Value: If the Stallguard result goes above
(Min Stallguard Value + Max Stallguard Value + 1) * 32, the motor current is decreased to save
energy.
* Current Up Step Width: Sets the up step increment per Stallguard value. The value range is 0-3,
corresponding to the increments 1, 2, 4 and 8.
* Current Down Step Width: Sets the down step decrement per Stallguard value. The value range is 0-3,
corresponding to the decrements 1, 2, 8 and 16.
* Minimum Current: Sets the minimum current for Coolstep current control. You can choose between
half and quarter of the run current.
* Stallguard Threshold Value: Sets the level for stall output (see :func:`Get Driver Status`).
A lower value gives a higher sensitivity. You have to find a suitable value for your
motor by trial and error, 0 works for most motors.
* Stallguard Mode: Set to 0 for standard resolution or 1 for filtered mode. In filtered mode the Stallguard
signal will be updated every four full-steps.
"""
self.check_validity()
minimum_stallguard_value = int(minimum_stallguard_value)
maximum_stallguard_value = int(maximum_stallguard_value)
current_up_step_width = int(current_up_step_width)
current_down_step_width = int(current_down_step_width)
minimum_current = int(minimum_current)
stallguard_threshold_value = int(stallguard_threshold_value)
stallguard_mode = int(stallguard_mode)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_COOLSTEP_CONFIGURATION, (minimum_stallguard_value, maximum_stallguard_value, current_up_step_width, current_down_step_width, minimum_current, stallguard_threshold_value, stallguard_mode), 'B B B B B b B', 0, '')
def get_coolstep_configuration(self):
"""
Returns the configuration as set by :func:`Set Coolstep Configuration`.
"""
self.check_validity()
return GetCoolstepConfiguration(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_COOLSTEP_CONFIGURATION, (), '', 15, 'B B B B B b B'))
def set_misc_configuration(self, disable_short_to_ground_protection, synchronize_phase_frequency):
"""
Note: If you don't know what any of this means you can very likely keep all of
the values as default!
Sets miscellaneous configuration parameters.
* Disable Short To Ground Protection: Set to false to enable short to ground protection, otherwise
it is disabled.
* Synchronize Phase Frequency: With this parameter you can synchronize the chopper for both phases
of a two phase motor to avoid the occurrence of a beat. The value range is 0-15. If set to 0,
the synchronization is turned off. Otherwise the synchronization is done through the formula
f_sync = f_clk/(value*64). In Classic Mode the synchronization is automatically switched off.
f_clk is 12.8MHz.
"""
self.check_validity()
disable_short_to_ground_protection = bool(disable_short_to_ground_protection)
synchronize_phase_frequency = int(synchronize_phase_frequency)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_MISC_CONFIGURATION, (disable_short_to_ground_protection, synchronize_phase_frequency), '! B', 0, '')
def get_misc_configuration(self):
"""
Returns the configuration as set by :func:`Set Misc Configuration`.
"""
self.check_validity()
return GetMiscConfiguration(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_MISC_CONFIGURATION, (), '', 10, '! B'))
def set_error_led_config(self, config):
"""
Configures the error LED to be either turned off, turned on, blink in
heartbeat mode or show an error.
If the LED is configured to show errors it has three different states:
* Off: No error present.
* 250ms interval blink: Overtemperature warning.
* 1s interval blink: Input voltage too small.
* Full red: Motor disabled because of short to ground in phase A or B or because of overtemperature.
"""
self.check_validity()
config = int(config)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_ERROR_LED_CONFIG, (config,), 'B', 0, '')
def get_error_led_config(self):
"""
Returns the LED configuration as set by :func:`Set Error LED Config`
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_ERROR_LED_CONFIG, (), '', 9, 'B')
def get_driver_status(self):
"""
Returns the current driver status.
* Open Load: Indicates if an open load is present on phase A, B or both. This could mean that there is a problem
with the wiring of the motor. False detection can occur in fast motion as well as during stand still.
* Short To Ground: Indicates if a short to ground is present on phase A, B or both. If this is detected the driver
automatically becomes disabled and stays disabled until it is enabled again manually.
* Over Temperature: The over temperature indicator switches to "Warning" if the driver IC warms up. The warning flag
is expected during long duration stepper uses. If the temperature limit is reached the indicator switches
to "Limit". In this case the driver becomes disabled until it cools down again.
* Motor Stalled: Is true if a motor stall was detected.
* Actual Motor Current: Indicates the actual current control scaling as used in Coolstep mode.
It represents a multiplier of 1/32 to 32/32 of the
``Motor Run Current`` as set by :func:`Set Basic Configuration`. Example: If a ``Motor Run Current``
of 1000mA was set and the returned value is 15, the ``Actual Motor Current`` is 16/32*1000mA = 500mA.
* Stallguard Result: Indicates the load of the motor. A lower value signals a higher load. Per trial and error
you can find out which value corresponds to a suitable torque for the velocity used in your application.
After that you can use this threshold value to find out if a motor stall becomes probable and react on it (e.g.
decrease velocity).
During standstill this value cannot be used for stall detection; it shows the chopper on-time for motor coil A.
* Stealth Voltage Amplitude: Shows the actual PWM scaling. In Stealth mode it can be used to detect motor load and
stall if autoscale is enabled (see :func:`Set Stealth Configuration`).
"""
self.check_validity()
return GetDriverStatus(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_DRIVER_STATUS, (), '', 16, 'B B B ! B ! B B'))
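# Decoding example for ``Actual Motor Current`` from the docstring above
# (a sketch; ``ss`` is a hypothetical connected instance):
#
# status = ss.get_driver_status()
# run_current = 1000  # mA, as configured via set_basic_configuration()
# actual_ma = (status.actual_motor_current + 1) / 32 * run_current
# # e.g. a returned value of 15 yields 16/32 * 1000 mA = 500 mA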
def set_minimum_voltage(self, voltage):
"""
Sets the minimum voltage, below which the :cb:`Under Voltage` callback
is triggered. The minimum possible value that works with the Silent Stepper
Bricklet 2.0 is 8V.
You can use this function to detect the discharge of a battery that is used
to drive the stepper motor. If you have a fixed power supply, you likely do
not need this functionality.
"""
self.check_validity()
voltage = int(voltage)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_MINIMUM_VOLTAGE, (voltage,), 'H', 0, '')
def get_minimum_voltage(self):
"""
Returns the minimum voltage as set by :func:`Set Minimum Voltage`.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_MINIMUM_VOLTAGE, (), '', 10, 'H')
def set_time_base(self, time_base):
"""
Sets the time base of the velocity and the acceleration of the Silent Stepper
Bricklet 2.0.
For example, if you want to make one step every 1.5 seconds, you can set
the time base to 15 and the velocity to 10. Now the velocity is
10 steps/15 s = 1 step/1.5 s.
"""
self.check_validity()
time_base = int(time_base)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_TIME_BASE, (time_base,), 'I', 0, '')
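# Time-base arithmetic from the docstring above: one step every 1.5 s
# (``ss`` is a hypothetical connected instance):
#
# ss.set_time_base(15)
# ss.set_max_velocity(10)  # 10 steps / 15 s = 1 step / 1.5 s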
def get_time_base(self):
"""
Returns the time base as set by :func:`Set Time Base`.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_TIME_BASE, (), '', 12, 'I')
def get_all_data(self):
"""
Returns the following parameters: The current velocity,
the current position, the remaining steps, the stack voltage, the external
voltage and the current consumption of the stepper motor.
The current consumption is calculated by multiplying the ``Actual Motor Current``
value (see :func:`Set Basic Configuration`) with the ``Motor Run Current``
(see :func:`Get Driver Status`). This is an internal calculation of the
driver, not an independent external measurement.
The current consumption calculation was broken up to firmware 2.0.1; it has
been fixed since firmware 2.0.2.
There is also a callback for this function, see :cb:`All Data` callback.
"""
self.check_validity()
return GetAllData(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_ALL_DATA, (), '', 22, 'H i i H H'))
def set_all_callback_configuration(self, period):
"""
Sets the period with which the :cb:`All Data` callback is triggered
periodically. A value of 0 turns the callback off.
"""
self.check_validity()
period = int(period)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_ALL_CALLBACK_CONFIGURATION, (period,), 'I', 0, '')
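# Callback sketch, assuming the standard Tinkerforge ``register_callback``
# device method (``ss`` is a hypothetical connected instance; the period is
# presumably in ms, as with other Tinkerforge callback configurations):
#
# def cb_all_data(current_velocity, current_position, remaining_steps,
#                 input_voltage, current_consumption):
#     print(current_velocity, remaining_steps)
#
# ss.register_callback(BrickletSilentStepperV2.CALLBACK_ALL_DATA, cb_all_data)
# ss.set_all_callback_configuration(1000)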
def get_all_data_callback_configuraton(self):
"""
Returns the period as set by :func:`Set All Callback Configuration`.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_ALL_DATA_CALLBACK_CONFIGURATON, (), '', 12, 'I')
def set_gpio_configuration(self, channel, debounce, stop_deceleration):
"""
Sets the GPIO configuration for the given channel.
You can configure a debounce and the deceleration that is used if the action is
configured as ``normal stop``. See :func:`Set GPIO Action`.
"""
self.check_validity()
channel = int(channel)
debounce = int(debounce)
stop_deceleration = int(stop_deceleration)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_GPIO_CONFIGURATION, (channel, debounce, stop_deceleration), 'B H H', 0, '')
def get_gpio_configuration(self, channel):
"""
Returns the GPIO configuration for a channel as set by :func:`Set GPIO Configuration`.
"""
self.check_validity()
channel = int(channel)
return GetGPIOConfiguration(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_GPIO_CONFIGURATION, (channel,), 'B', 12, 'H H'))
def set_gpio_action(self, channel, action):
"""
Sets the GPIO action for the given channel.
The action can be a normal stop, a full brake or a callback, each for a
rising or a falling edge. The actions form a bitmask, so they can be used
at the same time: you can, for example, trigger a full brake and a callback
simultaneously, or react to both rising and falling edges.
The deceleration speed for the normal stop can be configured with
:func:`Set GPIO Configuration`.
"""
self.check_validity()
channel = int(channel)
action = int(action)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_GPIO_ACTION, (channel, action), 'B I', 0, '')
def get_gpio_action(self, channel):
"""
Returns the GPIO action for a channel as set by :func:`Set GPIO Action`.
"""
self.check_validity()
channel = int(channel)
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_GPIO_ACTION, (channel,), 'B', 12, 'I')
def get_gpio_state(self):
"""
Returns the GPIO state for both channels. True if the state is ``high`` and
false if the state is ``low``.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_GPIO_STATE, (), '', 9, '2!')
def get_spitfp_error_count(self):
"""
Returns the error count for the communication between Brick and Bricklet.
The errors are divided into
* ACK checksum errors,
* message checksum errors,
* framing errors and
* overflow errors.
The error counts are for errors that occur on the Bricklet side. All
Bricks have a similar function that returns the errors on the Brick side.
"""
self.check_validity()
return GetSPITFPErrorCount(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_SPITFP_ERROR_COUNT, (), '', 24, 'I I I I'))
def set_bootloader_mode(self, mode):
"""
Sets the bootloader mode and returns the status after the requested
mode change was instigated.
You can change from bootloader mode to firmware mode and vice versa. A change
from bootloader mode to firmware mode will only take place if the entry function,
device identifier and CRC are present and correct.
This function is used by Brick Viewer during flashing. It should not be
necessary to call it in a normal user program.
"""
self.check_validity()
mode = int(mode)
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 9, 'B')
def get_bootloader_mode(self):
"""
Returns the current bootloader mode, see :func:`Set Bootloader Mode`.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_BOOTLOADER_MODE, (), '', 9, 'B')
def set_write_firmware_pointer(self, pointer):
"""
Sets the firmware pointer for :func:`Write Firmware`. The pointer has
to be increased by chunks of size 64. The data is written to flash
every 4 chunks (which equals one page of size 256).
This function is used by Brick Viewer during flashing. It should not be
necessary to call it in a normal user program.
"""
self.check_validity()
pointer = int(pointer)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_WRITE_FIRMWARE_POINTER, (pointer,), 'I', 0, '')
def write_firmware(self, data):
"""
Writes 64 Bytes of firmware at the position as written by
:func:`Set Write Firmware Pointer` before. The firmware is written
to flash every 4 chunks.
You can only write firmware in bootloader mode.
This function is used by Brick Viewer during flashing. It should not be
necessary to call it in a normal user program.
"""
self.check_validity()
data = list(map(int, data))
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_WRITE_FIRMWARE, (data,), '64B', 9, 'B')
def set_status_led_config(self, config):
"""
Sets the status LED configuration. By default the LED shows
communication traffic between Brick and Bricklet: it flickers once
for every 10 received data packets.
You can also turn the LED permanently on/off or show a heartbeat.
If the Bricklet is in bootloader mode, the LED will show a heartbeat by default.
"""
self.check_validity()
config = int(config)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_SET_STATUS_LED_CONFIG, (config,), 'B', 0, '')
def get_status_led_config(self):
"""
Returns the configuration as set by :func:`Set Status LED Config`
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_STATUS_LED_CONFIG, (), '', 9, 'B')
def get_chip_temperature(self):
"""
Returns the temperature as measured inside the microcontroller. The
value returned is not the ambient temperature!
The temperature is only proportional to the real temperature and its
accuracy is poor. In practice it is only useful as an indicator for
temperature changes.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_CHIP_TEMPERATURE, (), '', 10, 'h')
def reset(self):
"""
Calling this function will reset the Bricklet. All configurations
will be lost.
After a reset you have to create new device objects,
calling functions on the existing ones will result in
undefined behavior!
"""
self.check_validity()
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_RESET, (), '', 0, '')
def write_uid(self, uid):
"""
Writes a new UID into flash. If you want to set a new UID
you have to decode the Base58 encoded UID string into an
integer first.
We recommend that you use Brick Viewer to change the UID.
"""
self.check_validity()
uid = int(uid)
self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_WRITE_UID, (uid,), 'I', 0, '')
def read_uid(self):
"""
Returns the current UID as an integer. Encode as
Base58 to get the usual string version.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_READ_UID, (), '', 12, 'I')
def get_identity(self):
"""
Returns the UID, the UID where the Bricklet is connected to,
the position, the hardware and firmware version as well as the
device identifier.
The position can be 'a', 'b', 'c', 'd', 'e', 'f', 'g' or 'h' (Bricklet Port).
A Bricklet connected to an :ref:`Isolator Bricklet <isolator_bricklet>` is always at
position 'z'.
The device identifier numbers can be found :ref:`here <device_identifier>`.
|device_identifier_constant|
"""
return GetIdentity(*self.ipcon.send_request(self, BrickletSilentStepperV2.FUNCTION_GET_IDENTITY, (), '', 33, '8s 8s c 3B 3B H'))
def register_callback(self, callback_id, function):
"""
Registers the given *function* with the given *callback_id*.
"""
if function is None:
self.registered_callbacks.pop(callback_id, None)
else:
self.registered_callbacks[callback_id] = function
SilentStepperV2 = BrickletSilentStepperV2 # for backward compatibility
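# Hedged usage sketch (not part of the generated bindings). It assumes the
# standalone Tinkerforge Python bindings for IPConnection; host/port and the
# UID "XYZ" are placeholders, and the millivolt unit is the usual Tinkerforge
# convention, not something stated in this file.
#
#   from tinkerforge.ip_connection import IPConnection
#   ipcon = IPConnection()
#   ipcon.connect("localhost", 4223)
#   ss = BrickletSilentStepperV2("XYZ", ipcon)
#   ss.set_minimum_voltage(8000)  # 8 V, assuming the value is given in mV
#   ss.set_time_base(15)          # with velocity 10 -> 1 step per 1.5 s
#   print(ss.get_all_data())
#   ipcon.disconnect()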
|
Tinkerforge/brickv
|
src/brickv/bindings/bricklet_silent_stepper_v2.py
|
Python
|
gpl-2.0
| 57,690
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "QiuDaBao.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
sysuccc/QiuDaBao
|
manage.py
|
Python
|
gpl-2.0
| 251
|
# ----------------------------- #
# Common simplifications passes #
# ----------------------------- #
from miasm2.expression.modint import mod_size2int, mod_size2uint
from miasm2.expression.expression import *
from miasm2.expression.expression_helper import *
def simp_cst_propagation(e_s, e):
"""This passe includes:
- Constant folding
- Common logical identities
- Common binary identities
"""
# merge associative ops
args = list(e.args)
op = e.op
# simplify integer manipulations
# int OP int => int
# TODO: <<< >>> << >> are architecture dependent
if op in op_propag_cst:
while (len(args) >= 2 and
args[-1].is_int() and
args[-2].is_int()):
i2 = args.pop()
i1 = args.pop()
if op == '+':
o = i1.arg + i2.arg
elif op == '*':
o = i1.arg * i2.arg
elif op == '**':
o = i1.arg ** i2.arg
elif op == '^':
o = i1.arg ^ i2.arg
elif op == '&':
o = i1.arg & i2.arg
elif op == '|':
o = i1.arg | i2.arg
elif op == '>>':
o = i1.arg >> i2.arg
elif op == '<<':
o = i1.arg << i2.arg
elif op == 'a>>':
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2uint[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 >> x2)
elif op == '>>>':
o = (i1.arg >> (i2.arg % i2.size) |
i1.arg << ((i1.size - i2.arg) % i2.size))
elif op == '<<<':
o = (i1.arg << (i2.arg % i2.size) |
i1.arg >> ((i1.size - i2.arg) % i2.size))
elif op == '/':
o = i1.arg / i2.arg
elif op == '%':
o = i1.arg % i2.arg
elif op == 'idiv':
assert(i2.arg.arg)
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2int[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 / x2)
elif op == 'imod':
assert(i2.arg.arg)
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2int[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 % x2)
elif op == 'umod':
assert(i2.arg.arg)
x1 = mod_size2uint[i1.arg.size](i1.arg)
x2 = mod_size2uint[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 % x2)
elif op == 'udiv':
assert(i2.arg.arg)
x1 = mod_size2uint[i1.arg.size](i1.arg)
x2 = mod_size2uint[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 / x2)
o = ExprInt(o, i1.size)
args.append(o)
# bsf(int) => int
if op == "bsf" and args[0].is_int() and args[0].arg != 0:
i = 0
while args[0].arg & (1 << i) == 0:
i += 1
return ExprInt(i, args[0].size)
# bsr(int) => int
if op == "bsr" and args[0].is_int() and args[0].arg != 0:
i = args[0].size - 1
while args[0].arg & (1 << i) == 0:
i -= 1
return ExprInt(i, args[0].size)
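# Worked examples (illustrative, not from the original source):
#   bsf(0b101000) => 3 (lowest set bit); bsr(0b101000) => 5 (highest set bit)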
# -(-(A)) => A
if (op == '-' and len(args) == 1 and args[0].is_op('-') and
len(args[0].args) == 1):
return args[0].args[0]
# -(int) => -int
if op == '-' and len(args) == 1 and args[0].is_int():
return ExprInt(-int(args[0]), e.size)
# A op 0 => A
if op in ['+', '|', "^", "<<", ">>", "<<<", ">>>"] and len(args) > 1:
if args[-1].is_int(0):
args.pop()
# A - 0 => A
if op == '-' and len(args) > 1 and args[-1].is_int(0):
assert(len(args) == 2) # Op '-' with more than 2 args: SanityCheckError
return args[0]
# A * 1 => A
if op == "*" and len(args) > 1 and args[-1].is_int(1):
args.pop()
# for canonical form
# A * -1 => - A
if op == "*" and len(args) > 1 and args[-1].is_int((1 << args[-1].size) - 1):
args.pop()
args[-1] = - args[-1]
# op A => A
if op in ['+', '*', '^', '&', '|', '>>', '<<',
'a>>', '<<<', '>>>', 'idiv', 'imod', 'umod', 'udiv'] and len(args) == 1:
return args[0]
# A-B => A + (-B)
if op == '-' and len(args) > 1:
if len(args) > 2:
raise ValueError(
'sanity check fail on expr -: should have one or 2 args ' +
'%r %s' % (e, e))
return ExprOp('+', args[0], -args[1])
# A op 0 => 0
if op in ['&', "*"] and args[1].is_int(0):
return ExprInt(0, e.size)
# - (A + B +...) => -A + -B + -C
if op == '-' and len(args) == 1 and args[0].is_op('+'):
args = [-a for a in args[0].args]
e = ExprOp('+', *args)
return e
# -(a?int1:int2) => (a?-int1:-int2)
if (op == '-' and len(args) == 1 and
args[0].is_cond() and
args[0].src1.is_int() and args[0].src2.is_int()):
i1 = args[0].src1
i2 = args[0].src2
i1 = ExprInt(-i1.arg, i1.size)
i2 = ExprInt(-i2.arg, i2.size)
return ExprCond(args[0].cond, i1, i2)
i = 0
while i < len(args) - 1:
j = i + 1
while j < len(args):
# A ^ A => 0
if op == '^' and args[i] == args[j]:
args[i] = ExprInt(0, args[i].size)
del(args[j])
continue
# A + (- A) => 0
if op == '+' and args[j].is_op("-"):
if len(args[j].args) == 1 and args[i] == args[j].args[0]:
args[i] = ExprInt(0, args[i].size)
del(args[j])
continue
# (- A) + A => 0
if op == '+' and args[i].is_op("-"):
if len(args[i].args) == 1 and args[j] == args[i].args[0]:
args[i] = ExprInt(0, args[i].size)
del(args[j])
continue
# A | A => A
if op == '|' and args[i] == args[j]:
del(args[j])
continue
# A & A => A
if op == '&' and args[i] == args[j]:
del(args[j])
continue
j += 1
i += 1
if op in ['|', '&', '%', '/', '**'] and len(args) == 1:
return args[0]
# A <<< A.size => A
if (op in ['<<<', '>>>'] and
args[1].is_int() and
args[1].arg == args[0].size):
return args[0]
# A <<< X <<< Y => A <<< (X+Y) (or <<< >>>)
if (op in ['<<<', '>>>'] and
args[0].is_op() and
args[0].op in ['<<<', '>>>']):
op1 = op
op2 = args[0].op
if op1 == op2:
op = op1
args1 = args[0].args[1] + args[1]
else:
op = op2
args1 = args[0].args[1] - args[1]
args0 = args[0].args[0]
args = [args0, args1]
# A >> X >> Y => A >> (X+Y)
if (op in ['<<', '>>'] and
args[0].is_op(op)):
args = [args[0].args[0], args[0].args[1] + args[1]]
# A & A.mask => A
if op == "&" and args[-1] == e.mask:
return ExprOp('&', *args[:-1])
# A | A.mask => A.mask
if op == "|" and args[-1] == e.mask:
return args[-1]
# ! (!X + int) => X - int
# TODO
# ((A & mask) >> shift) with mask < 2**shift => 0
if op == ">>" and args[1].is_int() and args[0].is_op("&"):
if (args[0].args[1].is_int() and
2 ** args[1].arg > args[0].args[1].arg):
return ExprInt(0, args[0].size)
# parity(int) => int
if op == 'parity' and args[0].is_int():
return ExprInt(parity(int(args[0])), 1)
# (-a) * b * (-c) * (-d) => (-a) * b * c * d
if op == "*" and len(args) > 1:
new_args = []
counter = 0
for a in args:
if a.is_op('-') and len(a.args) == 1:
new_args.append(a.args[0])
counter += 1
else:
new_args.append(a)
if counter % 2:
return -ExprOp(op, *new_args)
args = new_args
# A << int with A ExprCompose => move index
if (op == "<<" and args[0].is_compose() and
args[1].is_int() and int(args[1]) != 0):
final_size = args[0].size
shift = int(args[1])
new_args = []
# shift indexes
for index, arg in args[0].iter_args():
new_args.append((arg, index+shift, index+shift+arg.size))
# filter out expressions
filter_args = []
min_index = final_size
for expr, start, stop in new_args:
if start >= final_size:
continue
if stop > final_size:
expr = expr[:expr.size - (stop - final_size)]
stop = final_size
filter_args.append(expr)
min_index = min(start, min_index)
# create entry 0
assert min_index != 0
expr = ExprInt(0, min_index)
args = [expr] + filter_args
return ExprCompose(*args)
# A >> int with A ExprCompose => move index
if op == ">>" and args[0].is_compose() and args[1].is_int():
final_size = args[0].size
shift = int(args[1])
new_args = []
# shift indexes
for index, arg in args[0].iter_args():
new_args.append((arg, index-shift, index+arg.size-shift))
# filter out expressions
filter_args = []
max_index = 0
for expr, start, stop in new_args:
if stop <= 0:
continue
if start < 0:
expr = expr[-start:]
start = 0
filter_args.append(expr)
max_index = max(stop, max_index)
# create entry 0
expr = ExprInt(0, final_size - max_index)
args = filter_args + [expr]
return ExprCompose(*args)
# Compose(a) OP Compose(b) with a/b same bounds => Compose(a OP b)
if op in ['|', '&', '^'] and all([arg.is_compose() for arg in args]):
bounds = set()
for arg in args:
bound = tuple([expr.size for expr in arg.args])
bounds.add(bound)
if len(bounds) == 1:
bound = list(bounds)[0]
new_args = [[expr] for expr in args[0].args]
for sub_arg in args[1:]:
for i, expr in enumerate(sub_arg.args):
new_args[i].append(expr)
args = []
for i, arg in enumerate(new_args):
args.append(ExprOp(op, *arg))
return ExprCompose(*args)
# <<<c_rez, >>>c_rez
if op in [">>>c_rez", "<<<c_rez"]:
assert len(args) == 3
dest, rounds, cf = args
# Skipped if rounds is 0
if rounds.is_int(0):
return dest
elif all(map(lambda x: x.is_int(), args)):
# The expression can be resolved
tmp = int(dest)
cf = int(cf)
size = dest.size
tmp_count = (int(rounds) &
(0x3f if size == 64 else 0x1f)) % (size + 1)
if op == ">>>c_rez":
while (tmp_count != 0):
tmp_cf = tmp & 1
tmp = (tmp >> 1) + (cf << (size - 1))
cf = tmp_cf
tmp_count -= 1
tmp &= int(dest.mask)
elif op == "<<<c_rez":
while (tmp_count != 0):
tmp_cf = (tmp >> (size - 1)) & 1
tmp = (tmp << 1) + cf
cf = tmp_cf
tmp_count -= 1
tmp &= int(dest.mask)
else:
raise RuntimeError("Unknown operation: %s" % op)
return ExprInt(tmp, size=dest.size)
return ExprOp(op, *args)
def simp_cond_op_int(e_s, e):
"Extract conditions from operations"
if e.op not in ["+", "|", "^", "&", "*", '<<', '>>', 'a>>']:
return e
if len(e.args) < 2:
return e
if not e.args[-1].is_int():
return e
a_int = e.args[-1]
conds = []
for a in e.args[:-1]:
if not a.is_cond():
return e
conds.append(a)
if not conds:
return e
c = conds.pop()
c = ExprCond(c.cond,
ExprOp(e.op, c.src1, a_int),
ExprOp(e.op, c.src2, a_int))
conds.append(c)
new_e = ExprOp(e.op, *conds)
return new_e
def simp_cond_factor(e_s, e):
"Merge similar conditions"
if e.op not in ["+", "|", "^", "&", "*", '<<', '>>', 'a>>']:
return e
if len(e.args) < 2:
return e
conds = {}
not_conds = []
multi_cond = False
for a in e.args:
if not a.is_cond():
not_conds.append(a)
continue
c = a.cond
if c not in conds:
conds[c] = []
else:
multi_cond = True
conds[c].append(a)
if not multi_cond:
return e
c_out = not_conds[:]
for c, vals in conds.items():
new_src1 = [x.src1 for x in vals]
new_src2 = [x.src2 for x in vals]
src1 = e_s.expr_simp_wrapper(ExprOp(e.op, *new_src1))
src2 = e_s.expr_simp_wrapper(ExprOp(e.op, *new_src2))
c_out.append(ExprCond(c, src1, src2))
if len(c_out) == 1:
new_e = c_out[0]
else:
new_e = ExprOp(e.op, *c_out)
return new_e
def simp_slice(e_s, e):
"Slice optimization"
# slice(A, 0, a.size) => A
if e.start == 0 and e.stop == e.arg.size:
return e.arg
# Slice(int) => int
elif e.arg.is_int():
total_bit = e.stop - e.start
mask = (1 << (e.stop - e.start)) - 1
return ExprInt(int((e.arg.arg >> e.start) & mask), total_bit)
# Slice(Slice(A, x), y) => Slice(A, z)
elif e.arg.is_slice():
if e.stop - e.start > e.arg.stop - e.arg.start:
raise ValueError('slice in slice: getting more val', str(e))
new_e = ExprSlice(e.arg.arg, e.start + e.arg.start,
e.start + e.arg.start + (e.stop - e.start))
return new_e
elif e.arg.is_compose():
# Slice(Compose(A), x) => Slice(A, y)
for index, arg in e.arg.iter_args():
if index <= e.start and index+arg.size >= e.stop:
new_e = arg[e.start - index:e.stop - index]
return new_e
# Slice(Compose(A, B, C), x) => Compose(A, B, C) with truncated A/B/C
out = []
for index, arg in e.arg.iter_args():
# arg is before slice start
if e.start >= index + arg.size:
continue
# arg is after slice stop
elif e.stop <= index:
continue
# arg is fully included in slice
elif e.start <= index and index + arg.size <= e.stop:
out.append(arg)
continue
# arg is truncated at start
if e.start > index:
slice_start = e.start - index
a_start = 0
else:
# arg is not truncated at start
slice_start = 0
a_start = index - e.start
# arg is truncated at stop
if e.stop < index + arg.size:
slice_stop = arg.size + e.stop - (index + arg.size) - slice_start
a_stop = e.stop - e.start
else:
slice_stop = arg.size
a_stop = index + arg.size - e.start
out.append(arg[slice_start:slice_stop])
return ExprCompose(*out)
# ExprMem(x, size)[:A] => ExprMem(x, a)
# XXX TODO: is this safe?
elif (e.arg.is_mem() and
e.start == 0 and
e.arg.size > e.stop and e.stop % 8 == 0):
e = ExprMem(e.arg.arg, size=e.stop)
return e
# distributivity of slice and &
# (a & int)[x:y] => 0 if int[x:y] == 0
elif e.arg.is_op("&") and e.arg.args[-1].is_int():
tmp = e_s.expr_simp_wrapper(e.arg.args[-1][e.start:e.stop])
if tmp.is_int(0):
return tmp
# distributivity of slice and exprcond
# (a?int1:int2)[x:y] => (a?int1[x:y]:int2[x:y])
elif e.arg.is_cond() and e.arg.src1.is_int() and e.arg.src2.is_int():
src1 = e.arg.src1[e.start:e.stop]
src2 = e.arg.src2[e.start:e.stop]
e = ExprCond(e.arg.cond, src1, src2)
# (a * int)[0:y] => (a[0:y] * int[0:y])
elif e.start == 0 and e.arg.is_op("*") and e.arg.args[-1].is_int():
args = [e_s.expr_simp_wrapper(a[e.start:e.stop]) for a in e.arg.args]
e = ExprOp(e.arg.op, *args)
# (a >> int)[x:y] => a[x+int:y+int] with int+y <= a.size
# (a << int)[x:y] => a[x-int:y-int] with x-int >= 0
elif (e.arg.is_op() and e.arg.op in [">>", "<<"] and
e.arg.args[1].is_int()):
arg, shift = e.arg.args
shift = int(shift)
if e.arg.op == ">>":
if shift + e.stop <= arg.size:
return arg[e.start + shift:e.stop + shift]
elif e.arg.op == "<<":
if e.start - shift >= 0:
return arg[e.start - shift:e.stop - shift]
else:
raise ValueError('Bad case')
return e
def simp_compose(e_s, e):
"Commons simplification on ExprCompose"
args = merge_sliceto_slice(e)
out = []
# compose of compose
for arg in args:
if arg.is_compose():
out += arg.args
else:
out.append(arg)
args = out
# Compose(a) with a.size = compose.size => a
if len(args) == 1 and args[0].size == e.size:
return args[0]
# {(X[z:], 0, X.size-z), (0, X.size-z, X.size)} => (X >> z)
if len(args) == 2 and args[1].is_int(0):
if (args[0].is_slice() and
args[0].stop == args[0].arg.size and
args[0].size + args[1].size == args[0].arg.size):
new_e = args[0].arg >> ExprInt(args[0].start, args[0].arg.size)
return new_e
# {@X[base + i] 0 X, @Y[base + i + X] X (X + Y)} => @(X+Y)[base + i]
for i, arg in enumerate(args[:-1]):
nxt = args[i + 1]
if arg.is_mem() and nxt.is_mem():
gap = e_s(nxt.arg - arg.arg)
if gap.is_int() and int(gap) == arg.size / 8:
args = args[:i] + [ExprMem(arg.arg,
arg.size + nxt.size)] + args[i + 2:]
return ExprCompose(*args)
# Compose with ExprCond with integers for src1/src2 and integers =>
# propagate integers
# {XXX?(0x0,0x1)?(0x0,0x1),0,8, 0x0,8,32} => XXX?(int1, int2)
ok = True
expr_cond_index = None
expr_ints_or_conds = []
for i, arg in enumerate(args):
if not is_int_or_cond_src_int(arg):
ok = False
break
expr_ints_or_conds.append(arg)
if arg.is_cond():
if expr_cond_index is not None:
ok = False
expr_cond_index = i
cond = arg
if ok and expr_cond_index is not None:
src1 = []
src2 = []
for i, arg in enumerate(expr_ints_or_conds):
if i == expr_cond_index:
src1.append(arg.src1)
src2.append(arg.src2)
else:
src1.append(arg)
src2.append(arg)
src1 = e_s.apply_simp(ExprCompose(*src1))
src2 = e_s.apply_simp(ExprCompose(*src2))
if src1.is_int() and src2.is_int():
return ExprCond(cond.cond, src1, src2)
return ExprCompose(*args)
def simp_cond(e_s, e):
"Common simplifications on ExprCond"
# eval exprcond src1/src2 with satisfiable/unsatisfiable condition
# propagation
if (not e.cond.is_int()) and e.cond.size == 1:
src1 = e.src1.replace_expr({e.cond: ExprInt(1, 1)})
src2 = e.src2.replace_expr({e.cond: ExprInt(0, 1)})
if src1 != e.src1 or src2 != e.src2:
return ExprCond(e.cond, src1, src2)
# -A ? B:C => A ? B:C
if e.cond.is_op('-') and len(e.cond.args) == 1:
e = ExprCond(e.cond.args[0], e.src1, e.src2)
# a?x:x
elif e.src1 == e.src2:
e = e.src1
# int ? A:B => A or B
elif e.cond.is_int():
if e.cond.arg == 0:
e = e.src2
else:
e = e.src1
# a?(a?b:c):x => a?b:x
elif e.src1.is_cond() and e.cond == e.src1.cond:
e = ExprCond(e.cond, e.src1.src1, e.src2)
# a?x:(a?b:c) => a?x:c
elif e.src2.is_cond() and e.cond == e.src2.cond:
e = ExprCond(e.cond, e.src1, e.src2.src2)
# a|int ? b:c => b with int != 0
elif (e.cond.is_op('|') and
e.cond.args[1].is_int() and
e.cond.args[1].arg != 0):
return e.src1
# (C?int1:int2)?A:B => A, B, C?A:B or C?B:A depending on int1/int2
elif (e.cond.is_cond() and
e.cond.src1.is_int() and
e.cond.src2.is_int()):
int1 = e.cond.src1.arg.arg
int2 = e.cond.src2.arg.arg
if int1 and int2:
e = e.src1
elif int1 == 0 and int2 == 0:
e = e.src2
elif int1 == 0 and int2:
e = ExprCond(e.cond.cond, e.src2, e.src1)
elif int1 and int2 == 0:
e = ExprCond(e.cond.cond, e.src1, e.src2)
return e
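# Hedged usage sketch (not part of this module): these passes are normally
# applied through miasm2's default simplifier; the import path below is
# assumed from the usual miasm2 layout.
#
#   from miasm2.expression.simplifications import expr_simp
#   from miasm2.expression.expression import ExprId
#   a = ExprId('a', 32)
#   expr_simp(a ^ a)  # => ExprInt(0x0, 32), via the A ^ A => 0 rule above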
|
chubbymaggie/miasm
|
miasm2/expression/simplifications_common.py
|
Python
|
gpl-2.0
| 21,237
|
# -*- coding: utf-8 -*-
"""
Industrial Dual Analog In Plugin
Copyright (C) 2015 Olaf Lüke <olaf@tinkerforge.com>
Copyright (C) 2015-2016 Matthias Bolte <matthias@tinkerforge.com>
industrial_dual_analog_in.py: Industrial Dual Analog In Plugin Implementation
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
"""
from PyQt5.QtWidgets import QVBoxLayout, QLabel, QHBoxLayout, QComboBox, QPushButton, QFrame, QDialog, QMessageBox
from PyQt5.QtCore import Qt
from brickv.plugin_system.plugin_base import PluginBase
from brickv.bindings.bricklet_industrial_dual_analog_in import BrickletIndustrialDualAnalogIn
from brickv.plot_widget import PlotWidget, CurveValueWrapper
from brickv.async_call import async_call
from brickv.callback_emulator import CallbackEmulator
from brickv.utils import get_modeless_dialog_flags
from brickv.plugin_system.plugins.industrial_dual_analog_in.ui_calibration import Ui_Calibration
from brickv.utils import format_voltage
def is_int32(value):
return -2147483648 <= value <= 2147483647
class Calibration(QDialog, Ui_Calibration):
def __init__(self, parent):
QDialog.__init__(self, parent, get_modeless_dialog_flags())
self.parent = parent
self.values0 = [0] * 10
self.values1 = [0] * 10
self.values_index = 0
self.setupUi(self)
self.button_cal_remove.clicked.connect(self.remove_clicked)
self.button_cal_offset.clicked.connect(self.offset_clicked)
self.button_cal_gain.clicked.connect(self.gain_clicked)
self.button_close.clicked.connect(self.close)
self.cbe_adc_values = CallbackEmulator(self,
self.parent.analog_in.get_adc_values,
None,
self.cb_adc_values,
self.parent.increase_error_count)
def show(self):
QDialog.show(self)
self.cbe_adc_values.set_period(100)
self.current_offset0 = 0
self.current_offset1 = 0
self.current_gain0 = 0
self.current_gain1 = 0
self.update_calibration()
def update_calibration(self):
async_call(self.parent.analog_in.get_calibration, None, self.get_calibration_async, self.parent.increase_error_count)
def remove_clicked(self):
self.parent.analog_in.set_calibration((0, 0), (0, 0))
self.update_calibration()
def offset_clicked(self):
self.parent.analog_in.set_calibration((-sum(self.values0) // 10, -sum(self.values1) // 10), (self.current_gain0, self.current_gain1))
self.update_calibration()
def gain_clicked(self):
try:
if self.parent.has_fixed_calibration:
measured0 = (sum(self.values0) / 10.0) * 244 / 44983
measured1 = (sum(self.values1) / 10.0) * 244 / 44983
else:
measured0 = (sum(self.values0) / 10.0) * 244 / 38588
measured1 = (sum(self.values1) / 10.0) * 244 / 38588
factor0 = self.spinbox_voltage_ch0.value()/measured0
factor1 = self.spinbox_voltage_ch1.value()/measured1
gain0 = int((factor0 - 1) * 2 ** 23)
gain1 = int((factor1 - 1) * 2 ** 23)
if not is_int32(gain0) or not is_int32(gain1):
raise ValueError("Out of range")
except (ValueError, ZeroDivisionError):
QMessageBox.critical(self, "Failure during Calibration", "Calibration values are not in range.", QMessageBox.Ok)
return
self.parent.analog_in.set_calibration((self.current_offset0, self.current_offset1), (gain0, gain1))
self.update_calibration()
def get_calibration_async(self, cal):
self.current_offset0 = cal.offset[0]
self.current_offset1 = cal.offset[1]
self.current_gain0 = cal.gain[0]
self.current_gain1 = cal.gain[1]
self.label_offset0.setText(str(cal.offset[0]))
self.label_offset1.setText(str(cal.offset[1]))
self.label_gain0.setText(str(cal.gain[0]))
self.label_gain1.setText(str(cal.gain[1]))
def cb_adc_values(self, values):
self.values0[self.values_index] = values[0]
self.values1[self.values_index] = values[1]
self.values_index += 1
if self.values_index >= 10:
self.values_index = 0
self.label_adc0.setText(str(sum(self.values0) // 10))
self.label_adc1.setText(str(sum(self.values1) // 10))
def closeEvent(self, event):
self.parent.calibration_button.setEnabled(True)
self.cbe_adc_values.set_period(0)
class IndustrialDualAnalogIn(PluginBase):
def __init__(self, *args):
super().__init__(BrickletIndustrialDualAnalogIn, *args)
self.analog_in = self.device
# the firmware version of an EEPROM Bricklet can (under common circumstances)
# not change during the lifetime of an EEPROM Bricklet plugin. Therefore,
# it's okay to make final decisions based on it here
self.has_fixed_calibration = self.firmware_version >= (2, 0, 1)
self.cbe_voltage0 = CallbackEmulator(self,
self.analog_in.get_voltage,
0,
self.cb_voltage,
self.increase_error_count,
pass_arguments_to_result_callback=True)
self.cbe_voltage1 = CallbackEmulator(self,
self.analog_in.get_voltage,
1,
self.cb_voltage,
self.increase_error_count,
pass_arguments_to_result_callback=True)
self.calibration = None
self.sample_rate_label = QLabel('Sample Rate:')
self.sample_rate_combo = QComboBox()
self.sample_rate_combo.addItem('976 Hz')
self.sample_rate_combo.addItem('488 Hz')
self.sample_rate_combo.addItem('244 Hz')
self.sample_rate_combo.addItem('122 Hz')
self.sample_rate_combo.addItem('61 Hz')
self.sample_rate_combo.addItem('4 Hz')
self.sample_rate_combo.addItem('2 Hz')
self.sample_rate_combo.addItem('1 Hz')
self.current_voltage = [CurveValueWrapper(), CurveValueWrapper()] # float, V
self.calibration_button = QPushButton('Calibration...')
self.sample_rate_combo.currentIndexChanged.connect(self.sample_rate_combo_index_changed)
self.calibration_button.clicked.connect(self.calibration_button_clicked)
plots = [('Channel 0', Qt.red, self.current_voltage[0], format_voltage),
('Channel 1', Qt.blue, self.current_voltage[1], format_voltage)]
self.plot_widget = PlotWidget('Voltage [V]', plots, y_resolution=0.001)
hlayout = QHBoxLayout()
hlayout.addWidget(self.sample_rate_label)
hlayout.addWidget(self.sample_rate_combo)
hlayout.addStretch()
hlayout.addWidget(self.calibration_button)
line = QFrame()
line.setObjectName("line")
line.setFrameShape(QFrame.HLine)
line.setFrameShadow(QFrame.Sunken)
layout = QVBoxLayout(self)
layout.addWidget(self.plot_widget)
layout.addWidget(line)
layout.addLayout(hlayout)
def start(self):
async_call(self.analog_in.get_sample_rate, None, self.get_sample_rate_async, self.increase_error_count)
self.cbe_voltage0.set_period(100)
self.cbe_voltage1.set_period(100)
self.plot_widget.stop = False
def stop(self):
self.cbe_voltage0.set_period(0)
self.cbe_voltage1.set_period(0)
self.plot_widget.stop = True
def destroy(self):
if self.calibration is not None:
self.calibration.close()
@staticmethod
def has_device_identifier(device_identifier):
return device_identifier == BrickletIndustrialDualAnalogIn.DEVICE_IDENTIFIER
def get_voltage_value0(self):
    return self.current_voltage[0].value
def get_voltage_value1(self):
    return self.current_voltage[1].value
def calibration_button_clicked(self):
if self.calibration is None:
self.calibration = Calibration(self)
self.calibration_button.setEnabled(False)
self.calibration.show()
def sample_rate_combo_index_changed(self, index):
async_call(self.analog_in.set_sample_rate, index, None, self.increase_error_count)
def get_sample_rate_async(self, rate):
self.sample_rate_combo.setCurrentIndex(rate)
def cb_voltage(self, sensor, voltage):
self.current_voltage[sensor].value = voltage / 1000.0
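# Hedged sketch (not part of the plugin): the calibration arithmetic from
# gain_clicked() with made-up example numbers; the 2**23 scale matches the
# code above.
#
#   measured = 12.000                   # V, averaged ADC reading
#   reference = 12.060                  # V, entered by the user
#   factor = reference / measured       # = 1.005
#   gain = int((factor - 1) * 2 ** 23)  # = 41943, well within int32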
|
Tinkerforge/brickv
|
src/brickv/plugin_system/plugins/industrial_dual_analog_in/industrial_dual_analog_in.py
|
Python
|
gpl-2.0
| 9,465
|
#!/usr/bin/env python
import jsonschema
import json
import os
import sys
import os.path as op
import tempfile
import pytest
from argparse import ArgumentParser, RawTextHelpFormatter
from jsonschema import ValidationError
from boutiques.validator import DescriptorValidationError
from boutiques.publisher import ZenodoError
from boutiques.invocationSchemaHandler import InvocationValidationError
from boutiques.localExec import ExecutorOutput
from boutiques.localExec import ExecutorError
from boutiques.exporter import ExportError
from boutiques.importer import ImportError
from boutiques.localExec import loadJson, addDefaultValues
from boutiques.logger import raise_error
from tabulate import tabulate
def prettyprint(*params):
parser = ArgumentParser("Boutiques pretty-print for generating help text")
parser.add_argument("descriptor", action="store",
help="The Boutiques descriptor.")
results = parser.parse_args(params)
from boutiques.prettyprint import PrettyPrinter
desc = loadJson(results.descriptor)
prettyclass = PrettyPrinter(desc)
return prettyclass.docstring
def create(*params):
parser = ArgumentParser("Boutiques descriptor creator")
parser.add_argument("descriptor", action="store",
help="Output file to store descriptor in.")
parser.add_argument("--docker-image", '-d', action="store",
help="Name of Docker image on DockerHub.")
parser.add_argument("--use-singularity", '-u', action="store_true",
help="When --docker-image is used. Specify to "
"use singularity to run it.")
results = parser.parse_args(params)
from boutiques.creator import CreateDescriptor
new = CreateDescriptor(parser=None,
docker_image=results.docker_image,
use_singularity=results.use_singularity)
new.save(results.descriptor)
return None
def validate(*params):
parser = ArgumentParser("Boutiques descriptor validator")
parser.add_argument("descriptor", action="store",
help="The Boutiques descriptor as a JSON file, JSON "
"string or Zenodo ID (prefixed by 'zenodo.').")
parser.add_argument("--bids", "-b", action="store_true",
help="Flag indicating if descriptor is a BIDS app")
parser.add_argument("--format", "-f", action="store_true",
help="If descriptor is valid, rewrite it with sorted"
" keys.")
results = parser.parse_args(params)
from boutiques.validator import validate_descriptor
descriptor = validate_descriptor(results.descriptor,
format_output=results.format)
if results.bids:
from boutiques.bids import validate_bids
validate_bids(descriptor, valid=True)
def execute(*params):
parser = ArgumentParser("Boutiques local executor", add_help=False)
parser.add_argument("mode", action="store",
help="Mode of operation to use. Launch: takes a "
"set of inputs compliant with invocation schema "
"and launches the tool. Simulate: shows sample "
"command-lines based on the provided descriptor"
" based on provided or randomly generated inputs. "
"Prepare: pulls the Docker or Singularity container "
"image for a given descriptor. ",
choices=["launch", "simulate", "prepare"])
parser.add_argument("--help", "-h", action="store_true",
help="show this help message and exit")
helps = any(ht in params for ht in ("--help", "-h"))
if len(params) <= 1 and helps:
parser.print_help()
raise SystemExit
args, params = parser.parse_known_args(params)
mode = args.mode
params += ["--help"] if args.help is True else []
if mode == "launch":
parser = ArgumentParser("Launches an invocation.")
parser.add_argument("descriptor", action="store",
help="The Boutiques descriptor as a JSON file, "
"JSON string or Zenodo ID (prefixed by 'zenodo.').")
parser.add_argument("invocation", action="store",
help="Input JSON complying to invocation.")
parser.add_argument("-v", "--volumes", action="append", type=str,
help="Volumes to mount when launching the "
"container. Format consistently the following:"
" /a:/b will mount local directory /a to "
"container directory /b.")
parser.add_argument("-x", "--debug", action="store_true",
help="Keeps temporary scripts used during "
"execution, and prints additional debug "
"messages.")
parser.add_argument("-u", "--user", action="store_true",
help="Runs the container as local user ({0})"
" instead of root.".format(os.getenv("USER")))
parser.add_argument("-s", "--stream", action="store_true",
help="Streams stdout and stderr in real time "
"during execution.")
parser.add_argument("--imagepath", action="store",
help="Path to Singularity image. "
"If not specified, will use current directory.")
results = parser.parse_args(params)
descriptor = results.descriptor
inp = results.invocation
# Validate invocation and descriptor
valid = invocation(descriptor, '-i', inp)
# Generate object that will perform the commands
from boutiques.localExec import LocalExecutor
executor = LocalExecutor(descriptor, inp,
{"forcePathType": True,
"debug": results.debug,
"changeUser": results.user,
"stream": results.stream,
"imagePath": results.imagepath})
# Execute it
return executor.execute(results.volumes)
if mode == "simulate":
parser = ArgumentParser("Simulates an invocation.")
parser.add_argument("descriptor", action="store",
help="The Boutiques descriptor as a JSON file, "
"JSON string or Zenodo ID (prefixed by 'zenodo.').")
parser.add_argument("-i", "--input", action="store",
help="Input JSON complying to invocation.")
parser.add_argument("-j", "--json", action="store_true",
help="Flag to generate invocation in JSON format.")
results = parser.parse_args(params)
descriptor = results.descriptor
# Do some basic input scrubbing
inp = results.input
valid = invocation(descriptor, '-i', inp) if inp else\
invocation(descriptor)
# Generate object that will perform the commands
from boutiques.localExec import LocalExecutor
executor = LocalExecutor(descriptor, inp,
{"forcePathType": True,
"destroyTempScripts": True,
"changeUser": True})
if not inp:
executor.generateRandomParams(1)
if results.json:
sout = [json.dumps(executor.in_dict, indent=4, sort_keys=True)]
print(sout[0])
else:
executor.printCmdLine()
sout = executor.cmd_line
# for consistency with execute
# Adding "hide" to the "container location" field since it's an invalid
# value; we can parse that to hide the summary print
return ExecutorOutput(os.linesep.join(sout), "",
0, "", [], [], os.linesep.join(sout), "", "hide")
if mode == "prepare":
parser = ArgumentParser("Pulls the container image for a given "
"descriptor")
parser.add_argument("descriptor", action="store",
help="The Boutiques descriptor as a JSON file, "
"JSON string or Zenodo ID (prefixed by 'zenodo.').")
parser.add_argument("-x", "--debug", action="store_true",
help="Keeps temporary scripts used during "
"execution, and prints additional debug "
"messages.")
parser.add_argument("-s", "--stream", action="store_true",
help="Streams stdout and stderr in real time "
"during execution.")
parser.add_argument("--imagepath", action="store",
help="Path to Singularity image. "
"If not specified, will use current directory.")
results = parser.parse_args(params)
descriptor = results.descriptor
# Validate descriptor
valid = invocation(descriptor)
# Generate object that will perform the commands
from boutiques.localExec import LocalExecutor
executor = LocalExecutor(descriptor, None,
{"forcePathType": True,
"debug": results.debug,
"stream": results.stream,
"imagePath": results.imagepath})
container_location = executor.prepare()[1]
print("Container location: " + container_location)
# Adding hide to "container location" field since it's an invalid
# value, and we can parse that to hide the summary print
return ExecutorOutput(container_location, "",
0, "", [], [], "", "", "hide")
def importer(*params):
parser = ArgumentParser("Imports old descriptor or BIDS app or CWL "
" descriptor to spec.")
parser.add_argument("type", help="Type of import we are performing",
choices=["bids", "0.4", "cwl"])
parser.add_argument("output_descriptor", help="Where the Boutiques"
" descriptor will be written.")
parser.add_argument("input_descriptor", help="Input descriptor to be "
"converted. For '0.4'"
", is JSON descriptor,"
" for 'bids' is base directory of BIDS app, "
"for 'cwl' is YAML descriptor.")
parser.add_argument("-o", "--output-invocation", help="Where to write "
"the invocation if any.")
parser.add_argument("-i", "--input-invocation", help="Input invocation "
" for CWL if any.")
results = parser.parse_args(params)
from boutiques.importer import Importer
importer = Importer(results.input_descriptor,
results.output_descriptor,
results.input_invocation,
results.output_invocation)
if results.type == "0.4":
importer.upgrade_04()
elif results.type == "bids":
importer.import_bids()
elif results.type == "cwl":
importer.import_cwl()
def exporter(*params):
parser = ArgumentParser("Export Boutiques descriptor to other formats.")
parser.add_argument("type", help="Type of export we are performing.",
choices=["carmin"])
parser.add_argument("descriptor", help="Boutiques descriptor to export.")
parser.add_argument("--identifier", help="Identifier to use in"
"CARMIN export.")
parser.add_argument("output", help="Output file where to write the"
" converted descriptor.")
results = parser.parse_args(params)
descriptor = results.descriptor
output = results.output
bosh(["validate", results.descriptor])
from boutiques.exporter import Exporter
exporter = Exporter(descriptor, results.identifier)
if results.type == "carmin":
exporter.carmin(output)
def publish(*params):
parser = ArgumentParser("Boutiques publisher",
description="A publisher of Boutiques tools"
" in Zenodo (http://zenodo.org). Requires "
"a Zenodo access token, see "
"http://developers.zenodo.org/#authentication.")
parser.add_argument("boutiques_descriptor", action="store",
help="local path of the "
" Boutiques descriptor to publish.")
parser.add_argument("--sandbox", action="store_true",
help="publish to Zenodo's sandbox instead of "
"production server. Recommended for tests.")
parser.add_argument("--zenodo-token", action="store",
help="Zenodo API token to use for authentication. "
"If not used, token will be read from configuration "
"file or requested interactively.")
parser.add_argument("--no-int", '-y', action="store_true",
help="disable interactive input.")
parser.add_argument("-v", "--verbose", action="store_true",
help="print information messages.")
group = parser.add_mutually_exclusive_group()
group.add_argument("-r", "--replace", action="store_true",
help="Publish an updated version of an existing "
"record. The descriptor must contain a DOI, which "
"will be replaced with a new one.")
group.add_argument("--id", action="store",
help="Zenodo ID of an existing record you wish to "
"update with a new version, prefixed by "
"'zenodo.' (e.g. zenodo.123456).")
results = parser.parse_args(params)
from boutiques.publisher import Publisher
publisher = Publisher(results.boutiques_descriptor,
results.zenodo_token,
results.verbose,
results.sandbox,
results.no_int,
results.replace,
results.id)
publisher.publish()
if hasattr(publisher, 'doi'):
return publisher.doi
def invocation(*params):
parser = ArgumentParser("Creates invocation schema and validates"
" invocations. Uses descriptor's invocation"
" schema if it exists, otherwise creates one.")
parser.add_argument("descriptor", action="store",
help="The Boutiques descriptor as a JSON file, JSON "
"string or Zenodo ID (prefixed by 'zenodo.').")
parser.add_argument("-i", "--invocation", action="store",
help="Input values in a JSON file or as a JSON "
"object to be validated against "
"the invocation schema.")
parser.add_argument("-w", "--write-schema", action="store_true",
help="If descriptor doesn't have an invocation "
"schema, creates one and writes it to the descriptor"
" file ")
result = parser.parse_args(params)
validate(result.descriptor)
descriptor = loadJson(result.descriptor)
if descriptor.get("invocation-schema"):
invSchema = descriptor.get("invocation-schema")
else:
from boutiques.invocationSchemaHandler import generateInvocationSchema
invSchema = generateInvocationSchema(descriptor)
if result.write_schema:
descriptor["invocation-schema"] = invSchema
with open(result.descriptor, "w") as f:
f.write(json.dumps(descriptor, indent=4, sort_keys=True))
if result.invocation:
from boutiques.invocationSchemaHandler import validateSchema
data = addDefaultValues(descriptor, loadJson(result.invocation))
validateSchema(invSchema, data)
def evaluate(*params):
parser = ArgumentParser("Evaluates parameter values for a descriptor"
" and invocation")
parser.add_argument("descriptor", action="store",
help="The Boutiques descriptor as a JSON file, JSON "
"string or Zenodo ID (prefixed by 'zenodo.').")
parser.add_argument("invocation", action="store",
help="Input JSON complying to invocation.")
parser.add_argument("query", action="store", nargs="*",
help="The query to be performed. Simply request keys "
"from the descriptor (i.e. output-files), and chain "
"together queries (i.e. id=myfile or optional=false) "
"slashes between them and commas connecting them. "
"(i.e. output-files/optional=false,id=myfile). "
"Perform multiple queries by separating them with a "
"space.")
result = parser.parse_args(params)
# Generate object that will parse the invocation and descriptor
from boutiques.localExec import LocalExecutor
executor = LocalExecutor(result.descriptor, result.invocation,
{"forcePathType": True,
"destroyTempScripts": True,
"changeUser": True})
from boutiques.evaluate import evaluateEngine
query_results = []
for query in result.query:
query_results += [evaluateEngine(executor, query)]
return query_results[0] if len(query_results) == 1 else query_results
def test(*params):
parser = ArgumentParser("Perform all the tests defined within the"
" given descriptor")
parser.add_argument("descriptor", action="store",
help="The Boutiques descriptor as a JSON file, JSON "
"string or Zenodo ID (prefixed by 'zenodo.').")
result = parser.parse_args(params)
# Generation of the invocation schema (and descriptor validation).
invocation(result.descriptor)
# Extraction of all the invocations defined for the test-cases.
descriptor = loadJson(result.descriptor)
if (not descriptor.get("tests")):
# If no tests have been specified, we consider testing successful.
return 0
for test in descriptor["tests"]:
invocation_JSON = test["invocation"]
# Check if the invocation is valid.
invocation(result.descriptor, "--invocation",
json.dumps(invocation_JSON))
# Invocations have been properly validated. We can launch the actual tests.
test_path = op.join(op.dirname(op.realpath(__file__)), "test.py")
return pytest.main([test_path, "--descriptor", result.descriptor])
def search(*params):
parser = ArgumentParser("Search Zenodo for Boutiques descriptors. "
"When no term is supplied, will search for "
"all descriptors.")
parser.add_argument("query", nargs="?", default="boutiques",
action="store", help="Search query")
parser.add_argument("-v", "--verbose", action="store_true",
help="Print information messages")
parser.add_argument("--sandbox", action="store_true",
help="search Zenodo's sandbox instead of "
"production server. Recommended for tests.")
parser.add_argument("-m", "--max", action="store",
help="Specify the maximum number of results "
"to be returned. Default is 10.")
parser.add_argument("-nt", "--no-trunc", action="store_true",
help="Do not truncate long tool descriptions.")
parser.add_argument("-e", "--exact", action="store_true",
help="Only return results containing the exact query.")
result = parser.parse_args(params)
from boutiques.searcher import Searcher
searcher = Searcher(result.query, result.verbose, result.sandbox,
result.max, result.no_trunc, result.exact)
return searcher.search()
def pull(*params):
parser = ArgumentParser("Download a descriptor from Zenodo.")
parser.add_argument("zid", action="store", help="Zenodo ID "
"of the descriptor to pull, prefixed by "
"'zenodo.', e.g. zenodo.123456")
parser.add_argument("-v", "--verbose", action="store_true",
help="Print information messages")
parser.add_argument("--sandbox", action="store_true",
help="pull from Zenodo's sandbox instead of "
"production server. Recommended for tests.")
result = parser.parse_args(params)
from boutiques.puller import Puller
puller = Puller(result.zid, result.verbose, result.sandbox)
return puller.pull()
def bosh(args=None):
parser = ArgumentParser(description="Driver for Bosh functions",
add_help=False)
parser.add_argument("function", action="store", nargs="?",
help="The tool within boutiques/bosh you wish to run. "
"Create: creates an Boutiques descriptor from scratch. "
"Validate: validates an existing boutiques descriptor. "
"Exec: launches or simulates an execution given a "
"descriptor and a set of inputs. Import: creates a "
"descriptor for a BIDS app or updates a descriptor "
"from an older version of the schema. Export: exports a"
"descriptor to other formats. Publish: creates"
"an entry in Zenodo for the descriptor and "
"adds the DOI created by Zenodo to the descriptor. "
"Invocation: generates the invocation schema for a "
"given descriptor. Evaluate: given an invocation and a "
"descriptor, queries execution properties. "
"Test: run pytest on a descriptor detailing tests. "
"Example: Generates example command-line for descriptor"
". Search: search Zenodo for descriptors. "
"Pull: download a descriptor from Zenodo. "
"Pprint: generate pretty help text from a descriptor."
"Version: prints the version of this tool.",
choices=["create", "validate", "exec", "import",
"export", "publish", "invocation", "evaluate",
"test", "example", "search", "pull", "pprint",
"version"])
parser.add_argument("--help", "-h", action="store_true",
help="show this help message and exit")
args, params = parser.parse_known_args(args)
func = args.function
params += ["--help"] if args.help is True else []
# Returns True if bosh was called from the CLI
def runs_as_cli():
return os.path.basename(sys.argv[0]) == "bosh"
def bosh_return(val, code=0, hide=False, formatted=None):
if runs_as_cli():
if hide:
return code
if val is not None:
if formatted is not None:
print(formatted)
else:
print(val)
else:
if code == 0:
print("OK")
else:
print("Failed")
return code # everything went well
return val # calling function wants this value
try:
if func == "create":
out = create(*params)
return bosh_return(out, hide=True)
elif func == "validate":
out = validate(*params)
return bosh_return(out)
elif func == "exec":
out = execute(*params)
# If executed through CLI, print 'out' and return exit_code
# Otherwise, return out
return bosh_return(out, out.exit_code,
hide=bool(out.container_location == 'hide'))
elif func == "example":
out = execute('simulate', '-j', *params)
return bosh_return(out, out.exit_code,
hide=bool(out.container_location == 'hide'))
elif func == "import":
out = importer(*params)
return bosh_return(out)
elif func == "export":
out = exporter(*params)
return bosh_return(out)
elif func == "publish":
out = publish(*params)
return bosh_return(out)
elif func == "invocation":
out = invocation(*params)
return bosh_return(out)
elif func == "evaluate":
out = evaluate(*params)
return bosh_return(out)
elif func == "test":
out = test(*params)
return bosh_return(out)
elif func == "pprint":
out = prettyprint(*params)
return bosh_return(out)
elif func == "search":
out = search(*params)
return bosh_return(out, formatted=tabulate(out, headers='keys',
tablefmt='plain'))
elif func == "pull":
out = pull(*params)
return bosh_return(out, hide=True)
elif func == "version":
from boutiques.__version__ import VERSION
return bosh_return(VERSION)
else:
parser.print_help()
raise SystemExit
except (ZenodoError,
DescriptorValidationError,
InvocationValidationError,
ValidationError,
ExportError,
ImportError,
ExecutorError) as e:
# We don't want to raise an exception when function is called
# from CLI.
if runs_as_cli():
try:
print(e.message) # Python 2 only
except Exception:
print(e)
return 99 # Note: this conflicts with tool error codes.
raise e
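# Hedged usage sketch (not part of bosh.py): bosh() takes the same argument
# vector as the CLI entry point; "tool.json" is a placeholder descriptor path
# and the import path is assumed from this file's location.
#
#   from boutiques.bosh import bosh
#   bosh(["validate", "tool.json"])
#   out = bosh(["exec", "simulate", "tool.json"])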
|
boutiques/schema
|
tools/python/boutiques/bosh.py
|
Python
|
gpl-2.0
| 26,811
|
from geobricks_data_scripts.dev.utils.data_manager_util import get_data_manager
data_manager = get_data_manager()
# TODO How to handle the fact that it is in storage?
data_manager.delete("mod13a2", True, False, False)
|
geobricks/geobricks_data_scripts
|
geobricks_data_scripts/dev/storage/data/delete/delete_storage_metadata.py
|
Python
|
gpl-2.0
| 217
|
__author__ = 'oskyar'
from django.db import models
from django.utils.translation import ugettext as _
from s3direct.fields import S3DirectField
from smart_selects.db_fields import ChainedManyToManyField
# Subject manager
class SubjectManager(models.Manager):
def owner(self, pk_subject):
return self.get(pk=pk_subject).teacher
def by_owner(self, userProfile):
return self.filter(teacher=userProfile)
def get_num_questions(self, subject, type=None):
num_questions = 0
for topic in subject.topics.all():
if type:
for subtopic in topic.subtopics.all():
num_questions += subtopic.questions.filter(type=type).count()
else:
for subtopic in topic.subtopics.all():
num_questions += subtopic.questions.all().count()
return num_questions
def get_all_questions(self, subject, type=None):
questions = list()
for topic in subject.topics.all():
if type:
for subtopic in topic.subtopics.all():
questions += subtopic.questions.filter(type=type)
else:
for subtopic in topic.subtopics.all():
questions += subtopic.questions.all()
return questions
# Subject.
class Subject(models.Model):
# id = id created by default by Django
teacher = models.ForeignKey(
'user.UserProfile',
related_name='subjects')
students = ChainedManyToManyField(
'user.UserProfile',
chained_field='student',
chained_model_field='user',
auto_choose=True,
related_name="my_subjects")
name = models.CharField(
max_length=128,
blank=False,
null=False,
verbose_name=_("Nombre de la asignatura"))
description = models.CharField(
max_length=512,
blank=False,
null=False,
verbose_name=_("Breve descripción, máximo 512 caracteres"))
category = models.CharField(
max_length=75,
blank=False,
null=False,
verbose_name=_("Categoría"))
test_opt = models.BooleanField(
blank=False,
null=False,
verbose_name=_("Examen final directo"))
capacity = models.IntegerField(
null=True,
verbose_name=_("Nº máx. alumnos"))
image = S3DirectField(
dest='subjects',
blank=True,
null=True,
verbose_name="Imagen de la asignatura")
created_on = models.DateTimeField(blank=True, null=False)
# pos_image = models.CharField(blank=True, null=True, max_length=250)
objects = SubjectManager()
class Meta:
permissions = (
('view_subject', 'View detail Subject'),
('register_subject', 'Student registers of subject'),
('unregister_subject', 'Student unregisters of subject')
)
def __str__(self):
return self.name + " (" + self.category + ")"
|
oskyar/test-TFG
|
TFG/apps/subject/models.py
|
Python
|
gpl-2.0
| 2,996
|
import os
from com.googlecode.fascinator.api.indexer import SearchRequest
from com.googlecode.fascinator.api.storage import StorageException
from com.googlecode.fascinator.common.solr import SolrDoc, SolrResult
from org.apache.tapestry5.internal.services import URLEncoderImpl
from org.apache.tapestry5.internal import TapestryInternalUtils
from java.io import ByteArrayInputStream, ByteArrayOutputStream
from java.lang import Boolean
from java.net import URLDecoder
from org.apache.commons.io import IOUtils
class DownloadData:
def __init__(self):
pass
def __activate__(self, context):
self.services = context["Services"]
self.contextPath = context["contextPath"]
self.pageName = context["pageName"]
self.portalId = context["portalId"]
self.request = context["request"]
self.response = context["response"]
self.formData = context["formData"]
self.page = context["page"]
self.log = context["log"]
self.__metadata = SolrDoc(None)
object = None
payload = None
# URL basics
basePath = self.portalId + "/" + self.pageName
# Turn our URL into objects
fullUri = URLDecoder.decode(self.request.getAttribute("RequestURI"))
fullUri = self.tapestryUrlDecode(fullUri)
uri = fullUri[len(basePath)+1:]
object, payload = self.__resolve(uri)
if object is None:
if uri.endswith("/"):
self.log.error("Object 404: '{}'", uri)
self.response.setStatus(404)
writer = self.response.getPrintWriter("text/plain; charset=UTF-8")
writer.println("Object not found")
writer.close()
return
else:
# Sometimes adding a slash to the end will resolve the problem
self.log.error("Redirecting, object 404: '{}'", uri)
self.response.sendRedirect(context["urlBase"] + fullUri + "/")
return
# Ensure solr metadata is useable
oid = object.getId()
if self.isIndexed():
self.__metadata = self.__solrData.getResults().get(0)
else:
self.__metadata.getJsonObject().put("id", oid)
#print "URI='%s' OID='%s' PID='%s'" % (uri, object.getId(), payload.getId())
## The byte range cache will check for byte range requests first
self.cache = self.services.getByteRangeCache()
processed = self.cache.processRequest(self.request, self.response, payload)
if processed:
# We don't need to return data, the cache took care of it.
return
# Now the 'real' work of payload retrieval
if payload is not None:
filename = os.path.split(payload.getId())[1]
filename = "\"" + filename + "\""
mimeType = payload.getContentType()
if mimeType == "application/octet-stream":
self.response.setHeader("Content-Disposition", "attachment; filename=%s" % filename)
type = mimeType
# Encode textual responses before sending
if type is not None and type.startswith("text/"):
out = ByteArrayOutputStream()
IOUtils.copy(payload.open(), out)
payload.close()
writer = self.response.getPrintWriter(type + "; charset=UTF-8")
writer.println(out.toString("UTF-8"))
writer.close()
# Other data can just be streamed out
else:
if type is None:
# Send as raw data
out = self.response.getOutputStream("application/octet-stream")
self.response.setHeader("Content-Disposition", "attachment; filename=%s" % filename)
else:
out = self.response.getOutputStream(type)
self.response.setHeader("Content-Disposition", "attachment; filename=%s" % filename)
IOUtils.copy(payload.open(), out)
payload.close()
object.close()
out.close()
else:
self.response.setStatus(404)
writer = self.response.getPrintWriter("text/plain; charset=UTF-8")
writer.println("Resource not found: uri='%s'" % uri)
writer.close()
def tapestryUrlDecode(self, uri):
tapestryUrlEncoder = URLEncoderImpl()
        splitPath = TapestryInternalUtils.splitPath(uri)
decodedArray = []
for splitComponent in splitPath:
decodedArray.append(tapestryUrlEncoder.decode(splitComponent))
return '/'.join(decodedArray)
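    # Illustrative sketch (hypothetical input): Tapestry percent-encodes unsafe
    # characters inside each path component as '$' followed by four hex digits,
    # so decoding must be applied per component, e.g.
    #   tapestryUrlDecode("detail/my$0020file.pdf") -> "detail/my file.pdf"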
def getAllowedRoles(self):
metadata = self.getMetadata()
if metadata is not None:
return metadata.getList("security_filter")
else:
return []
def getViewUsers(self):
metadata = self.getMetadata()
if metadata is not None:
return metadata.getList("security_exception")
else:
return []
def getMetadata(self):
return self.__metadata
def isDetail(self):
preview = Boolean.parseBoolean(self.formData.get("preview", "false"))
return not (self.request.isXHR() or preview)
def isIndexed(self):
found = self.__solrData.getNumFound()
return (found is not None) and (found == 1)
def __resolve(self, uri):
# Grab OID from the URL
slash = uri.find("/")
if slash == -1:
return None, None
oid = uri[:slash]
# Query solr for this object
self.__loadSolrData(oid)
if not self.isIndexed():
print "WARNING: Object '%s' not found in index" % oid
sid = None
else:
# Query storage for this object
sid = self.__solrData.getResults().get(0).getFirst("storage_id")
try:
if sid is None:
# Use the URL OID
object = self.services.getStorage().getObject(oid)
else:
# We have a special storage ID from the index
object = self.services.getStorage().getObject(sid)
except StorageException, e:
#print "Failed to access object: %s" % (str(e))
return None, None
# Grab the payload from the rest of the URL
pid = uri[slash+1:]
if pid == "":
# We want the source
pid = object.getSourceId()
# Now get the payload from storage
try:
payload = object.getPayload(pid)
except StorageException, e:
#print "Failed to access payload: %s" % (str(e))
return None, None
# We're done
return object, payload
def __loadSolrData(self, oid):
portal = self.page.getPortal()
query = 'id:"%s"' % oid
if self.isDetail() and portal.getSearchQuery():
query += " AND " + portal.getSearchQuery()
req = SearchRequest(query)
req.addParam("fq", 'item_type:"object"')
if self.isDetail():
req.addParam("fq", portal.getQuery())
current_user = self.page.authentication.get_username()
            # match by role-based security filter, per-user exception, or ownership
            security_roles = self.page.authentication.get_roles_list()
            security_filter = 'security_filter:("' + '" OR "'.join(security_roles) + '")'
            security_exceptions = 'security_exception:"' + current_user + '"'
            owner_query = 'owner:"' + current_user + '"'
            security_query = "(" + security_filter + ") OR (" + security_exceptions + ") OR (" + owner_query + ")"
req.addParam("fq", security_query)
out = ByteArrayOutputStream()
        self.log.debug("Searching solr for object metadata")
self.services.getIndexer().search(req, out)
self.__solrData = SolrResult(ByteArrayInputStream(out.toByteArray()))
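    # Sketch of the effective solr query assembled above (hypothetical user
    # "jdoe" holding roles "admin" and "staff"):
    #
    #   q  = id:"<oid>" [AND <portal search query>]
    #   fq = item_type:"object"
    #   fq = (security_filter:("admin" OR "staff")) OR
    #        (security_exception:"jdoe") OR (owner:"jdoe")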
|
redbox-mint/redbox
|
config/src/main/config/portal/default/redbox/scripts/download.py
|
Python
|
gpl-2.0
| 7,869
|
from routersploit.modules.payloads.cmd.netcat_reverse_tcp import Payload
# netcat reverse tcp payload with lhost=192.168.1.4 lport=4321
reverse_tcp = (
"nc 192.168.1.4 4321 -e /bin/sh"
)
def test_payload_generation():
""" Test scenario - payload generation """
payload = Payload()
payload.lhost = "192.168.1.4"
payload.lport = 4321
assert payload.run() == reverse_tcp
|
dasseclab/dasseclab
|
clones/routersploit/tests/payloads/cmd/test_netcat_reverse_tcp.py
|
Python
|
gpl-2.0
| 398
|
from __future__ import absolute_import, print_function, division
from six.moves import range, map, filter, zip
from six import iteritems
from collections import deque, defaultdict
from .polygon import is_same_direction, line_intersection
from .surface_objects import SaddleConnection
# Vincent question:
# using a deque has the disadvantage of losing the initial points:
# ideally, doing
#   my_line[i]
# should always access the same element.
# I wanted to be able to flow backward, thus inserting at the beginning of a list.
# Perhaps it would be better to model this on a deque-like class that is indexed by
# all integers rather than just the non-negative ones? Do you know of such
# a class? Alternatively, we could store an offset.
def get_linearity_coeff(u, v):
r"""
Given the two 2-dimensional vectors ``u`` and ``v``, return ``a`` so that
``v = a*u``
If the vectors are not colinear, a ``ValueError`` is raised.
EXAMPLES::
sage: from flatsurf.geometry.straight_line_trajectory import get_linearity_coeff
sage: V = VectorSpace(QQ,2)
sage: get_linearity_coeff(V((1,0)), V((2,0)))
2
sage: get_linearity_coeff(V((2,0)), V((1,0)))
1/2
sage: get_linearity_coeff(V((0,1)), V((0,2)))
2
sage: get_linearity_coeff(V((0,2)), V((0,1)))
1/2
sage: get_linearity_coeff(V((1,2)), V((-2,-4)))
-2
sage: get_linearity_coeff(V((1,1)), V((-1,1)))
Traceback (most recent call last):
...
ValueError: non colinear
"""
if u[0]:
a = v[0]/u[0]
if v[1] != a*u[1]:
raise ValueError("non colinear")
return a
elif v[0]:
raise ValueError("non colinear")
elif u[1]:
return v[1]/u[1]
else:
raise ValueError("zero vector")
class SegmentInPolygon:
r"""
Maximal segment in a polygon of a similarity surface
EXAMPLES::
sage: from flatsurf import *
sage: from flatsurf.geometry.straight_line_trajectory import SegmentInPolygon
sage: s = similarity_surfaces.example()
sage: v = s.tangent_vector(0, (1/3,-1/4), (0,1))
sage: SegmentInPolygon(v)
Segment in polygon 0 starting at (1/3, -1/3) and ending at (1/3, 0)
"""
def __init__(self, start, end=None):
        if end is not None:
# WARNING: here we assume that both start and end are on the
# boundary
self._start = start
self._end = end
else:
self._end = start.forward_to_polygon_boundary()
self._start = self._end.forward_to_polygon_boundary()
def __eq__(self, other):
return type(self) is type(other) and \
self._start == other._start and \
self._end == other._end
def __ne__(self, other):
return type(self) is not type(other) or \
self._start != other._start or \
self._end != other._end
def __repr__(self):
r"""
TESTS::
sage: from flatsurf import *
sage: from flatsurf.geometry.straight_line_trajectory import SegmentInPolygon
sage: s = similarity_surfaces.example()
sage: v = s.tangent_vector(0, (0,0), (3,-1))
sage: SegmentInPolygon(v)
Segment in polygon 0 starting at (0, 0) and ending at (2, -2/3)
"""
return "Segment in polygon {} starting at {} and ending at {}".format(
self.polygon_label(), self.start().point(), self.end().point())
def start(self):
r"""
Return the tangent vector associated to the start of a trajectory pointed forward.
"""
return self._start
def start_is_singular(self):
return self._start.is_based_at_singularity()
def end(self):
r"""
Return a TangentVector associated to the end of a trajectory, pointed backward.
"""
return self._end
def end_is_singular(self):
return self._end.is_based_at_singularity()
    def is_edge(self):
        if not self.start_is_singular() or not self.end_is_singular():
            return False
        vv = self.start().vector()
        vertex = self.start().vertex()
        ww = self.start().polygon().edge(vertex)
        return is_same_direction(vv, ww)
def edge(self):
if not self.is_edge():
raise ValueError("Segment asked for edge when not an edge")
return self.start().vertex()
def polygon_label(self):
return self._start.polygon_label()
def invert(self):
return SegmentInPolygon(self._end, self._start)
def next(self):
r"""
Return the next segment obtained by continuing straight through the end point.
EXAMPLES::
sage: from flatsurf import *
sage: from flatsurf.geometry.straight_line_trajectory import SegmentInPolygon
sage: s = similarity_surfaces.example()
sage: s.polygon(0)
Polygon: (0, 0), (2, -2), (2, 0)
sage: s.polygon(1)
Polygon: (0, 0), (2, 0), (1, 3)
sage: v = s.tangent_vector(0, (0,0), (3,-1))
sage: seg = SegmentInPolygon(v)
sage: seg
Segment in polygon 0 starting at (0, 0) and ending at (2, -2/3)
sage: seg.next()
Segment in polygon 1 starting at (2/3, 2) and ending at (14/9, 4/3)
"""
if self.end_is_singular():
raise ValueError("Cannot continue from singularity")
return SegmentInPolygon(self._end.invert())
    def previous(self):
        if self.start_is_singular():
            raise ValueError("Cannot continue backward from singularity")
        return SegmentInPolygon(self._start.invert()).invert()
# DEPRECATED STUFF THAT WILL BE REMOVED
def start_point(self):
from sage.misc.superseded import deprecation
deprecation(1, "do not use start_point but start().point()")
return self._start.point()
def start_direction(self):
from sage.misc.superseded import deprecation
deprecation(1, "do not use start_direction but start().vector()")
return self._start.vector()
def end_point(self):
from sage.misc.superseded import deprecation
deprecation(1, "do not use end_point but end().point()")
return self._end.point()
def end_direction(self):
from sage.misc.superseded import deprecation
deprecation(1, "do not use end_direction but end().vector()")
return self._end.vector()
class AbstractStraightLineTrajectory:
r"""
You need to implement:
- ``def segment(self, i)``
- ``def segments(self)``
"""
def surface(self):
raise NotImplementedError
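    # A minimal subclass sketch (hypothetical), wrapping a precomputed list
    # of SegmentInPolygon objects:
    #
    #     class ListTrajectory(AbstractStraightLineTrajectory):
    #         def __init__(self, surface, segments):
    #             self._s = surface
    #             self._segments = list(segments)
    #         def surface(self):
    #             return self._s
    #         def segment(self, i):
    #             return self._segments[i]
    #         def segments(self):
    #             return self._segments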
def __repr__(self):
start = self.segment(0).start()
end = self.segment(-1).end()
return "Straight line trajectory made of {} segments from {} in polygon {} to {} in polygon {}".format(
self.combinatorial_length(),
start.point(), start.polygon_label(),
end.point(), end.polygon_label())
def plot(self, *args, **options):
r"""
Plot this trajectory by converting to a graphical trajectory.
        If any arguments are provided in ``*args``, there must be exactly one, and it must be a GraphicalSurface.
The keyword arguments in `**options` are passed on to :func:`GraphicalStraightLineTrajectory.plot`.
EXAMPLES::
sage: from flatsurf import *
sage: T = translation_surfaces.square_torus()
sage: v = T.tangent_vector(0, (0,0), (5,7))
sage: L = v.straight_line_trajectory()
sage: L.plot() # not tested (problem with matplotlib font caches on Travis)
Graphics object consisting of 1 graphics primitive
sage: L.plot(color='red') # not tested (problem with matplotlib font caches on Travis)
Graphics object consisting of 1 graphics primitive
"""
if len(args) > 1:
raise ValueError("SimilaritySurface.plot() can take at most one non-keyword argument.")
if len(args)==1:
from flatsurf.graphical.surface import GraphicalSurface
if not isinstance(args[0], GraphicalSurface):
raise ValueError("If an argument is provided, it must be a GraphicalSurface.")
return self.graphical_trajectory(graphical_surface = args[0]).plot(**options)
return self.graphical_trajectory().plot(**options)
def graphical_trajectory(self, graphical_surface=None, **options):
r"""
Returns a ``GraphicalStraightLineTrajectory`` corresponding to this
trajectory in the provided ``GraphicalSurface``.
"""
from flatsurf.graphical.straight_line_trajectory import GraphicalStraightLineTrajectory
if graphical_surface is None:
graphical_surface = self.surface().graphical_surface()
return GraphicalStraightLineTrajectory(self, graphical_surface, **options)
def cylinder(self):
r"""
If this is a closed orbit, return the associated maximal cylinder.
Raises a ValueError if this trajectory is not closed.
EXAMPLES::
sage: from flatsurf import *
sage: s = translation_surfaces.regular_octagon()
sage: v = s.tangent_vector(0,(1/2,0),(sqrt(2),1))
sage: traj = v.straight_line_trajectory()
sage: traj.flow(4)
sage: traj.is_closed()
True
sage: cyl = traj.cylinder()
sage: cyl.area() # a = sqrt(2)
a + 1
sage: cyl.holonomy()
(3*a + 4, 2*a + 3)
sage: cyl.edges()
(2, 3, 3, 2, 4)
"""
        # Note: the cylinder is only defined when the trajectory is closed.
if not self.is_closed():
raise ValueError("Cylinder is only defined for closed straight-line trajectories.")
from .surface_objects import Cylinder
coding = self.coding()
label = coding[0][0]
edges = [ e for l,e in coding[1:] ]
edges.append(self.surface().opposite_edge(coding[0][0],coding[0][1])[1])
return Cylinder(self.surface(), label, edges)
def coding(self, alphabet=None):
r"""
Return the coding of this trajectory with respect to the sides of the
polygons
INPUT:
- ``alphabet`` -- an optional dictionary ``(lab,nb) -> letter``. If some
labels are avoided then these crossings are ignored.
EXAMPLES::
sage: from flatsurf import *
sage: t = translation_surfaces.square_torus()
sage: v = t.tangent_vector(0, (1/2,0), (5,6))
sage: l = v.straight_line_trajectory()
sage: alphabet = {(0,0): 'a', (0,1): 'b', (0,2):'a', (0,3): 'b'}
sage: l.coding()
[(0, 0), (0, 1)]
sage: l.coding(alphabet)
['a', 'b']
sage: l.flow(10); l.flow(-10)
sage: l.coding()
[(0, 2), (0, 1), (0, 2), (0, 1), (0, 2), (0, 1), (0, 2), (0, 1), (0, 2)]
sage: print(''.join(l.coding(alphabet)))
ababababa
sage: v = t.tangent_vector(0, (1/2,0), (7,13))
sage: l = v.straight_line_trajectory()
sage: l.flow(10); l.flow(-10)
sage: print(''.join(l.coding(alphabet)))
aabaabaababaabaabaab
For a closed trajectory, the last label (corresponding also to the
starting point) is not considered::
sage: v = t.tangent_vector(0, (1/5,1/7), (1,1))
sage: l = v.straight_line_trajectory()
sage: l.flow(10)
sage: l.is_closed()
True
sage: l.coding(alphabet)
['a', 'b']
Check that the saddle connections that are obtained in the torus get the
expected coding::
sage: for _ in range(10):
....: x = ZZ.random_element(1,30)
....: y = ZZ.random_element(1,30)
....: x,y = x/gcd(x,y), y/gcd(x,y)
....: v = t.tangent_vector(0, (0,0), (x,y))
....: l = v.straight_line_trajectory()
....: l.flow(200); l.flow(-200)
....: w = ''.join(l.coding(alphabet))
....: assert Word(w+'ab'+w).is_balanced()
....: assert Word(w+'ba'+w).is_balanced()
....: assert w.count('a') == y-1
....: assert w.count('b') == x-1
"""
ans = []
segments = self.segments()
s = segments[0]
start = s.start()
if start._position._position_type == start._position.EDGE_INTERIOR:
p = s.polygon_label()
e = start._position.get_edge()
lab = (p,e) if alphabet is None else alphabet.get((p,e))
if lab is not None:
ans.append(lab)
for i in range(len(segments)-1):
s = segments[i]
end = s.end()
p = s.polygon_label()
e = end._position.get_edge()
lab = (p,e) if alphabet is None else alphabet.get((p,e))
if lab is not None:
ans.append(lab)
s = segments[-1]
end = s.end()
if end._position._position_type == end._position.EDGE_INTERIOR and \
end.invert() != start:
p = s.polygon_label()
e = end._position.get_edge()
lab = (p,e) if alphabet is None else alphabet.get((p,e))
if lab is not None:
ans.append(lab)
return ans
def initial_tangent_vector(self):
return self.segment(0).start()
def terminal_tangent_vector(self):
return self.segment(-1).end()
def intersects(self, traj, count_singularities = False):
r"""
Return true if this trajectory intersects the other trajectory.
"""
try:
next(self.intersections(traj, count_singularities = count_singularities))
except StopIteration:
return False
return True
def intersections(self, traj, count_singularities = False, include_segments = False):
r"""
Return the set of SurfacePoints representing the intersections
of this trajectory with the provided trajectory or SaddleConnection.
Singularities will be included only if count_singularities is
set to True.
If include_segments is True, it iterates over triples consisting of the SurfacePoint,
and two sets. The first set consists of segments of this trajectory that contain the point
and the second set consists of segments of traj that contain the point.
EXAMPLES::
sage: from flatsurf import *
sage: s=translation_surfaces.square_torus()
sage: traj1 = s.tangent_vector(0,(1/2,0),(1,1)).straight_line_trajectory()
sage: traj1.flow(3)
sage: traj1.is_closed()
True
sage: traj2 = s.tangent_vector(0,(1/2,0),(-1,1)).straight_line_trajectory()
sage: traj2.flow(3)
sage: traj2.is_closed()
True
sage: sum(1 for _ in traj1.intersections(traj2))
2
"""
# Partition the segments making up the trajectories by label.
if isinstance(traj,SaddleConnection):
traj = traj.trajectory()
lab_to_seg1 = {}
for seg1 in self.segments():
label = seg1.polygon_label()
if label in lab_to_seg1:
lab_to_seg1[label].append(seg1)
else:
lab_to_seg1[label] = [seg1]
lab_to_seg2 = {}
for seg2 in traj.segments():
label = seg2.polygon_label()
if label in lab_to_seg2:
lab_to_seg2[label].append(seg2)
else:
lab_to_seg2[label] = [seg2]
intersection_points = set()
if include_segments:
segments={}
for label,seg_list_1 in iteritems(lab_to_seg1):
if label in lab_to_seg2:
seg_list_2 = lab_to_seg2[label]
for seg1 in seg_list_1:
for seg2 in seg_list_2:
x = line_intersection(seg1.start().point(),
seg1.start().point()+seg1.start().vector(),
seg2.start().point(),
seg2.start().point()+seg2.start().vector())
if x is not None:
pos = self._s.polygon(seg1.polygon_label()).get_point_position(x)
if pos.is_inside() and (count_singularities or not pos.is_vertex()):
new_point = self._s.surface_point(seg1.polygon_label(),x)
if new_point not in intersection_points:
intersection_points.add(new_point)
if include_segments:
segments[new_point]=({seg1},{seg2})
else:
yield new_point
elif include_segments:
                                    segments[new_point][0].add(seg1)
                                    segments[new_point][1].add(seg2)
if include_segments:
for x in iteritems(segments):
yield x
class StraightLineTrajectory(AbstractStraightLineTrajectory):
r"""
Straight-line trajectory in a similarity surface.
EXAMPLES::
# Demonstrate the handling of edges
sage: from flatsurf import *
sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectory
sage: p = SymmetricGroup(2)('(1,2)')
sage: s = translation_surfaces.origami(p,p)
sage: traj = StraightLineTrajectory(s.tangent_vector(1,(0,0),(1,0)))
sage: traj
Straight line trajectory made of 1 segments from (0, 0) in polygon 1 to (1, 1) in polygon 2
sage: traj.is_saddle_connection()
True
sage: traj2 = StraightLineTrajectory(s.tangent_vector(1,(0,0),(0,1)))
sage: traj2
Straight line trajectory made of 1 segments from (1, 0) in polygon 2 to (0, 1) in polygon 1
sage: traj2.is_saddle_connection()
True
"""
def __init__(self, tangent_vector):
self._segments = deque()
seg = SegmentInPolygon(tangent_vector)
self._segments.append(seg)
self._setup_forward()
self._setup_backward()
self._s=tangent_vector.surface()
def surface(self):
return self._s
def segment(self, i):
r"""
EXAMPLES::
sage: from flatsurf import *
sage: O = translation_surfaces.regular_octagon()
sage: v = O.tangent_vector(0, (1,1), (33,45))
sage: L = v.straight_line_trajectory()
sage: L.segment(0)
Segment in polygon 0 starting at (4/15, 0) and ending at (11/26*a +
1, 15/26*a + 1)
sage: L.flow(-1)
sage: L.segment(0)
Segment in polygon 0 starting at (-1/2*a, 7/22*a + 7/11) and ending
at (4/15, a + 1)
sage: L.flow(1)
sage: L.segment(2)
Segment in polygon 0 starting at (-1/13*a, 1/13*a) and ending at
(9/26*a + 11/13, 17/26*a + 15/13)
"""
return self.segments()[i]
def combinatorial_length(self):
return len(self.segments())
def segments(self):
return self._segments
def _setup_forward(self):
v = self.terminal_tangent_vector()
if v.is_based_at_singularity():
self._forward = None
else:
self._forward = v.invert()
def _setup_backward(self):
v = self.initial_tangent_vector()
if v.is_based_at_singularity():
self._backward = None
else:
self._backward = v.invert()
def is_forward_separatrix(self):
return self._forward is None
def is_backward_separatrix(self):
return self._backward is None
def is_saddle_connection(self):
return (self._forward is None) and (self._backward is None)
def is_closed(self):
r"""
Test whether this is a closed trajectory.
By convention, by a closed trajectory we mean a trajectory without any
singularities.
.. SEEALSO::
:meth:`is_saddle_connection`
EXAMPLES:
An example in a cone surface covered by the torus::
sage: from flatsurf import *
sage: p = polygons.square()
sage: s = Surface_list(base_ring=p.base_ring())
sage: s.add_polygon(p,[(0,3),(0,2),(0,1),(0,0)])
0
sage: s.set_immutable()
sage: t = RationalConeSurface(s)
sage: v = t.tangent_vector(0, (1/2,0), (1/3,7/5))
sage: l = v.straight_line_trajectory()
sage: l.is_closed()
False
sage: l.flow(100)
sage: l.is_closed()
True
sage: v = t.tangent_vector(0, (1/2,0), (1/3,2/5))
sage: l = v.straight_line_trajectory()
sage: l.flow(100)
sage: l.is_closed()
False
sage: l.is_saddle_connection()
False
sage: l.flow(-100)
sage: l.is_saddle_connection()
True
"""
return (not self.is_forward_separatrix()) and \
self._forward.differs_by_scaling(self.initial_tangent_vector())
def flow(self, steps):
r"""
        Append or prepend segments to the trajectory.
If steps is positive, attempt to append this many segments.
If steps is negative, attempt to prepend this many segments.
        Will fail gracefully if the trajectory hits a singularity or closes up.
EXAMPLES::
sage: from flatsurf import *
sage: s = similarity_surfaces.example()
sage: v = s.tangent_vector(0, (1,-1/2), (3,-1))
sage: traj = v.straight_line_trajectory()
sage: traj
Straight line trajectory made of 1 segments from (1/4, -1/4) in polygon 0 to (2, -5/6) in polygon 0
sage: traj.flow(1)
sage: traj
Straight line trajectory made of 2 segments from (1/4, -1/4) in polygon 0 to (61/36, 11/12) in polygon 1
sage: traj.flow(-1)
sage: traj
Straight line trajectory made of 3 segments from (15/16, 45/16) in polygon 1 to (61/36, 11/12) in polygon 1
"""
while steps>0 and \
(not self.is_forward_separatrix()) and \
(not self.is_closed()):
self._segments.append(SegmentInPolygon(self._forward))
self._setup_forward()
steps -= 1
while steps<0 and \
(not self.is_backward_separatrix()) and \
(not self.is_closed()):
self._segments.appendleft(SegmentInPolygon(self._backward).invert())
self._setup_backward()
steps += 1
class StraightLineTrajectoryTranslation(AbstractStraightLineTrajectory):
r"""
Straight line trajectory in a translation surface.
This is similar to :class:`StraightLineTrajectory` but implemented using
interval exchange maps. It should be faster than the implementation via
segments and flowing in polygons.
This class only stores a list of triples ``(p, e, x)`` where:
- ``p`` is a label of a polygon
- ``e`` is the number of some edge in ``p``
    - ``x`` is the position of the point on ``e`` (note that it is not
      necessarily a number between 0 and 1; it is given relative to the
      length of the induced interval in the iet)
(see the methods :meth:`_prev` and :meth:`_next`)
"""
def __init__(self, tangent_vector):
t = tangent_vector.polygon_label()
self._vector = tangent_vector.vector()
self._s = tangent_vector.surface()
seg = SegmentInPolygon(tangent_vector)
if seg.is_edge():
self._points = None
self._edge = seg
return
start = seg.start()
pos = start._position
if pos._position_type == pos.EDGE_INTERIOR:
i = pos.get_edge()
elif pos._position_type == pos.VERTEX:
i = pos.get_vertex()
else:
            raise RuntimeError("segment start is neither on an edge nor at a vertex")
p = start.polygon_label()
poly = self._s.polygon(p)
T = self._get_iet(p)
x = get_linearity_coeff(poly.vertex(i+1) - poly.vertex(i),
start.point() - poly.vertex(i))
x *= T.length_bot(i)
self._points = deque() # we store triples (lab, edge, rel_pos)
self._points.append((p, i, x))
def _next(self, p, e, x):
r"""
Return the image of ``(p, e, x)``
EXAMPLES::
sage: from flatsurf import *
sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation
sage: S = SymmetricGroup(3)
sage: r = S('(1,2)')
sage: u = S('(1,3)')
sage: o = translation_surfaces.origami(r,u)
sage: v = o.tangent_vector(1, (1/3,1/7), (5,13))
sage: L = StraightLineTrajectoryTranslation(v)
sage: t0 = (1,0,1/3)
sage: t1 = L._next(*t0)
sage: t2 = L._next(*t1)
sage: t0,t1,t2
((1, 0, 1/3), (3, 0, 16/3), (1, 0, 31/3))
sage: assert L._previous(*t2) == t1
sage: assert L._previous(*t1) == t0
"""
e, x = self._get_iet(p).forward_image(e, x)
p, e = self._s.opposite_edge(p, e)
return (p, e, x)
def _previous(self, p, e, x):
r"""
Return the preimage of ``(p, e, x)``
"""
p, e = self._s.opposite_edge(p, e)
e, x = self._get_iet(p).backward_image(e, x)
return (p, e, x)
def combinatorial_length(self):
if self._points is None:
return 1
return len(self._points)
def _get_iet(self, label):
polygon = self._s.polygon(label)
try:
return self._iets[polygon]
except AttributeError:
self._iets = {polygon: polygon.flow_map(self._vector)}
except KeyError:
self._iets[polygon] = polygon.flow_map(self._vector)
return self._iets[polygon]
def segment(self, i):
r"""
EXAMPLES::
sage: from flatsurf import *
sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation
sage: O = translation_surfaces.regular_octagon()
sage: v = O.tangent_vector(0, (1,1), (33,45))
sage: L = StraightLineTrajectoryTranslation(v)
sage: L.segment(0)
Segment in polygon 0 starting at (4/15, 0) and ending at (11/26*a +
1, 15/26*a + 1)
sage: L.flow(-1)
sage: L.segment(0)
Segment in polygon 0 starting at (-1/2*a, 7/22*a + 7/11) and ending
at (4/15, a + 1)
sage: L.flow(1)
sage: L.segment(2)
Segment in polygon 0 starting at (-1/13*a, 1/13*a) and ending at
(9/26*a + 11/13, 17/26*a + 15/13)
"""
if self._points is None:
return self._edge
lab, e0, x0 = self._points[i]
iet = self._get_iet(lab)
e1, x1 = iet.forward_image(e0, x0)
poly = self._s.polygon(lab)
l0 = iet.length_bot(e0)
l1 = iet.length_top(e1)
point0 = poly.vertex(e0) + poly.edge(e0) * x0/l0
point1 = poly.vertex(e1) + poly.edge(e1) * (l1-x1)/l1
v0 = self._s.tangent_vector(lab, point0, self._vector, ring=self._vector.base_ring())
v1 = self._s.tangent_vector(lab, point1, -self._vector, ring=self._vector.base_ring())
return SegmentInPolygon(v0,v1)
def segments(self):
r"""
EXAMPLES::
sage: from flatsurf import *
sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation
sage: s = translation_surfaces.square_torus()
sage: v = s.tangent_vector(0, (0,0), (1,1+AA(5).sqrt()), ring=AA)
sage: L = StraightLineTrajectoryTranslation(v)
sage: L.flow(2)
sage: L.segments()
[Segment in polygon 0 starting at (0, 0) and ending at (0.3090169943749474?, 1),
Segment in polygon 0 starting at (0.3090169943749474?, 0) and ending at (0.618033988749895?, 1),
Segment in polygon 0 starting at (0.618033988749895?, 0) and ending at (0.9270509831248423?, 1)]
"""
return [self.segment(i) for i in range(self.combinatorial_length())]
def is_closed(self):
if self._points is None:
raise NotImplementedError
return self._points[0] == self._next(*self._points[-1])
def is_forward_separatrix(self):
if self._points is None:
return True
p1,e1,x1 = self._next(*self._points[-1])
return x1.is_zero()
def is_backward_separatrix(self):
return self._points is None or self._points[0][2].is_zero()
def is_saddle_connection(self):
r"""
EXAMPLES::
sage: from flatsurf import *
sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation
sage: torus = translation_surfaces.square_torus()
sage: v = torus.tangent_vector(0, (1/2,1/2), (1,1))
sage: S = StraightLineTrajectoryTranslation(v)
sage: S.is_saddle_connection()
True
sage: v = torus.tangent_vector(0, (1/3,2/3), (1,2))
sage: S = StraightLineTrajectoryTranslation(v)
sage: S.is_saddle_connection()
False
sage: S.flow(1)
sage: S.is_saddle_connection()
True
"""
return self._points is None or (self.is_forward_separatrix() and self.is_backward_separatrix())
def flow(self, steps):
if self._points is None:
return
if steps > 0:
t = self._points[-1]
for i in range(steps):
t = self._next(*t)
if t == self._points[0] or t[2].is_zero():
break
self._points.append(t)
elif steps < 0:
t = self._points[0]
for i in range(-steps):
if t[2].is_zero():
break
t = self._previous(*t)
if t == self._points[-1]:
# closed curve or backward separatrix
break
self._points.appendleft(t)
|
videlec/sage-flatsurf
|
flatsurf/geometry/straight_line_trajectory.py
|
Python
|
gpl-2.0
| 31,149
|
import numpy as np
import matplotlib.pyplot as plt
from stimulus import *
from myintegrator import *
from functions import *
import matplotlib.gridspec as gridspec
import cPickle as pickle
#-------------------------------------------------------------------
#-------------------------------------------------------------------
#-----------------Stimulation of Populations------------------------
#-------------------------------------------------------------------
# setting up the simulation
#times = 100
#delta = 50
#period = 30
patterns=np.identity(n)
patterns=[patterns[:,i] for i in range(n)]
mystim=stimulus(patterns,lagStim,delta,period,times)
mystim.inten=amp
#integrator
npts=int(np.floor(delay/dt)+1) # points delay
tmax=times*(lagStim+n*(period+delta))+100.+mystim.delay_begin
thetmax=tmax+40000
#t = np.linspace(0,thetmax,100000)
u,uI,connectivity,WEI,t = pickle.load(open('dyn_stimulation_SA.p','rb'))
#-----------------------------------------------------------------------------------------
#-------------------------------- Dynamics-----------------------------------------------
#----------------------------------------------------------------------------------------
#initial conditions
tmaxdyn=500
mystim.inten=0.
theintegrator=myintegrator(delay,dt,n,tmaxdyn)
theintegrator.fast=False
#integration
u_ret,uI_ret,connectivity_ret,WEI_ret,t_ret = pickle.load(open('dyn_retrieval_SA.p','rb'))
u_ret_PA,uI_ret_PA,connectivity_ret_PA,WEI_ret_PA,t_ret_PA = pickle.load(open('dyn_retrieval_PA.p','rb'))
#-------------------------------------------------------------------
#-----------------Stimulation of Populations------------------------
#-------------------------------------------------------------------
rc={'axes.labelsize': 32, 'font.size': 30, 'legend.fontsize': 25.0, 'axes.titlesize': 35}
plt.rcParams.update(**rc)
plt.rcParams['image.cmap'] = 'jet'
fig = plt.figure(figsize=(19, 11))
gs = gridspec.GridSpec(2, 2)#height_ratios=[3,3,2])
gs.update(wspace=0.44,hspace=0.03)
gs0 = gridspec.GridSpec(2, 2)
gs0.update(wspace=0.05,hspace=0.4,left=0.54,right=1.,top=0.88,bottom=0.1106)
#gs1.update(wspace=0.05,hspace=0.4,left=0.1245,right=1.,top=0.21,bottom=0.05)
# Excitatory and Inhibitory weights
ax1A = plt.subplot(gs[0,0])
ax1B = plt.subplot(gs[1,0])
#sequence
axSA = plt.subplot(gs0[1,0])
axPA = plt.subplot(gs0[1,1])
#stimulation
ax2B= plt.subplot(gs0[0,0])
ax2C= plt.subplot(gs0[0,1])
colormap = plt.cm.Accent
ax2B.set_prop_cycle(plt.cycler('color',[colormap(i) for i in np.linspace(0, 0.9,n)]))
ax2B.plot(t,phi(u[:,:],theta,uc),lw=3)
mystim.inten=.1
elstim=np.array([sum(mystim.stim(x)) for x in t])
ax2B.plot(t,elstim,'k',lw=3)
ax2B.fill_between(t,np.zeros(len(t)),elstim,alpha=0.5,edgecolor='k', facecolor='darkgrey')
ax2B.set_ylim([0,1.2])
ax2B.set_xlim([0,600])
ax2B.set_yticks([0.5,1])
ax2B.set_xticks([0,200,400])
ax2B.set_xticklabels([0.,.2,.4])
ax2B.set_xlabel('Time (s)')
ax2B.set_ylabel('Rate')
ax2B.set_title('(B)',x=1.028,y=1.04)
ax2C.set_prop_cycle(plt.cycler('color',[colormap(i) for i in np.linspace(0, 0.9,n)]))
ax2C.plot(t,phi(u[:,:],theta,uc),lw=3)
mystim.inten=.1
elstim=np.array([sum(mystim.stim(x)) for x in t])
ax2C.plot(t,elstim,'k',lw=3)
ax2C.fill_between(t,np.zeros(len(t)),elstim,alpha=0.5,edgecolor='k', facecolor='darkgrey')
ax2C.set_xlim([89475,90075])
ax2C.set_xticks([89500,89700,89900])
ax2C.set_xticklabels([89.5,89.7,89.9])
ax2C.set_ylim([0,1.2])
ax2C.set_yticks([])
ax2C.set_xlabel('Time (s)')
#ax2C.set_ylabel('Rate')
#----------------------------------------------------------------------
#------------Synaptic Weights------------------------------------------
#----------------------------------------------------------------------
for i in range(10):
ax1A.plot(t,connectivity[:,i,i],'c',lw=3)
for i in range(0,9):
ax1A.plot(t,connectivity[:,i+1,i],'y',lw=3)
for i in range(8):
ax1A.plot(t,connectivity[:,i+2,i],'g',lw=3)
for i in range(9):
ax1A.plot(t,connectivity[:,i,i+1],'r',lw=3)
for i in range(8):
ax1A.plot(t,connectivity[:,i,i+2],'b',lw=3)
ax1A.set_xticks([])
ax1A.axvline(x=tmax,ymin=0,ymax=2.,linewidth=2,ls='--',color='gray',alpha=0.7)
#ax1A.set_xticklabels([0,50,100,150])
ax1A.set_ylim([0,1.8])
ax1A.set_xlim([0,250000])
ax1A.set_yticks([0,0.5,1.,1.5])
#ax1A.set_xlabel('Time (s)')
ax1A.set_ylabel('Synaptic Weights')
ax1A.set_title('(A)',y=1.04)
#------------------------------------------------------------------------
#-------------Homeostatic Variable --------------------------------------
#------------------------------------------------------------------------
ax1B.set_prop_cycle(plt.cycler('color',[colormap(i) for i in np.linspace(0, 0.9,n)]))
ax1B.plot(t,WEI[:],lw=3)
ax1B.axvline(x=tmax,ymin=0,ymax=2.,linewidth=2,ls='--',color='gray',alpha=0.7)
ax1B.set_ylim([0.,3.4])
ax1B.set_yticks([0.,1.,2.,3.])
ax1B.set_xlim([0,250000])
ax1B.set_xticks([0,50000,100000,150000,200000,250000])
ax1B.set_xticklabels([0,50,100,150,200,250])
ax1B.set_xlabel('Time (s)')
ax1B.set_ylabel(r'$W_{EI}$')
#plot sequence
axSA.set_prop_cycle(plt.cycler('color',[colormap(i) for i in np.linspace(0, 0.9,n)]))
axSA.plot(t_ret,phi(u_ret[:,:],theta,uc),lw=5)
axSA.set_ylim([0,1.2])
axSA.set_xlim([0,370])
axSA.set_xticks([0,100,200,300])
axSA.set_yticks([0.5,1])
axSA.set_xlabel('Time (ms)')
axSA.set_ylabel('Rate')
#axSA.set_title('(C)',y=1.04)
axSA.set_title('(C)',x=1.028,y=1.04)
# plot PA
axPA.set_prop_cycle(plt.cycler('color',[colormap(i) for i in np.linspace(0, 0.9,n)]))
axPA.plot(t_ret_PA,phi(u_ret_PA[:,:],theta,uc),lw=5)
axPA.set_ylim([0,1.2])
axPA.set_xlim([0,370])
axPA.set_xticks([0,100,200,300])
axPA.set_yticks([])
axPA.set_xlabel('Time (ms)')
#plt.show()
plt.savefig('fig6.pdf', bbox_inches='tight')
|
ulisespereira/PereiraBrunel2016
|
figure7/plotting.py
|
Python
|
gpl-2.0
| 5,736
|
# coding: utf-8
from qgis.gui import QgsColorWheel
color_wheel = QgsColorWheel()
def on_color_wheel_changed(color):
print(color)
color_wheel.colorChanged.connect(on_color_wheel_changed)
color_wheel.show()
|
webgeodatavore/pyqgis-samples
|
gui/qgis-sample-QgsColorWheel.py
|
Python
|
gpl-2.0
| 214
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
### 2008-2015 Charlie Barnes.
### This program is free software; you can redistribute it and/or modify
### it under the terms of the GNU General Public License as published by
### the Free Software Foundation; either version 2 of the License, or
### (at your option) any later version.
### This program is distributed in the hope that it will be useful,
### but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
### You should have received a copy of the GNU General Public License
### along with this program; if not, write to the Free Software
### Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
def repeat_to_length(string_to_expand, length):
return (string_to_expand * ((length/len(string_to_expand))+1))[:length]
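# e.g. repeat_to_length('ab', 5) -> 'ababa' (the string is tiled past the
# requested length, then truncated)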
try:
from fpdf import FPDF
except ImportError:
from pyfpdf import FPDF
class PDF(FPDF):
def __init__(self, orientation,unit,format):
FPDF.__init__(self, orientation=orientation,unit=unit,format=format)
self.toc = []
self.numbering = False
self.num_page_num = 0
self.toc_page_break_count = 1
self.set_left_margin(10)
self.set_right_margin(10)
self.do_header = False
self.type = None
self.toc_length = 0
self.doing_the_list = False
self.vcs = []
self.toc_page_num = 2
self.dataset = None
self.orientation = orientation
self.orientation_changes = [0]
def p_add_page(self):
#if(self.numbering):
self.add_page()
self.num_page_num = self.num_page_num + 1
def num_page_no(self):
return self.num_page_num
def startPageNums(self):
self.numbering = True
def stopPageNums(self):
self.numbering = False
def TOC_Entry(self, txt, level=0):
self.toc.append({'t':txt, 'l':level, 'p':str(self.num_page_no()+self.toc_length)})
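    # e.g. after pdf.TOC_Entry('Introduction', level=0) the toc list holds an
    # entry of the form {'t': 'Introduction', 'l': 0, 'p': '<page number>'}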
def insertTOC(self, location=1, labelSize=20, entrySize=10, tocfont='Helvetica', label='Table of Contents'):
#make toc at end
self.stopPageNums()
self.section = 'Contents'
self.p_add_page()
tocstart = self.page
self.set_font('Helvetica', '', 20)
self.multi_cell(0, 20, 'Contents', 0, 'J', False)
used_pages = []
link_abscissa = {}
for t in self.toc:
#Offset
level = t['l']
if level > 0:
self.cell(level*8)
weight = ''
if level == 0:
weight = 'B'
txxt = t['t']
self.set_font(tocfont, weight, entrySize)
strsize = self.get_string_width(txxt)
self.cell(strsize+2, self.font_size+2, txxt, 0, 0, '', False)
#store the TOC links & position for later use
if self.page_no() not in link_abscissa.keys():
link_abscissa[self.page_no()] = []
link_abscissa[self.page_no()].append([int(t['p']), self.y])
#Filling dots
self.set_font(tocfont, '', entrySize)
PageCellSize = self.get_string_width(t['p'])+2
w = self.w-self.l_margin-self.r_margin-PageCellSize-(level*8)-(strsize+2)
nb = w/self.get_string_width('.')
dots = repeat_to_length('.', int(nb))
self.cell(w, self.font_size+2, dots, 0, 0, 'R')
#Page number of the toc entry
self.cell(PageCellSize, self.font_size+2, str(int(t['p'])), 0, 1, 'R')
if self.toc_page_break_count%2 != 0:
self.section = ''
self.toc_page_break_count = self.toc_page_break_count + 1
self.p_add_page()
#Grab it and move to selected location
n = self.page
ntoc = n - tocstart + 1
last = []
#store toc pages
i = tocstart
while i <= n:
last.append(self.pages[i])
i = i + 1
#move pages
i = tocstart
while i >= (location-1):
self.pages[i+ntoc] = self.pages[i]
i = i - 1
#Put toc pages at insert point
i = 0
while i < ntoc:
self.pages[location + i] = last[i]
#loop through all the TOC links for this page and add them
try:
for linkdata in link_abscissa[tocstart+i]:
self.page = location + i
link = self.add_link()
self.set_link(link, y=0, page=linkdata[0])
self.link(x=self.l_margin, y=linkdata[1], w=self.w-self.r_margin, h=self.font_size+2, link=link)
except KeyError:
pass
i = i + 1
self.page = n
def header(self):
if self.do_header:
self.set_font('Helvetica', '', 8)
self.set_text_color(0, 0, 0)
self.set_line_width(0.1)
            if (self.section != 'Contents' and self.page_no()%2 == 0) or (self.section == 'Contents' and self.toc_page_break_count%2 == 0):
self.cell(0, 5, self.section, 'B', 0, 'L', 0) # even page header
self.cell(0, 5, self.title.replace('\n', ' - '), 'B', 1, 'R', 0) # even page header
            elif (self.section != 'Contents' and self.page_no()%2 == 1) or (self.section == 'Contents' and self.toc_page_break_count%2 == 1):
self.cell(0, 5, self.section, 'B', 1, 'R', 0) #odd page header
if self.type == 'list' and self.doing_the_list == True:
col_width = 12.7#((self.w - self.l_margin - self.r_margin)/2)/7.5
#vc headings
self.set_font('Helvetica', '', 10)
self.set_line_width(0.0)
self.set_y(20)
self.set_x(self.w-(7+col_width+(((col_width*3)+(col_width/4))*len(self.vcs))))
self.cell(col_width, 5, '', '0', 0, 'C', 0)
for vc in sorted(self.vcs):
if vc == None:
vc_head_text = ''
else:
vc_head_text = ''.join(['VC',vc])
self.cell((col_width*3), 5, vc_head_text, '0', 0, 'C', 0)
self.cell(col_width/4, 5, '', '0', 0, 'C', 0)
self.ln()
self.set_x(self.w-(7+col_width+(((col_width*3)+(col_width/4))*len(self.vcs))))
self.set_font('Helvetica', '', 8)
self.cell(col_width, 5, '', '0', 0, 'C', 0)
for vc in sorted(self.vcs):
#colum headings
self.cell(col_width, 5, ' '.join([self.dataset.config.get('List', 'distribution_unit'), 'sqs']), '0', 0, 'C', 0)
self.cell(col_width, 5, 'Records', '0', 0, 'C', 0)
self.cell(col_width, 5, 'Last in', '0', 0, 'C', 0)
self.cell(col_width/4, 5, '', '0', 0, 'C', 0)
self.y0 = self.get_y()
if self.section == 'Contributors' or self.section == 'Contents':
self.set_y(self.y0 + 20)
def footer(self):
self.set_y(-20)
self.set_font('Helvetica','',8)
#only show page numbers in the main body
#if self.num_page_no() >= 4 and self.section != 'Contents' and self.section != 'Index' and self.section != 'Contributors' and self.section != 'References' and self.section != 'Introduction' and self.section != '':
if self.num_page_no() >= 5 and self.section != 'Contents' and self.section != '' and self.section != 'Index' and self.section != 'Contributors' and self.section != 'References' and self.section != 'Introduction':
self.cell(0, 10, str(self.num_page_no()+self.toc_length), '', 0, 'C')
def setcol(self, col):
self.col = col
x = 10 + (col*100)
self.set_left_margin(x)
self.set_x(x)
def accept_page_break(self):
if self.section == 'Contents':
self.toc_page_break_count = self.toc_page_break_count + 1
if self.section == 'Contributors':
self.set_y(self.y0+20)
if self.section == 'Index':
if (self.orientation == 'Portrait' and self.col == 0) or (self.orientation == 'Landscape' and (self.col == 0 or self.col == 1)) :
self.setcol(self.col + 1)
self.set_y(self.y0+20)
return False
else:
self.setcol(0)
self.p_add_page()
self.set_y(self.y0+20)
return False
else:
return True
|
charlie-barnes/dipper-stda
|
pdf.py
|
Python
|
gpl-2.0
| 8,846
|
# pywws - Python software for USB Wireless Weather Stations
# http://github.com/jim-easterbrook/pywws
# Copyright (C) 2008-15 Jim Easterbrook jim@jim-easterbrook.me.uk
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Low level USB interface to weather station, using PyUSB v0.4.
Introduction
============
This module handles low level communication with the weather station
via the `PyUSB <http://sourceforge.net/apps/trac/pyusb/>`_ library. It
is one of several USB device modules, each of which uses a different
USB library interface. See :ref:`Installation - USB
library<dependencies-usb>` for details.
Testing
=======
Run :py:mod:`pywws.testweatherstation` with increased verbosity so it
reports which USB device access module is being used::
python -m pywws.testweatherstation -vv
18:28:09:pywws.weatherstation.CUSBDrive:using pywws.device_pyusb
0000 55 aa ff ff ff ff ff ff ff ff ff ff ff ff ff ff 05 20 01 41 11 00 00 00 81 00 00 0f 05 00 e0 51
0020 03 27 ce 27 00 00 00 00 00 00 00 12 02 14 18 27 41 23 c8 00 00 00 46 2d 2c 01 64 80 c8 00 00 00
0040 64 00 64 80 a0 28 80 25 a0 28 80 25 03 36 00 05 6b 00 00 0a 00 f4 01 12 00 00 00 00 00 00 00 00
0060 00 00 49 0a 63 12 05 01 7f 00 36 01 60 80 36 01 60 80 bc 00 7b 80 95 28 12 26 6c 28 25 26 c8 01
0080 1d 02 d8 00 de 00 ff 00 ff 00 ff 00 00 11 10 06 01 29 12 02 01 19 32 11 09 09 05 18 12 01 22 13
00a0 14 11 11 04 15 04 11 12 17 05 12 11 09 02 15 26 12 02 11 07 05 11 09 02 15 26 12 02 11 07 05 11
00c0 09 10 09 12 12 02 02 12 38 12 02 07 19 00 11 12 16 03 27 12 02 03 11 00 11 12 16 03 27 11 12 26
00e0 21 32 11 12 26 21 32 12 02 06 19 57 12 02 06 19 57 12 02 06 19 57 12 02 06 19 57 12 02 06 19 57
API
===
"""
__docformat__ = "restructuredtext en"
import platform
import usb
class USBDevice(object):
def __init__(self, idVendor, idProduct):
"""Low level USB device access via PyUSB library.
:param idVendor: the USB "vendor ID" number, for example 0x1941.
:type idVendor: int
:param idProduct: the USB "product ID" number, for example 0x8021.
:type idProduct: int
"""
dev = self._find_device(idVendor, idProduct)
if not dev:
raise IOError("Weather station device not found")
self.devh = dev.open()
if not self.devh:
raise IOError("Open device failed")
self.devh.reset()
## if platform.system() is 'Windows':
## self.devh.setConfiguration(1)
try:
self.devh.claimInterface(0)
except usb.USBError:
# claim interface failed, try detaching kernel driver first
if not hasattr(self.devh, 'detachKernelDriver'):
raise RuntimeError(
"Please upgrade pyusb (or python-usb) to 0.4 or higher")
try:
self.devh.detachKernelDriver(0)
self.devh.claimInterface(0)
except usb.USBError:
raise IOError("Claim interface failed")
# device may have data left over from an incomplete read
for i in range(4):
try:
self.devh.interruptRead(0x81, 8, 1200)
except usb.USBError:
break
def __del__(self):
if self.devh:
try:
self.devh.releaseInterface()
except usb.USBError:
# interface was not claimed. No problem
pass
def _find_device(self, idVendor, idProduct):
"""Find a USB device by product and vendor id."""
for bus in usb.busses():
for device in bus.devices:
if (device.idVendor == idVendor and
device.idProduct == idProduct):
return device
return None
def read_data(self, size):
"""Receive data from the device.
If the read fails for any reason, an :obj:`IOError` exception
is raised.
:param size: the number of bytes to read.
:type size: int
:return: the data received.
:rtype: list(int)
"""
result = self.devh.interruptRead(0x81, size, 1200)
if result is None or len(result) < size:
            raise IOError('pywws.device_pyusb.USBDevice.read_data failed')
return list(result)
def write_data(self, buf):
"""Send data to the device.
If the write fails for any reason, an :obj:`IOError` exception
is raised.
:param buf: the data to send.
:type buf: list(int)
:return: success status.
:rtype: bool
"""
result = self.devh.controlMsg(
usb.ENDPOINT_OUT + usb.TYPE_CLASS + usb.RECIP_INTERFACE,
usb.REQ_SET_CONFIGURATION, buf, value=0x200, timeout=50)
if result != len(buf):
            raise IOError('pywws.device_pyusb.USBDevice.write_data failed')
return True
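# Minimal usage sketch (vendor/product ids taken from the docstring examples
# above; the write payload is hypothetical and depends on the station protocol):
#
#     dev = USBDevice(0x1941, 0x8021)
#     dev.write_data([0xa1, 0x00, 0x00, 0x20, 0xa1, 0x00, 0x00, 0x20])
#     block = dev.read_data(32)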
|
3v1n0/pywws
|
src/pywws/device_pyusb.py
|
Python
|
gpl-2.0
| 5,584
|
import os,sys,urllib2
import xbmcplugin,xbmcgui
import xml.etree.ElementTree as ET
__addon__ = "SomaFM"
__addonid__ = "plugin.audio.somafm"
__version__ = "0.0.2"
def log(msg):
print "[PLUGIN] '%s (%s)' " % (__addon__, __version__) + str(msg)
log("Initialized!")
log(sys.argv)
rootURL = "http://somafm.com/"
#pluginPath = sys.argv[0]
handle = int(sys.argv[1])
query = sys.argv[2]
def getHeaders(withReferrer=None):
headers = {}
headers['User-Agent'] = 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3'
if withReferrer:
headers['Referrer'] = withReferrer
return headers
def getHTMLFor(url, withData=None, withReferrer=None):
url = rootURL + url
log("Get HTML for URL: " + url)
req = urllib2.Request(url, withData, getHeaders(withReferrer))
response = urllib2.urlopen(req)
data = response.read()
response.close()
return data
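# Abridged sketch of the channels.xml structure addEntries() expects
# (element names taken from the find/findall calls below):
#
#   <channels>
#     <channel>
#       <title>...</title>
#       <description>...</description>
#       <largeimage>...</largeimage> <!-- optional -->
#       <image>...</image>
#       <fastpls>...</fastpls>
#     </channel>
#   </channels>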
def addEntries():
somaXML = getHTMLFor(url="channels.xml")
channelsContainer = ET.fromstring(somaXML)
for stations in channelsContainer.findall(".//channel"):
title = stations.find('title').text
description = stations.find('description').text
if stations.find('largeimage') is not None:
img = rootURL + stations.find('largeimage').text.replace(rootURL,"")
else:
img = rootURL + stations.find('image').text.replace(rootURL,"")
url = rootURL + stations.find('fastpls').text.replace(rootURL,"")
log(title)
log(description)
log(img)
log(url)
li = xbmcgui.ListItem(title, description, thumbnailImage=img)
li.setProperty("IsPlayable","true")
xbmcplugin.addDirectoryItem(
handle=handle,
url=url,
listitem=li)
addEntries()
xbmcplugin.endOfDirectory(handle)
|
nils-werner/xbmc-somafm
|
default.py
|
Python
|
gpl-2.0
| 1,907
|
"""
Nonlinear cartoon+texture decomposition ipol demo web app
"""
from lib import base_app, build, http, image
from lib.misc import ctime
from lib.misc import prod
from lib.base_app import init_app
import shutil
import cherrypy
from cherrypy import TimeoutError
import os.path
import time
from math import ceil
class app(base_app):
""" nonlinear cartoon+texture decomposition """
title = "Cartoon+Texture Image Decomposition"
xlink_article = 'http://www.ipol.im/pub/art/2011/blmv_ct/'
input_nb = 1
input_max_pixels = 700 * 700 # max size (in pixels) of an input image
input_max_weight = 10 * 1024 * 1024 # max size (in bytes) of an input file
input_dtype = '3x8i' # input image expected data type
input_ext = '.png' # input image expected extension (ie file format)
is_test = False
def __init__(self):
"""
app setup
"""
# setup the parent class
base_dir = os.path.dirname(os.path.abspath(__file__))
base_app.__init__(self, base_dir)
# select the base_app steps to expose
# index() and input_xxx() are generic
base_app.index.im_func.exposed = True
base_app.input_select.im_func.exposed = True
base_app.input_upload.im_func.exposed = True
# params() is modified from the template
base_app.params.im_func.exposed = True
# result() is modified from the template
base_app.result.im_func.exposed = True
def build(self):
"""
program build/update
"""
# store common file path in variables
tgz_url = "http://www.ipol.im/pub/art/2011/blmv_ct/srcB.tar.gz"
tgz_file = self.dl_dir + "srcB.tar.gz"
progs = ["cartoonIpol"]
src_bin = dict([(self.src_dir + os.path.join("srcB", prog),
self.bin_dir + prog)
for prog in progs])
log_file = self.base_dir + "build.log"
# get the latest source archive
build.download(tgz_url, tgz_file)
# test if any dest file is missing, or too old
if all([(os.path.isfile(bin_file)
and ctime(tgz_file) < ctime(bin_file))
for bin_file in src_bin.values()]):
            cherrypy.log("no rebuild needed",
context='BUILD', traceback=False)
else:
# extract the archive
build.extract(tgz_file, self.src_dir)
# build the programs
build.run("make -j4 -C %s %s"
% (self.src_dir + "srcB", " ".join(progs)),
stdout=log_file)
# save into bin dir
if os.path.isdir(self.bin_dir):
shutil.rmtree(self.bin_dir)
os.mkdir(self.bin_dir)
for (src, dst) in src_bin.items():
shutil.copy(src, dst)
# cleanup the source dir
shutil.rmtree(self.src_dir)
return
#
# PARAMETER HANDLING
#
def select_subimage(self, x0, y0, x1, y1):
"""
cut subimage from original image
"""
# draw selected rectangle on the image
imgS = image(self.work_dir + 'input_0.png')
imgS.draw_line([(x0, y0), (x1, y0), (x1, y1), (x0, y1), (x0, y0)],
color="red")
imgS.draw_line([(x0+1, y0+1), (x1-1, y0+1), (x1-1, y1-1), (x0+1, y1-1),
(x0+1, y0+1)], color="white")
imgS.save(self.work_dir + 'input_0s.png')
# crop the image
# try cropping from the original input image (if different from input_0)
im0 = image(self.work_dir + 'input_0.orig.png')
dx0 = im0.size[0]
img = image(self.work_dir + 'input_0.png')
dx = img.size[0]
if (dx != dx0) :
z = float(dx0)/float(dx)
im0.crop((int(x0*z), int(y0*z), int(x1*z), int(y1*z)))
# resize if cropped image is too big
if self.input_max_pixels and prod(im0.size) > self.input_max_pixels:
im0.resize(self.input_max_pixels, method="antialias")
img = im0
else :
img.crop((x0, y0, x1, y1))
# save result
img.save(self.work_dir + 'input_0.sel.png')
return
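    # Example of the mapping above (hypothetical sizes): if the original
    # image is 1400 px wide but displayed at 700 px, then z = 2 and a
    # selection (x0, y0)-(x1, y1) on the displayed image crops
    # (2*x0, 2*y0)-(2*x1, 2*y1) from the original.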
@cherrypy.expose
@init_app
def params(self, newrun=False, msg=None, x0=None, y0=None,
x1=None, y1=None, scale="3.0"):
"""
configure the algo execution
"""
if newrun:
self.clone_input()
if x0:
self.select_subimage(int(x0), int(y0), int(x1), int(y1))
return self.tmpl_out("params.html", msg=msg, x0=x0, y0=y0,
x1=x1, y1=y1, scale=scale)
@cherrypy.expose
@init_app
def rectangle(self, action=None, scale=None,
x=None, y=None, x0=None, y0=None):
"""
select a rectangle in the image
"""
if action == 'run':
            if x is None:
#save parameter
try:
self.cfg['param'] = {'scale' : scale}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect scale parameter.")
else:
#save parameters
try:
self.cfg['param'] = {'scale' : scale,
'x0' : int(x0),
'y0' : int(y0),
'x1' : int(x),
'y1' : int(y)}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect parameters.")
# use the whole image if no subimage is available
try:
img = image(self.work_dir + 'input_0.sel.png')
except IOError:
img = image(self.work_dir + 'input_0.png')
img.save(self.work_dir + 'input_0.sel.png')
# go to the wait page, with the key
http.redir_303(self.base_url + "wait?key=%s" % self.key)
return
else:
# use a part of the image
            if x0 is None:
# first corner selection
x = int(x)
y = int(y)
# draw a cross at the first corner
img = image(self.work_dir + 'input_0.png')
img.draw_cross((x, y), size=4, color="white")
img.draw_cross((x, y), size=2, color="red")
img.save(self.work_dir + 'input.png')
return self.tmpl_out("params.html", scale=scale, x0=x, y0=y)
else:
# second corner selection
x0 = int(x0)
y0 = int(y0)
x1 = int(x)
y1 = int(y)
# reorder the corners
(x0, x1) = (min(x0, x1), max(x0, x1))
(y0, y1) = (min(y0, y1), max(y0, y1))
assert (x1 - x0) > 0
assert (y1 - y0) > 0
#save parameters
try:
self.cfg['param'] = {'scale' : scale,
'x0' : x0,
'y0' : y0,
'x1' : x1,
'y1' : y1}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect parameters.")
#select subimage
self.select_subimage(x0, y0, x1, y1)
# go to the wait page, with the key
http.redir_303(self.base_url + "wait?key=%s" % self.key)
return
@cherrypy.expose
@init_app
def wait(self):
"""
run redirection
"""
http.refresh(self.base_url + 'run?key=%s' % self.key)
return self.tmpl_out("wait.html")
@cherrypy.expose
@init_app
def run(self):
"""
algorithm execution
"""
# read the parameters
scale = self.cfg['param']['scale']
# run the algorithm
stdout = open(self.work_dir + 'stdout.txt', 'w')
try:
run_time = time.time()
self.run_algo(scale, stdout=stdout)
self.cfg['info']['run_time'] = time.time() - run_time
except TimeoutError:
return self.error(errcode='timeout')
except RuntimeError:
return self.error(errcode='runtime')
stdout.close()
http.redir_303(self.base_url + 'result?key=%s' % self.key)
# archive
if self.cfg['meta']['original']:
ar = self.make_archive()
ar.add_file("input_0.orig.png", info="uploaded image")
# save processed image (if different from uploaded)
im0 = image(self.work_dir + 'input_0.orig.png')
dx0 = im0.size[0]
img = image(self.work_dir + 'input_0.png')
dx = img.size[0]
imgsel = image(self.work_dir + 'input_0.sel.png')
dxsel = imgsel.size[0]
if (dx != dx0) or (dxsel != dx):
ar.add_file("input_0.sel.png", info="original input image")
ar.add_file("cartoon.png", info="cartoon image")
ar.add_file("texture.png", info="texture image")
ar.add_info({"scale": scale})
ar.save()
return self.tmpl_out("run.html")
def run_algo(self, scale, stdout=None, timeout=False):
"""
the core algo runner
could also be called by a batch processor
this one needs no parameter
"""
#cartoon-texture images
p = self.run_proc(['cartoonIpol', 'input_0.sel.png', str(scale),
'cartoon.png', 'texture.png'],
stdout=None, stderr=None)
self.wait_proc(p, timeout)
@cherrypy.expose
@init_app
def result(self):
"""
display the algo results
"""
# read the parameters
scale = self.cfg['param']['scale']
try:
x0 = self.cfg['param']['x0']
except KeyError:
x0 = None
try:
y0 = self.cfg['param']['y0']
except KeyError:
y0 = None
try:
x1 = self.cfg['param']['x1']
except KeyError:
x1 = None
try:
y1 = self.cfg['param']['y1']
except KeyError:
y1 = None
(sizeX, sizeY)=image(self.work_dir + 'input_0.sel.png').size
# Resize for visualization (new size of the smallest dimension = 200)
zoom_factor = None
if (sizeX < 200) or (sizeY < 200):
if sizeX > sizeY:
zoom_factor = int(ceil(200.0/sizeY))
else:
zoom_factor = int(ceil(200.0/sizeX))
sizeX = sizeX*zoom_factor
sizeY = sizeY*zoom_factor
im = image(self.work_dir + 'input_0.sel.png')
im.resize((sizeX, sizeY), method="pixeldup")
im.save(self.work_dir + 'input_0_zoom.sel.png')
im = image(self.work_dir + 'cartoon.png')
im.resize((sizeX, sizeY), method="pixeldup")
im.save(self.work_dir + 'cartoon_zoom.png')
im = image(self.work_dir + 'texture.png')
im.resize((sizeX, sizeY), method="pixeldup")
im.save(self.work_dir + 'texture_zoom.png')
return self.tmpl_out("result.html", scale=scale,
x0=x0, y0=y0, x1=x1, y1=y1,
sizeY=sizeY, zoom_factor=zoom_factor)
|
juan-cardelino/matlab_demos
|
ipol_demo-light-1025b85/app_available/blmv_nonlinear_cartoon_texture_decomposition/app.py
|
Python
|
gpl-2.0
| 11,626
|
# HRGRN WebServices
# Copyright (C) 2016 Xinbin Dai, Irina Belyaeva
# This file is part of HRGRN WebServices API.
#
# HRGRN API is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# HRGRN API is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with HRGRN API. If not, see <http://www.gnu.org/licenses/>.
"""
API Exception Module
"""
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
no_geneID_parameter_error_msg = "No geneID/locus has been submitted!"
no_geneID_error_msg = "No node information found for geneID: "
# This class defines the "not found" exception
class NotFound(Exception):
pass
# This class defines the "invalid parameter" exception
class InvalidParameter(Exception):
pass
# This class defines the "empty response" exception
class EmptyResponse(Exception):
pass
# This function parses an ADAMA API error response
def parse_error(response):
_key_message = 'message'
_key_exception_type = 'exception'
if _key_message in response.keys():
message = response[_key_message]
if _key_exception_type in response.keys():
exception_type = response[_key_exception_type]
index = -1
if len(message) > 0 and exception_type == 'APIException':
index = message.rfind('API error')
log.debug("Index:" + str(index))
if index > -1:
message = message[len('API error')+1:len(message)]
log.debug("Error message:" + message)
return message
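# Hedged usage sketch (the response shape below is an assumption inferred from
# the parsing above, not taken from ADAMA documentation):
#   parse_error({'exception': 'APIException',
#                'message': 'API error no node information found'})
#   # -> 'no node information found'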
|
Arabidopsis-Information-Portal/hrgrn_webservices
|
services/hrgrn_search_path_by_locus/exception.py
|
Python
|
gpl-2.0
| 1,942
|
#*****************************************************************************
# Copyright (C) 2017 Lee Worden <worden dot lee at gmail dot com>
#
# Distributed under the terms of the GNU General Public License (GPL) v.2
# http://www.gnu.org/licenses/
#*****************************************************************************
import graph_latex_patched
from sage.all import *
import dynamicalsystems
from sage.misc.latex import _latex_file_
#from sage.symbolic.relation import solve
from sage.symbolic.function_factory import function
# constant 'enum' values for use with indexing
class deps:
index, sumover = range(0,2)
def plot_boxmodel_graph( g, filename=None, inline=False, figsize=(6,6), empty_vertices=(), ellipsis_vertices=(), **options ):
import itertools
#print 'ellipsis vertices:', ellipsis_vertices
lopts = {
'graphic_size': figsize,
'edge_labels': True,
'edge_thickness' : 0.02,
#'edge_fills': True,
#'edge_color': 'white',
#'edge_thickness': 0.05
'vertex_shape': 'rectangle',
'vertices_empty': { x:True for x in empty_vertices },
'vertex_colors': { x:'white' for x in ellipsis_vertices },
#'vertex_label_colors': { x:'white' for x in self._sources | self._sinks }
}
graph_latex_patched.setup_latex_preamble()
gop = graph_latex_patched.GraphLatex(g)
if inline:
lopts['margins'] = (0.5,0.5,0.5,0.5)
lopts.update( options )
#print 'lopts:',lopts
if 'latex_options' in options:
g.set_latex_options( **(options['latex_options']) )
gop.set_options( **lopts )
gl = gop.latex()
xp = ''
if inline:
#LT = '\n\\vspace{24pt}\n' + gl + '\n\\vspace{24pt}\n'
LT = gl
else:
if figsize[0] > 6.75 or figsize[1] > 9:
latex.add_package_to_preamble_if_available('geometry')
xp = '\\geometry{papersize={' + str(figsize[0] + 10) + 'cm,' + str(figsize[1] + 20) + 'cm}}\n'
LT = _latex_file_( dynamicalsystems.wrap_latex( gl ), title='', extra_preamble=xp )
if filename is not None:
#print 'plot to', filename
LF = open( filename, 'w' )
LF.write( LT )
LF.close()
return LT
## see BoxModel.plot_boxes() method below
## this is a transformation that supports plotting a box model
## graph using per capita flow rates rather than absolute rates
def per_capita_rates(g):
def to_per_capita(r,s):
if s in r.variables(): return (r/s).collect_common_factors().expand()
else:
            print 'Warning: rate', str(r), 'not converted to per capita'
return r
return DiGraph(
[ (v,w,to_per_capita(e,v)) for v,w,e in g.edge_iterator() ],
multiedges=True,
pos = g.get_pos()
)
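## Hedged usage sketch (illustrative; `m` stands for any BoxModel instance
## defined below): m.plot_boxes( 'sir.tex', transform_graph=per_capita_rates )
## labels each arrow with its per-capita rate, e.g. beta*S*I out of S
## becomes beta*I.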
class BoxModel(SageObject):
"""Parent class for all kinds of box models.
Note that since this gets its variables from a graph's vertices,
rather than from indexers, it can't be used in adaptive dynamics.
Subclasses that generate their boxes, maybe can.
"""
def __init__(self, graph,
vars=None,
parameters=None,
parameter_dependencies={},
sources=(),
sinks=(),
aggregate_names=(),
bindings=dynamicalsystems.Bindings()):
# we are given a directed graph whose vertex labels are state
# variables, representing fractions of total population,
# and whose edge labels are flow rates.
try:
graph.edge_iterator()
except AttributeError:
try:
self.__init__( graph._graph, graph._vars, sources=graph._sources, sinks=graph._sinks, aggregate_names=graph._aggregate_names, bindings=graph._bindings )
return
except AttributeError:
graph = DiGraph(graph)
self._graph = graph
self._graph.set_latex_options( edge_labels=True )
self._sources = Set( sources )
self._sinks = Set( sinks )
self._aggregate_names = aggregate_names
if vars is None:
vars = Set( graph.vertices() ) - self._sources - self._sinks
self._vars = list(vars)
print 'vars', self._vars, 'sources', self._sources, 'sinks', self._sinks
def getvars(r):
try: return r.variables()
except AttributeError: return []
if parameters is None:
# avoid namespace confusion with product.union
#print 'make parameters'; sys.stdout.flush()
parameters = sorted( list(
reduce(
lambda x,y: x.union(y),
(set(getvars(r)) for f,t,r in graph.edges()),
set()
).difference(
self._vars, self._sources, self._sinks, self._aggregate_names
)
), key=str )
#print 'made parameters'; sys.stdout.flush()
self._parameters = parameters
print 'parameters:', parameters
if False:
self._parameter_dependencies = parameter_dependencies
for p in self._parameters:
if p not in self._parameter_dependencies:
# infer connections between parameters and compartmentalization
# for now, simple rule:
# just connect it to the source variable of its arrow
# TODO: inference including defined quantities like N
#print 'infer dependencies for parameter', p
for v,w,e in self._graph.edges():
try: vs = getvars(e)
except AttributeError: vs = []
if p in vs:
pd = [ v ]
#print 'found', p, 'in arrow', e
#print 'infer dependency on', v
if p in self._parameter_dependencies and self._parameter_dependencies[p] != pd:
#print 'but already inferred', self._parameter_dependencies[p]
#print 'dependencies of parameter', p, 'are unclear, inferring no dependencies'
pd = []
self._parameter_dependencies[p] = pd
for p, pd in self._parameter_dependencies.items():
try: [ d[0] for d in pd ]
except: self._parameter_dependencies[p] = [ (d,deps.index) for d in pd ]
#print 'parameter dependencies:', self._parameter_dependencies
self._bindings = bindings
if self._graph.get_pos() is None:
pos = { v:(i,0) for i,v in enumerate(self._vars) }
pos.update( { v:(-1,i) for i,v in enumerate(self._sources) } )
pos.update( { v:(xx,i) for i,v in enumerate(self._sinks) for xx in (max(x for x,y in pos.itervalues()),) } )
self._graph.set_pos( pos )
def bind(self, *args, **vargs):
bindings = dynamicalsystems.Bindings( *args, **vargs )
bound_graph = DiGraph( [
(bindings(v),bindings(w),bindings(e)) for v,w,e in self._graph.edge_iterator()
],
multiedges=True,
pos = { bindings(v):p for v,p in self._graph.get_pos().items() } if self._graph.get_pos() is not None else None
)
return BoxModel(
bound_graph,
vars = [ bindings(v) for v in self._vars ],
sources = Set( bindings(v) for v in self._sources ),
sinks = Set( bindings(v) for v in self._sinks ),
parameters = [ bindings(p) for p in self._parameters ],
parameter_dependencies = {
bindings(p):[(bindings(d),t) for d,t in pd] for p,pd in self._parameter_dependencies.items()
},
aggregate_names = self._aggregate_names,
bindings = self._bindings + bindings
)
def add_transitions( self, trs ):
# We take BoxModel to be an immutable object, so this operation
# returns a new BoxModel. trs is a list of (source,target,rate)
# tuples suitable for adding to self._graph
#print 'add_transitions', trs
#print 'parameters before', self._parameters
nbm = deepcopy(self)
nbm._graph.add_edges( trs )
#print self._vars
        for f,t,r in trs:
            try:
                #print r.variables()
                new_params = set( r.variables() ).difference( self._vars, self._aggregate_names )
                ## _parameters is a sorted list (see __init__), so rebuild it
                ## rather than calling list.update(), which does not exist
                nbm._parameters = sorted( set( nbm._parameters ) | new_params, key=str )
            except AttributeError: pass
#print 'parameters after', nbm._parameters
return nbm
def reorder_latex_variables( self, ex ):
#return ex
# Sage likes to write "I S \beta" in unicode or whatever order -
# we want "\beta S I", and more generally, first parameters and
# then compartment names, in a sort of order given by the flow
# of the transitions. Here we use left-to-right, top-to-bottom
# order based on the positions given for compartments.
# this function returns a sort of pseudo-expression that's only
# suitable for printing, not for doing math with
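        # Illustrative example (not from the source): Sage may render beta*S*I
        # as "I S \beta"; after sorting we typeset it as "\beta S I".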
try: self._sorter
except AttributeError:
from collections import defaultdict
sort_order_map = dict(
## parameters first, Greek letters before Roman
[ (latex(v),(T,T)) for v in self._parameters for T in [-1e+10 if latex(v)[0] == '\\' or latex(v)[0:2] == '{\\' else -0.9e+10] ] +
## then compartment names, in order of the graph layout
[ (latex(vv),(pp[0],-pp[1])) for vv,pp in self._graph.get_pos().items() ] +
## then any aggregate names
[ (latex(v),(1e+10,1e+10)) for v in self._aggregate_names ]
)
# this converter is defined later in this file
self._sorter = sort_latex_variables(
## parameters then compartments
sort_order_map,
## numbers before anything
order_numbers_as=(-1e+12,-1e+12),
## other expressions just after numbers
order_unknown_as=(-1e+11,-1e+11)
)
#print 'use', self._sorter._map, 'on', latex(ex)
try: return self._sorter( ex )
except AttributeError: # ex is not an expression
return ex
def __repr__(self):
try:
return '(BoxModel with compartments ' + str(tuple(self._vars)) + ')'
except AttributeError: # _vars not assigned yet
return '(BoxModel)'
def plot_boxes( self, filename=None, inline=False, figsize=(6,6), transform_graph=None, ellipsis_vertices=(), **options ):
g = self._graph
## apply the user-supplied transform if any
## for example, use transform_graph=per_capita_rates to
## plot using per capita rather than absolute flow rates
if transform_graph is not None:
g = transform_graph(g)
try:
## free_product may assign this member
ellipsis_vertices = Set( self._ellipsis_vars )
def ellipsize( g ):
def ellipsize_vertex( v ):
if v in ellipsis_vertices:
                        return SR.symbol( str(v), latex_name=r'\ldots' )
else:
return v
return DiGraph( [
( ellipsize_vertex(v), ellipsize_vertex(w), r )
for v,w,r in g.edge_iterator()
],
pos = { ellipsize_vertex(v):p for v, p in g.get_pos().iteritems() }
)
g = ellipsize(g)
except AttributeError:
ellipsis_vertices = ()
## tweak the latex representation of the rates
g = DiGraph(
[ g.vertices(), [ (v,w,self.reorder_latex_variables(e)) for v,w,e in g.edge_iterator() ] ],
format='vertices_and_edges',
multiedges=True,
pos = g.get_pos()
)
#print 'plot_boxes, sources', self._sources, ', sinks', self._sinks
return plot_boxmodel_graph( g,
filename=filename,
inline=inline,
figsize=figsize,
empty_vertices=self._sources | self._sinks,
ellipsis_vertices=ellipsis_vertices,
**options
)
def plot( self, *args, **aargs ):
def lx(s): return '$%s$'%latex(s)
lfg = DiGraph(
[[lx(s) for s in tup] for tup in self._graph.edge_iterator() ],
multiedges=True
)
vargs = {
'edge_labels' : True,
'talk' : True
}
if 'pos' not in aargs and self._graph.get_pos() is not None:
vargs['pos'] = { lx(v) : p for v,p in self._graph.get_pos().items() }
vargs.update( aargs )
#print 'plot vargs:', vargs
return lfg.plot( *args, **vargs )
def transpose_graph_in_place( self ):
self._graph.set_pos( { v:(-y,-x) for v,(x,y) in self._graph.get_pos().iteritems() } )
def transpose_graph( self ):
nm = deepcopy( self )
nm.transpose_graph_in_place()
return nm
def aggregate_compartments( self, compartment_aggregation ):
aggregate = {}
for vt in self._graph.vertex_iterator():
## what if vt is simple and doesn't have operands
aggregate.setdefault( tuple( compartment_aggregation( vt.operands() ) ), [] ).append( vt.operands() )
## aggregate is { new vertex: [old vertices], ... }
print 'aggregate:', aggregate
flow_sums = {}
for v in self._graph.vertex_iterator():
av = compartment_aggregation( v )
if av not in flow_sums: flow_sums[av] = {}
for _,w,e in self._graph.outgoing_edge_iterator(v):
aw = compartment_aggregation( w )
flow_sums[av].setdefault( aw, SR(0) )
flow_sums[av][aw] += e
## flow_sums[av][aw] is sum of all transitions from
## (aggregated vertex) av to aw
## transitions are in terms of old vertex names
## now do substitutions to transform the transition sums
agg_eqns, agg_symbols = [], []
agg_subs = dynamicalsystems.Bindings()
for newt,oldts in aggregate.items():
print 'will combine', sum( oldts ), '==', newt
agg_symbols.append( oldts[0] )
agg_eqns.append( oldts[0] == newt - sum( oldts[1:] ) )
agg_graph_dict = {}
for av, ve in flow_sums.iteritems():
agg_graph_dict[av] = {}
for aw, e in ve.iteritems():
sym = SR.symbol()
print e,
solns = solve( [ sym == e ] + agg_eqns, sym, *agg_symbols, solution_dict=True )
#print 'solve', [ sym == e ] + agg_eqns, ',', [sym] + agg_symbols, '\n ', solns
if len(solns) == 1:
#print ' ', maxima(sym), [str(k) == str(sym) for k in solns[0].keys()]
el = [ex for k,ex in solns[0].items() if str(k) == str(sym)]
print '==>', el[0]
agg_graph_dict[av][aw] = el[0]
else:
raise RuntimeError, 'Could not simplify expression ' + str(e) + ':' + str(solns)
print 'agg_graph_dict', agg_graph_dict
#self._vc_eqns = vc_eqns
## make list of transformed variables
## they are in those dicts, but we want the order
agg_vars = []
for v in self._vars:
av = compartment_aggregation( v )
if av not in agg_vars: agg_vars.append(av)
print 'agg_vars', agg_vars
## position the aggregates by matching them to a subset of original
## compartments
apos = {}
for t,p in self._graph.get_pos().iteritems():
at = compartment_aggregation( t )
if at not in apos: apos[at] = p
print 'apos', apos
        return BoxModel( DiGraph( agg_graph_dict, pos=apos ), agg_vars )
def combine_arrows( self ):
#return self.aggregate_compartments( lambda x:x )
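        ## e.g. (illustrative): two parallel arrows S -> I with rates a and b
        ## are merged into a single arrow S -> I with rate a + b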
d = {}
for v,w,r in self._graph.edge_iterator():
d[(v,w)] = d.get( (v,w), 0 ) + r
ee = [ (v,w,r) for (v,w),r in d.iteritems() ]
b = BoxModel( DiGraph( ee, pos=self._graph.get_pos() ), self._vars )
return b
def separate_arrows( self ):
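        ## e.g. (illustrative): an arrow with rate a + b is split into two
        ## parallel arrows with rates a and b, the inverse of combine_arrows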
plus = SR('x+1').operator()
def terms_iterator( e ):
e = e.expand()
if e.operator() == plus:
for t in e.operands():
for tt in terms_iterator(t):
                        yield tt
else:
yield e
return BoxModel( DiGraph(
[ (v,w,ee) for v,w,e in self._graph.edge_iterator() for ee in terms_iterator(e) ],
pos = self._graph.get_pos(),
multiedges=True
),
self._vars
)
def jump_process(self):
try:
self._jump_process
except AttributeError:
#print 'making BoxModel JumpProcess'
nvars = self._sources | self._sinks
vars = [ v for v in self._vars if v not in nvars ]
var_index = { v:i for i,v in enumerate(vars) }
#var_index.update( { v:None for v in nvars } )
#for x in self._sources.union( self._sinks ):
# var_index[x] = None
#print 'var_index:',var_index
def to_r( s, t ):
r = [ 0 for v in vars ]
if s in var_index:
r[var_index[s]] = -1
if t in var_index:
r[var_index[t]] = 1
return r
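            # e.g. (illustrative) with vars == [S, I], the transition S -> I
            # yields the state-change vector to_r(S, I) == [-1, 1]; source and
            # sink vertices contribute no entry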
self._jump_process = dynamicalsystems.JumpProcess(
vars,
[ (to_r(s,t),rate) for s,t,rate in self._graph.edges() ],
bindings=self._bindings
)
return self._jump_process
## for forward_equations see boxkolmogorov.py
def backward_equations(self, N, q_name='q'):
return self.jump_process().backward_equations(N,q_name)
def generator_matrix( self, N, rate_ring=QQ ):
return self.jump_process().generator_matrix(N, rate_ring)
def ode_flow(self):
return self.jump_process().deterministic_flow()
def ode(self, time_variable=SR.symbol('t'), bindings=dynamicalsystems.Bindings()):
return self.jump_process().deterministic_ode(time_variable, bindings)
def difference_equation(self,
step=1, time_variable=SR.symbol('t'), bindings=dynamicalsystems.Bindings()):
return self.jump_process().approximate_deterministic_difference_equation(
step=step, time_variable=time_variable, bindings=bindings
)
def micro_transitions( self ):
# This could produce micro transitions but it isn't right so far
# TODO: move this to JumpProcess
# (in addition to making it work)
ltx = dynamicalsystems.latex_output_base( dynamicalsystems.write_to_string() )
lines = []
for source, target, rate in self._graph.edge_iterator():
mu = MakeMicro( self, source )
ut = mu( rate )
#print str(ut); sys.stdout.flush()
lines += [ r' & ' + latex(mu.sigma_fn(SR('x'))) + r'\to' + latex(target)
+ r' \quad\text{ at rate } '
+ latex( ut )
]
ltx.write_align( *lines )
return ltx._output._str
# useful parent class: expression converter that doesn't
# do anything
from sage.symbolic.expression_conversions import SubstituteFunction
class IdentityConverter(SubstituteFunction):
def __init__(self):
pass
def composition(self, ex, operator):
# override the parent class's function replacing step
return operator(*map(self, ex.operands()))
class MakeMicro(IdentityConverter):
_mul = SR('a*b').operator()
from sage.symbolic.function_factory import function
delta_fn = function('delta', latex_name=r'\delta')
sigma_fn = function('sigma', print_latex_func=lambda self, x:r'\sigma_{%s}' % latex(x))
bm_sum = function( 'sum', print_latex_func=lambda self, x, s, ex:r'\sum_{%s\in %s}%s' %( latex(x), latex(s), latex(ex) ) )
bm_indicator = function( 'indicator', print_latex_func=lambda self, ev:r'\mathbb{1}\left(%s\right)' % latex(ev) )
bm_index_param = function( 'bm_index_param' )
def __init__(self, model, source):
self._model = model
self._source = source
self._working = False
self._tags = { s : SR.symbol( 'text'+str(s), latex_name=r'\texttt{%s}'%str(s) ) for s in self._model._vars }
def __call__(self, ex):
if self._working:
return super(MakeMicro,self).__call__(ex)
self._working = True
tx = super(MakeMicro,self).__call__( ex / self._source )
self._working = False
return (
self.bm_indicator( self.sigma_fn( SR.symbol('x') ) == self._tags[self._source] ) *
tx.subs( { s : self.bm_sum( SR.symbol('y'), SR.symbol('X'), 1 / SR('N') * self.bm_indicator( self.sigma_fn( SR('y') ) == self._tags[s] ) ) for s in self._model._vars } )
)
def arithmetic(self, ex, operator):
# do special handling to products of things, before processing the
# things, to catch inner products
if operator == self._mul:
return self.do_inner_product( *ex.operands() )
else:
            return reduce( operator, map(self, ex.operands()) )
def symbol(self, s):
return self.do_inner_product( s ) # just in case
def do_inner_product(self, *args):
# leave multiplications as is, except in the case of a
# parameter dependency marked "sumover": convert that from
# a regular multiplication to an inner product.
#print 'processing product', args
margs = list(args)
sumover = []
dummy_list = ['y', 'z', 'u', 'v', 'w', 's', 't', 'p', 'q', 'r']
for p,pd in self._model._parameter_dependencies.items():
if p in margs:
#print 'found', p, 'in factors:', args
if all( d in margs + [self._source] for d,x in pd ):
#print 'found all of its deps', [d for d,x in pd], 'as well'
indices_for_p = []
p_times = SR(1)
for d,ss in pd:
if ss == deps.sumover:
dummy_var = SR.symbol( dummy_list.pop(0) )
indices_for_p.append( dummy_var )
sumover.append( dummy_var )
#print 'will sum over', dummy_var, 'in', d; sys.stdout.flush()
margs[margs.index(d)] = 1
p_times *= self.bm_indicator( self.sigma_fn( dummy_var ) == self._tags[d] )
#print 'made it through equality'; sys.stdout.flush()
elif d == self._source:
indices_for_p += [SR('x')]
else:
raise ValueError, 'I am confused about dependence on ' + str(d)
index_of_p = margs.index(p)
margs[index_of_p] = self.bm_index_param( p, *indices_for_p ) * p_times
for dv in reversed(sumover):
margs[index_of_p] = self.bm_sum( dv, SR.symbol('X'), 1 / SR('N') * margs[index_of_p] )
margs[index_of_p] = margs[index_of_p].substitute_function(
self.bm_index_param,
lambda *args: dynamicalsystems.subscriptedsymbol( *args )
)
#print margs
else:
raise RuntimeError, (
"Missing parameter dependencies in expression " +
str( reduce( self._mul, args ) )
)
expr = reduce( self._mul, margs )
#print 'becomes', expr
return expr
class sort_latex_variables(sage.symbolic.expression_conversions.ExpressionTreeWalker):
def __init__(self, sort_order_map, order_numbers_as=-oo, order_unknown_as=oo):
#print 'sort_order_map is', sort_order_map
self._map = sort_order_map
self._number_order = order_numbers_as
self._unknown_order = order_unknown_as
return super(sort_latex_variables,self).__init__(SR(0))
def arithmetic(self, ex, operator):
if operator == (2*SR.symbol('x')).operator():
#print 'reorder latex product of', ex.operands()
## sort the factors in a multiplication
def keyfn(x):
try:
return self._map[latex(x)]
except KeyError:
if x.is_numeric(): return self._number_order
else: return self._unknown_order
ll = sorted( ex.operands(), key=keyfn )
minusop = (SR.symbol('x')-1).operator() # it's actually +
## special case: a factor is -(x-1) :
## we will write that as (1-x)
## if there's a factor of -1, look for a subtraction
rev = [ e for e in ll if e.operator() == minusop ] if -1 in ll else []
if len( rev ) > 0:
## there will only be one -1
ll = [ e for e in ll if e != -1 ]
rev = rev[:1]
#print 'will reverse', rev
## if there are factors of y^-1
## we will put those as y in a denominator
denom = [ d for d in ll if
d.operator()==(1/SR.symbol('x')).operator()
and d.operands()[1] == SR(-1)
]
ll = [ n for n in ll if n not in denom ]
denom = [ 1/d for d in denom ]
## function to render each factor in latex
def to_lx( ex, within ):
## subtractions
if ex.operator() == minusop:
## if reversed, write backwards
if ex in rev:
return r'\left({}-{}\right)'.format(latex(-ex.operands()[1]),latex(ex.operands()[0]))
## otherwise, write forwards
#else:
#return ''.join( (r'\left(',latex(ex),r'\right)') )
## write additions
if ex.operator() == (SR.symbol('x')+1).operator() and within:
#print 'add () to', ex
return r'\left({}\right)'.format(latex(ex))
## if it's a compound symbol, put it in parens
if ex.is_symbol():
lx = latex(ex)
lxinner = lx
while lxinner[0] == '{' and lxinner[-1] == '}':
lxinner = lxinner[1:-1]
if len(lxinner) > 1 and '_' not in lxinner and '^' not in lxinner and not( lxinner[0] == '\\' and lxinner[1:].isalpha() ):
#print 'add () to', lxinner
return r'\left({}\right)'.format(lxinner)
else:
#print 'a symbol:', lx
return lx
## anything else, use default latex rendering
#print ' default latex,', latex(ex)
return latex(ex)
## combine the factors in the numerator
#print ll
lname = ' '.join(to_lx(v, len(ll)>1) for v in ll)
## if any factors in denominator, combine them and make fraction
if len(denom) > 0:
#print '/', denom
lden = ' '.join(to_lx(d, len(denom)>1) for d in denom)
lname = r'\frac{'+lname+'}{'+lden+'}'
#print latex(ex), ' ==> ', lname
Msym = SR.symbol( 'M_{}'.format( ZZ.random_element(1e+10) ), latex_name=lname )
return Msym
elif ( operator == (2+SR.symbol('x')).operator() and
ex.operands()[0].operator() == (2*SR.symbol('x')).operator() and
SR(-1) in ex.operands()[0].operands() and
ex.operands()[1] == 1):
#print 'normalise', latex(ex), 'to 1-x form'
lname = latex(ex.operands()[1])+'-'+latex(-ex.operands()[0])
Msym = SR.symbol( 'M_{}'.format( ZZ.random_element(1e+10) ), latex_name=lname )
return Msym
#print 'typeset', latex(ex), 'as is'
#print 'operator is', str(ex.operator())
return super(sort_latex_variables,self).arithmetic(ex,operator)
|
tcporco/SageBoxModels
|
boxmodel/boxmodel.py
|
Python
|
gpl-2.0
| 26,560
|
#!/usr/bin/env python
#
# Copyright (c) 2016 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
import logging
from string import Template
from builtins_generator import BuiltinsGenerator, WK_lcfirst, WK_ucfirst
from builtins_templates import BuiltinsGeneratorTemplates as Templates
log = logging.getLogger('global')
class BuiltinsInternalsWrapperImplementationGenerator(BuiltinsGenerator):
def __init__(self, model):
BuiltinsGenerator.__init__(self, model)
self.internals = filter(lambda object: 'internal' in object.annotations, model.objects)
def output_filename(self):
return "%sJSBuiltinInternals.cpp" % self.model().framework.setting('namespace')
def generate_output(self):
args = {
'namespace': self.model().framework.setting('namespace'),
}
sections = []
sections.append(self.generate_license())
sections.append(Template(Templates.DoNotEditWarning).substitute(args))
sections.append(self.generate_primary_header_includes())
sections.append(self.generate_secondary_header_includes())
sections.append(Template(Templates.NamespaceTop).substitute(args))
sections.append(self.generate_section_for_object())
sections.append(Template(Templates.NamespaceBottom).substitute(args))
return "\n\n".join(sections)
def generate_secondary_header_includes(self):
header_includes = [
(["WebCore"],
("WebCore", "JSDOMGlobalObject.h"),
),
(["WebCore"],
("WebCore", "WebCoreJSClientData.h"),
),
(["WebCore"],
("JavaScriptCore", "heap/HeapInlines.h"),
),
(["WebCore"],
("JavaScriptCore", "heap/SlotVisitorInlines.h"),
),
(["WebCore"],
("JavaScriptCore", "runtime/JSCJSValueInlines.h"),
),
(["WebCore"],
("JavaScriptCore", "runtime/StructureInlines.h"),
),
]
return '\n'.join(self.generate_includes_from_entries(header_includes))
def generate_section_for_object(self):
lines = []
lines.append(self.generate_constructor())
lines.append(self.generate_visit_method())
lines.append(self.generate_initialize_method())
return '\n'.join(lines)
def accessor_name(self, object):
return WK_lcfirst(object.object_name)
def member_name(self, object):
return "m_" + self.accessor_name(object)
def member_type(self, object):
return WK_ucfirst(object.object_name) + "BuiltinFunctions"
def generate_constructor(self):
lines = ["JSBuiltinInternalFunctions::JSBuiltinInternalFunctions(JSC::VM& vm)",
" : m_vm(vm)"]
for object in self.internals:
initializer = " , %s(m_vm)" % self.member_name(object)
lines.append(BuiltinsGenerator.wrap_with_guard(object.annotations.get('conditional'), initializer))
lines.append("{")
lines.append(" UNUSED_PARAM(vm);")
lines.append("}\n")
return '\n'.join(lines)
def property_macro(self, object):
lines = []
lines.append("#define DECLARE_GLOBAL_STATIC(name) \\")
lines.append(" JSDOMGlobalObject::GlobalPropertyInfo( \\")
lines.append(" clientData.builtinFunctions().%sBuiltins().name##PrivateName(), %s().m_##name##Function.get() , JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly)," % (self.accessor_name(object), self.accessor_name(object)))
lines.append(" WEBCORE_FOREACH_%s_BUILTIN_FUNCTION_NAME(DECLARE_GLOBAL_STATIC)" % object.object_name.upper())
lines.append("#undef DECLARE_GLOBAL_STATIC")
return '\n'.join(lines)
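    # Illustrative sketch (object name hypothetical): for an object "Stream"
    # this emits a DECLARE_GLOBAL_STATIC macro and expands it once per builtin
    # via WEBCORE_FOREACH_STREAM_BUILTIN_FUNCTION_NAME(DECLARE_GLOBAL_STATIC).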
def generate_visit_method(self):
lines = ["void JSBuiltinInternalFunctions::visit(JSC::SlotVisitor& visitor)",
"{"]
for object in self.internals:
visit = " %s.visit(visitor);" % self.member_name(object)
lines.append(BuiltinsGenerator.wrap_with_guard(object.annotations.get('conditional'), visit))
lines.append(" UNUSED_PARAM(visitor);")
lines.append("}\n")
return '\n'.join(lines)
def _generate_initialize_static_globals(self):
lines = [" JSVMClientData& clientData = *static_cast<JSVMClientData*>(m_vm.clientData);",
" JSDOMGlobalObject::GlobalPropertyInfo staticGlobals[] = {"]
for object in self.internals:
lines.append(BuiltinsGenerator.wrap_with_guard(object.annotations.get('conditional'), self.property_macro(object)))
lines.append(" };")
lines.append(" globalObject.addStaticGlobals(staticGlobals, WTF_ARRAY_LENGTH(staticGlobals));")
lines.append(" UNUSED_PARAM(clientData);")
return '\n'.join(lines)
def generate_initialize_method(self):
lines = ["void JSBuiltinInternalFunctions::initialize(JSDOMGlobalObject& globalObject)",
"{",
" UNUSED_PARAM(globalObject);"]
for object in self.internals:
init = " %s.init(globalObject);" % self.member_name(object)
lines.append(BuiltinsGenerator.wrap_with_guard(object.annotations.get('conditional'), init))
lines.append("")
guards = set([object.annotations.get('conditional') for object in self.internals if 'conditional' in object.annotations])
lines.append(BuiltinsGenerator.wrap_with_guard(" || ".join(guards), self._generate_initialize_static_globals()))
lines.append("}")
return '\n'.join(lines)
|
teamfx/openjfx-8u-dev-rt
|
modules/web/src/main/native/Source/JavaScriptCore/Scripts/builtins/builtins_generate_internals_wrapper_implementation.py
|
Python
|
gpl-2.0
| 7,074
|
# encoding: utf-8
# module PyKDE4.kio
# from /usr/lib/python2.7/dist-packages/PyKDE4/kio.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdeui as __PyKDE4_kdeui
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
class KDirWatch(__PyQt4_QtCore.QObject):
# no doc
def addDir(self, *args, **kwargs): # real signature unknown
pass
def addFile(self, *args, **kwargs): # real signature unknown
pass
def contains(self, *args, **kwargs): # real signature unknown
pass
def created(self, *args, **kwargs): # real signature unknown
pass
def ctime(self, *args, **kwargs): # real signature unknown
pass
def deleted(self, *args, **kwargs): # real signature unknown
pass
def dirty(self, *args, **kwargs): # real signature unknown
pass
def exists(self, *args, **kwargs): # real signature unknown
pass
def internalMethod(self, *args, **kwargs): # real signature unknown
pass
def isStopped(self, *args, **kwargs): # real signature unknown
pass
def removeDir(self, *args, **kwargs): # real signature unknown
pass
def removeFile(self, *args, **kwargs): # real signature unknown
pass
def restartDirScan(self, *args, **kwargs): # real signature unknown
pass
def self(self, *args, **kwargs): # real signature unknown
pass
def setCreated(self, *args, **kwargs): # real signature unknown
pass
def setDeleted(self, *args, **kwargs): # real signature unknown
pass
def setDirty(self, *args, **kwargs): # real signature unknown
pass
def startScan(self, *args, **kwargs): # real signature unknown
pass
def statistics(self, *args, **kwargs): # real signature unknown
pass
def stopDirScan(self, *args, **kwargs): # real signature unknown
pass
def stopScan(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
DNotify = 2
FAM = 0
INotify = 1
Method = None # (!) real value is ''
Stat = 3
WatchDirOnly = 0
WatchFiles = 1
WatchMode = None # (!) real value is ''
WatchModes = None # (!) real value is ''
WatchSubDirs = 2
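# Hedged usage sketch (signatures are unknown in this generated stub; the call
# forms below are assumptions based on the underlying KDE KDirWatch API):
#   watcher = KDirWatch()
#   watcher.addDir('/some/path')    # start watching a directory
#   watcher.contains('/some/path')  # True once the path is watched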
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247972723/PyKDE4/kio/KDirWatch.py
|
Python
|
gpl-2.0
| 2,320
|
#!/usr/bin/env python3
#
# (c) 2013, Russell Stuart.
# Licensed under GPLv2, or any later version. See COPYING for details.
#
from distutils.core import setup
import re
def get_long_description():
handle = open("doc/lrparsing.rst")
while not next(handle).startswith("====="):
pass
    long_description = []
for line in handle:
if line.startswith("====="):
break
line = re.sub(":[a-z]*:`([^`<]*[^`< ])[^`]*`", "\\1", line)
long_description.append(line)
return ''.join(long_description[:-1])
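# Illustrative example (hypothetical line, not from doc/lrparsing.rst): the
# re.sub() above rewrites an rst role such as ":func:`Grammar.parse() <parse>`"
# to plain "Grammar.parse()".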
setup(
name="lrparsing",
description="An LR(1) parser hiding behind a pythonic interface",
long_description=get_long_description(),
version="1.0.4",
author="Russell Stuart",
author_email="russell-lrparsing@stuart.id.au",
url="http://www.stuart.id.au/russell/files/lrparsing",
package_dir={"": "lrparsing"},
py_modules=["lrparsing"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2 :: Only",
"Topic :: Software Development :: Libraries :: Python Modules",
]
)
|
wks/lrparsing3
|
setup.py
|
Python
|
gpl-2.0
| 1,264
|