| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
| uart/scarphase | pyscarphase/plot/color.py | Python | bsd-3-clause | 1,885 | 0.000531 |
# Copyright (c) 2011-2013 Andreas Sembrant
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# - Neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sembrant
import matplotlib.colors
COLORS = [
'#235590',
'#449A41',
'#FEA029',
'#D61E28',
'#7A377C',
]
cconv = matplotlib.colors.ColorConverter()
for i in xrange(len(COLORS)):
COLORS[i] = cconv.to_rgb(COLORS[i])
cm = matplotlib.colors.ListedColormap(COLORS, name='scarphase')
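# A minimal usage sketch (added for illustration; the data below is made up):
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#     phases = np.random.randint(0, len(COLORS), (8, 8))
#     plt.imshow(phases, cmap=cm)   # one listed color per phase id
#     plt.show()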
| nborwankar/open-budgets | openbudget/apps/accounts/factories.py | Python | bsd-3-clause | 1,107 | 0 |
import datetime
import factory
from django.utils.timezone import utc
from openbudget.apps.accounts.models import Account
class AccountFactory(factory.DjangoModelFactory):
FACTORY_FOR = Account
password = 'letmein'
email = factory.Sequence(lambda n: 'p{0}@here.com'.format(n))
first_name = factory.Sequence(lambda n: 'first_name{0}'.format(n))
last_name = factory.Sequence(lambda n: 'last_name{0}'.format(n))
is_staff = False
is_active = True
is_superuser = False
last_login = factory.Sequence(
        lambda n: datetime.datetime.utcnow().replace(tzinfo=utc)
)
created_on = factory.Sequence(
lambda n: datetime.datetime.utcnow().replace(tzinfo=utc)
)
last_modified = factory.Sequence(
lambda n: datetime.datetime.utcnow().replace(tzinfo=utc)
)
@classmethod
def _prepare(cls, create, **kwargs):
password = kwargs.pop('password', None)
account = super(AccountFactory, cls)._prepare(create, **kwargs)
account.set_password(password)
if create:
account.save()
return account
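# A minimal usage sketch (added; 'hunter2' is a made-up password, and creating
# a saved instance requires a configured test database):
#
#     account = AccountFactory()                        # saved, password 'letmein'
#     built = AccountFactory.build(password='hunter2')  # unsaved instance
#     built.check_password('hunter2')                   # -> True, it was hashed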
| Patrick-and-Michael/trumptweets | config/settings/production.py | Python | mit | 4,984 | 0.001605 |
# -*- coding: utf-8 -*-
"""
Production Configurations
- Use Redis for cache
"""
from __future__ import absolute_import, unicode_literals
from boto.s3.connection import OrdinaryCallingFormat
from django.utils import six
from .common import * # noqa
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Raises ImproperlyConfigured exception if DJANGO_SECRET_KEY not in os.environ
SECRET_KEY = env('DJANGO_SECRET_KEY')
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# SECURITY CONFIGURATION
# ------------------------------------------------------------------------------
# See https://docs.djangoproject.com/en/1.9/ref/middleware/#module-django.middleware.security
# and https://docs.djangoproject.com/ja/1.9/howto/deployment/checklist/#run-manage-py-check-deploy
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
'DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
'DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=True)
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=True)
CSRF_COOKIE_SECURE = True
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['example.com'])
# END SITE CONFIGURATION
INSTALLED_APPS += ('gunicorn', )
# STORAGE CONFIGURATION
# ------------------------------------------------------------------------------
# Uploaded Media Files
# ------------------------
# See: http://django-storages.readthedocs.io/en/latest/index.html
INSTALLED_APPS += (
'storages',
)
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIRY = 60 * 60 * 24 * 7
# TODO See: https://github.com/jschneier/django-storages/issues/47
# Revert the following and use str after the above-mentioned bug is fixed in
# either django-storage-redux or boto
AWS_HEADERS = {
'Cache-Control': six.b('max-age=%d, s-maxage=%d, must-revalidate' % (
AWS_EXPIRY, AWS_EXPIRY))
}
# URL that handles the media served from MEDIA_ROOT, used for managing
# stored files.
# See:http://stackoverflow.com/questions/10390244/
from storages.backends.s3boto import S3BotoStorage
StaticRootS3BotoStorage = lambda: S3BotoStorage(location='static')
MediaRootS3BotoStorage = lambda: S3BotoStorage(location='media')
DEFAULT_FILE_STORAGE = 'config.settings.production.MediaRootS3BotoStorage'
#MEDIA_URL = 'https://s3.amazonaws.com/%s/media/' % AWS_STORAGE_BUCKET_NAME
# Static Assets
# ------------------------
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See:
# https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader',
['django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader', ]),
]
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# Use the Heroku-style specification
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
DATABASES['default'] = env.db('DATABASE_URL')
# CACHING
# ------------------------------------------------------------------------------
REDIS_LOCATION = '{0}/{1}'.format(env('REDIS_URL', default='redis://127.0.0.1:6379'), 0)
# Heroku URL does not pass the DB number, so we parse it in
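# (added note) e.g. with the default above, REDIS_LOCATION becomes
# 'redis://127.0.0.1:6379/0' -- the appended '/0' selects Redis database 0.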
CACHES = {
'default': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': REDIS_LOCATION,
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
'IGNORE_EXCEPTIONS': True, # mimics memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
}
}
}
# Custom Admin URL, use {% url 'admin:index' %}
ADMIN_URL = env('DJANGO_ADMIN_URL')
# Your production stuff: Below this line define 3rd party library settings
# ------------------------------------------------------------------------------
# EMAIL
# ------------------------------------------------------------------------------
# for now, send emails to console, even in production
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.console.EmailBackend')
| zhsso/ubunto-one | lib/config.py | Python | agpl-3.0 | 1,777 | 0 |
# Copyright 2008-2015 Canonical
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further info, check http://launchpad.net/filesync-server
"""A config management layer."""
import os
import yaml
from utilities import devconfig
def _load():
"""Load configuration from a file."""
fpath = os.environ["CONFIG"]
with open(fpath, "rt") as fh:
data = yaml.load(fh)
return data
class _Config(dict):
"""The configuration holder."""
def __init__(self, data=None):
if data is None:
data = _load()
super(_Config, self).__init__(data)
def __getattr__(self, name):
value = self[name]
if isinstance(value, dict) and not isinstance(value, _Config):
wrapped = _Config(value)
self[name] = wrapped
return wrapped
        else:
return value
def __setattr__(self, name, value):
self[name] = value
def __str__(self):
return "<Config at %d: %s>" % (
id(self), super(_Config, self).__str__())
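# A minimal usage sketch (added; the nested dict is made up for illustration):
# nested dicts become attribute-accessible and are wrapped lazily on access.
#
#     cfg = _Config({'database': {'host': 'localhost', 'port': 5432}})
#     cfg.database.host   # -> 'localhost'; cfg['database'] is now a _Config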
# instantiate the config and dynamically load the active ports
config = _Config()
devconfig.development_ports(config)
| ekohl/django-waffle | waffle/__init__.py | Python | bsd-3-clause | 6,842 | 0.000438 |
from decimal import Decimal
import random
import hashlib
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_save, post_delete, m2m_changed
from waffle.models import Flag, Sample, Switch
VERSION = (0, 9, 2)
__version__ = '.'.join(map(str, VERSION))
CACHE_PREFIX = getattr(settings, 'WAFFLE_CACHE_PREFIX', u'waffle:')
FLAG_CACHE_KEY = u'flag:%s'
FLAGS_ALL_CACHE_KEY = u'flags:all'
FLAG_USERS_CACHE_KEY = u'flag:%s:users'
FLAG_GROUPS_CACHE_KEY = u'flag:%s:groups'
SAMPLE_CACHE_KEY = u'sample:%s'
SAMPLES_ALL_CACHE_KEY = u'samples:all'
SWITCH_CACHE_KEY = u'switch:%s'
SWITCHES_ALL_CACHE_KEY = u'switches:all'
COOKIE_NAME = getattr(settings, 'WAFFLE_COOKIE', 'dwf_%s')
TEST_COOKIE_NAME = getattr(settings, 'WAFFLE_TESTING_COOKIE', 'dwft_%s')
def keyfmt(k, v=None):
if v is None:
return CACHE_PREFIX + k
return CACHE_PREFIX + hashlib.md5(k % v).hexdigest()
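# (added sketch of the key scheme; 'beta' is a made-up flag name)
#
#     keyfmt(FLAG_CACHE_KEY)          -> u'waffle:flag:%s'  (template, no value)
#     keyfmt(FLAG_CACHE_KEY, 'beta')  -> u'waffle:' + md5('flag:beta').hexdigest()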
class DoesNotExist(object):
"""The record does not exist."""
@property
def active(self):
return getattr(settings, 'WAFFLE_SWITCH_DEFAULT', False)
def set_flag(request, flag_name, active=True, session_only=False):
"""Set a flag value on a request object."""
if not hasattr(request, 'waffles'):
request.waffles = {}
request.waffles[flag_name] = [active, session_only]
def flag_is_active(request, flag_name):
flag = cache.get(keyfmt(FLAG_CACHE_KEY, flag_name))
if flag is None:
try:
flag = Flag.objects.get(name=flag_name)
cache_flag(instance=flag)
except Flag.DoesNotExist:
return getattr(settings, 'WAFFLE_FLAG_DEFAULT', False)
if getattr(settings, 'WAFFLE_OVERRIDE', False):
if flag_name in request.GET:
return request.GET[flag_name] == '1'
if flag.everyone:
return True
elif flag.everyone is False:
return False
if flag.testing: # Testing mode is on.
tc = TEST_COOKIE_NAME % flag_name
if tc in request.GET:
on = request.GET[tc] == '1'
if not hasattr(request, 'waffle_tests'):
request.waffle_tests = {}
request.waffle_tests[flag_name] = on
return on
if tc in request.COOKIES:
return request.COOKIES[tc] == 'True'
user = request.user
if flag.authenticated and user.is_authenticated():
return True
if flag.staff and user.is_staff:
return True
if flag.superusers and user.is_superuser:
return True
    if flag.languages:
languages = flag.languages.split(',')
if (hasattr(request, 'LANGUAGE_CODE') and
request.LANGUAGE_CODE in languages):
return True
flag_users = cache.get(keyfmt(FLAG_USERS_CACHE_KEY, flag.name))
if flag_users is None:
        flag_users = flag.users.all()
cache_flag(instance=flag)
if user in flag_users:
return True
flag_groups = cache.get(keyfmt(FLAG_GROUPS_CACHE_KEY, flag.name))
if flag_groups is None:
flag_groups = flag.groups.all()
cache_flag(instance=flag)
user_groups = user.groups.all()
for group in flag_groups:
if group in user_groups:
return True
if flag.percent > 0:
if not hasattr(request, 'waffles'):
request.waffles = {}
elif flag_name in request.waffles:
return request.waffles[flag_name][0]
cookie = COOKIE_NAME % flag_name
if cookie in request.COOKIES:
flag_active = (request.COOKIES[cookie] == 'True')
set_flag(request, flag_name, flag_active, flag.rollout)
return flag_active
if Decimal(str(random.uniform(0, 100))) <= flag.percent:
set_flag(request, flag_name, True, flag.rollout)
return True
set_flag(request, flag_name, False, flag.rollout)
return False
def switch_is_active(switch_name):
switch = cache.get(keyfmt(SWITCH_CACHE_KEY, switch_name))
if switch is None:
try:
switch = Switch.objects.get(name=switch_name)
cache_switch(instance=switch)
except Switch.DoesNotExist:
switch = DoesNotExist()
switch.name = switch_name
cache_switch(instance=switch)
return switch.active
def sample_is_active(sample_name):
sample = cache.get(keyfmt(SAMPLE_CACHE_KEY, sample_name))
if sample is None:
try:
sample = Sample.objects.get(name=sample_name)
cache_sample(instance=sample)
except Sample.DoesNotExist:
return getattr(settings, 'WAFFLE_SAMPLE_DEFAULT', False)
return Decimal(str(random.uniform(0, 100))) <= sample.percent
def cache_flag(**kwargs):
action = kwargs.get('action', None)
# action is included for m2m_changed signal. Only cache on the post_*.
if not action or action in ['post_add', 'post_remove', 'post_clear']:
f = kwargs.get('instance')
cache.add(keyfmt(FLAG_CACHE_KEY, f.name), f)
cache.add(keyfmt(FLAG_USERS_CACHE_KEY, f.name), f.users.all())
cache.add(keyfmt(FLAG_GROUPS_CACHE_KEY, f.name), f.groups.all())
def uncache_flag(**kwargs):
flag = kwargs.get('instance')
data = {
keyfmt(FLAG_CACHE_KEY, flag.name): None,
keyfmt(FLAG_USERS_CACHE_KEY, flag.name): None,
keyfmt(FLAG_GROUPS_CACHE_KEY, flag.name): None,
keyfmt(FLAGS_ALL_CACHE_KEY): None
}
cache.set_many(data, 5)
post_save.connect(uncache_flag, sender=Flag, dispatch_uid='save_flag')
post_delete.connect(uncache_flag, sender=Flag, dispatch_uid='delete_flag')
m2m_changed.connect(uncache_flag, sender=Flag.users.through,
dispatch_uid='m2m_flag_users')
m2m_changed.connect(uncache_flag, sender=Flag.groups.through,
dispatch_uid='m2m_flag_groups')
def cache_sample(**kwargs):
sample = kwargs.get('instance')
cache.add(keyfmt(SAMPLE_CACHE_KEY, sample.name), sample)
def uncache_sample(**kwargs):
sample = kwargs.get('instance')
cache.set(keyfmt(SAMPLE_CACHE_KEY, sample.name), None, 5)
cache.set(keyfmt(SAMPLES_ALL_CACHE_KEY), None, 5)
post_save.connect(uncache_sample, sender=Sample, dispatch_uid='save_sample')
post_delete.connect(uncache_sample, sender=Sample,
dispatch_uid='delete_sample')
def cache_switch(**kwargs):
switch = kwargs.get('instance')
cache.add(keyfmt(SWITCH_CACHE_KEY, switch.name), switch)
def uncache_switch(**kwargs):
switch = kwargs.get('instance')
cache.set(keyfmt(SWITCH_CACHE_KEY, switch.name), None, 5)
cache.set(keyfmt(SWITCHES_ALL_CACHE_KEY), None, 5)
post_delete.connect(uncache_switch, sender=Switch,
dispatch_uid='delete_switch')
post_save.connect(uncache_switch, sender=Switch, dispatch_uid='save_switch')
| ingadhoc/website | payment_todopago/todopago/test/SendAuthorizeRequestTest.py | Python | agpl-3.0 | 2,473 | 0.003235 |
# pylint: disable-all
# flake8: noqa
import sys
sys.path.append("..")
from todopagoconnector import TodoPagoConnector
from SendAuthorizeRequestData import SendAuthorizeRequestData
import unittest
from unittest import TestCase
if sys.version_info[0] >= 3:
from unittest.mock import patch, Mock
else:
from mock import patch, Mock, MagicMock
class SendAuthorizeRequestTest(TestCase):
@patch('todopagoconnector.TodoPagoConnector')
def test_get_credentials_ok(self, MockTodoPagoConnector):
j_header_http = {
'Authorization': 'TODOPAGO f3d8b72c94ab4a06be2ef7c95490f7d3'
}
MTPConnector = MockTodoPagoConnector(j_header_http, "test")
instanceSARData = SendAuthorizeRequestData()
MTPConnector.sendAuthorize.return_value = instanceSARData.send_authorize_request_ok_response()
responseSAR = MTPConnector.sendAuthorize(
instanceSARData.get_options_SAR_comercio_params(),
instanceSARData.get_options_SAR_operation_params())
self.assertEqual(responseSAR['StatusCode'], -1)
@patch('todopagoconnector.TodoPagoConnector')
def test_get_credentials_fail(self, MockTodoPagoConnector):
j_header_http = {
'Authorization': 'TODOPAGO f3d8b72c94ab4a06be2ef7c95490f7d3'
}
        MTPConnector = MockTodoPagoConnector(j_header_http, "test")
instanceSAR = SendAuthorizeRequestData()
MTPConnector.sendAuthorize.return_value = instanceSAR.send_authorize_request_fail_response()
responseSAR = MTPConnector.sendAuthorize(
            instanceSAR.get_options_SAR_comercio_params(),
instanceSAR.get_options_SAR_operation_params())
        self.assertNotEqual(responseSAR['StatusCode'], -1)
@patch('todopagoconnector.TodoPagoConnector')
def test_get_credentials_702(self, MockTodoPagoConnector):
j_header_http = {
'Authorization': 'TODOPAGO f3d8b72c94ab4a06be2ef7c95490f7d3'
}
MTPConnector = MockTodoPagoConnector(j_header_http, "test")
instanceSAR = SendAuthorizeRequestData()
MTPConnector.sendAuthorize.return_value = instanceSAR.send_authorize_request_702_response()
responseSAR = MTPConnector.sendAuthorize(
instanceSAR.get_options_SAR_comercio_params(),
instanceSAR.get_options_SAR_operation_params())
        self.assertNotEqual(responseSAR['StatusCode'], -1)
if __name__ == '__main__':
unittest.main()
| elopio/snapcraft | tests/integration/general/test_clean_prime_step.py | Python | gpl-3.0 | 3,431 | 0 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016-2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from testtools.matchers import (
Contains,
DirExists,
FileExists,
Not
)
from tests import integration
class CleanPrimeStepTestCase(integration.TestCase):
def setUp(self):
super().setUp()
self.copy_project_to_cwd('independent-parts')
self.run_snapcraft('prime')
def test_clean_prime_step(self):
bindir = os.path.join(self.prime_dir, 'bin')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
output = self.run_snapcraft(
['clean', '--step=prime'], debug=False)
self.assertThat(self.prime_dir, Not(DirExists()))
self.assertThat(self.stage_dir, DirExists())
self.assertThat(self.parts_dir, DirExists())
# Assert that the priming area was removed wholesale, not a part at a
# time (since we didn't specify any parts).
self.assertThat(output, Contains("Cleaning up priming area"))
self.expectThat(output, Not(Contains('part1')))
self.expectThat(output, Not(Contains('part2')))
# Now try to prime again
self.run_snapcraft('prime')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
def test_clean_prime_step_single_part(self):
bindir = os.path.join(self.prime_dir, 'bin')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
self.run_snapcraft(['clean', 'part1', '--step=prime'])
self.assertThat(os.path.join(bindir, 'file1'), Not(FileExists()))
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
self.assertThat(self.stage_dir, DirExists())
self.assertThat(self.parts_dir, DirExists())
# Now try to prime again
self.run_snapcraft('prime')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
def test_clean_with_deprecated_strip_step(self):
bindir = os.path.join(self.prime_dir, 'bin')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
self.run_snapcraft(['clean', '--step=strip'])
self.assertThat(self.prime_dir, Not(DirExists()))
self.assertThat(self.stage_dir, DirExists())
self.assertThat(self.parts_dir, DirExists())
# Now try to prime again
self.run_snapcraft('prime')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
        self.assertThat(os.path.join(bindir, 'file2'), FileExists())
| debugger06/MiroX | tv/lib/frontends/widgets/watchedfolders.py | Python | gpl-2.0 | 3,503 | 0.000571 |
# Miro - an RSS based video player application
# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
# Participatory Culture Foundation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
#
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
"""watchedsfolders.py -- Manages tracking watched folders. """
from miro import messages
from miro import signals
from miro.plat.frontends.widgets import widgetset
from miro.plat.utils import filename_to_unicode
class WatchedFolderManager(signals.SignalEmitter):
"""Manages tracking watched folders.
Attributes:
model -- TableModel object that contains the current list of watched
folders. It has 3 columns: id (integer), path (text) and
visible (boolean).
Signals:
changed -- The list of watched folders has changed
"""
def __init__(self):
signals.SignalEmitter.__init__(self, 'changed')
self.model = widgetset.TableModel('integer', 'text', 'boolean')
self._iter_map = {}
def handle_watched_folder_list(self, info_list):
"""Handle the WatchedFolderList message."""
for info in info_list:
iter = self.model.append(info.id, filename_to_unicode(info.path),
info.visible)
self._iter_map[info.id] = iter
self.emit('changed')
def handle_watched_folders_changed(self, added, changed, removed):
"""Handle the WatchedFoldersChanged message."""
self.handle_watched_folder_list(added)
for info in changed:
iter = self._iter_map[info.id]
self.model.update_value(iter, 1, filename_to_unicode(info.path))
self.model.update_value(iter, 2, info.visible)
for id in removed:
iter = self._iter_map.pop(id)
self.model.remove(iter)
self.emit('changed')
def change_visible(self, id_, visible):
"""Change if a watched folder is visible or not."""
messages.SetWatchedFolderVisible(id_, visible).send_to_backend()
def remove(self, id_):
"""Remove a watched folder."""
messages.DeleteWatchedFolder(id_).send_to_backend()
def add(self, path):
"""Add a new watched folder. It will be initially visible."""
messages.NewWatchedFolder(path).send_to_backend()
| datasciencebr/serenata-de-amor | jarbas/core/views.py | Python | mit | 867 | 0 |
from django.db import connection
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from rest_framework.generics import RetrieveAPIView
from jarbas.core.models import Company
from jarbas.core.serializers import CompanySerializer
from jarbas.chamber_of_deputies.serializers import format_cnpj
class CompanyDetailView(RetrieveAPIView):
lookup_field = 'cnpj'
queryset = Company.objects.all()
serializer_class = CompanySerializer
def get_object(self):
cnpj = self.kwargs.get(self.lookup_field, '00000000000000')
return get_object_or_404(Company, cnpj=format_cnpj(cnpj))
def healthcheck(request):
"""A simple view to run a health check in Django and in the database"""
with connection.cursor() as cursor:
cursor.execute('SELECT 1')
cursor.fetchone()
return HttpResponse()
| pyhmsa/pyhmsa | pyhmsa/fileformat/xmlhandler/condition/elementalid.py | Python | mit | 925 | 0.005405 |
"""
XML handler for element id condition
"""
# Standard library modules.
# Third party modules.
# Local modules.
from pyhmsa.spec.condition.elementalid import ElementalID, ElementalIDXray
from pyhmsa.fileformat.xmlhandler.condition.condition import _ConditionXMLHandler
# Globals and constants variables.
class ElementalIDXMLHandler(_ConditionXMLHandler):
def __init__(self, version):
super().__init__(ElementalID, version)
def convert(self, obj):
element = super().convert(obj)
element.find('Element').set('Symbol', obj.symbol) # manually add symbol
return element
class ElementalIDXrayXMLHandler(_ConditionXMLHandler):
def __init__(self, version):
super().__init__(ElementalIDXray, version)
def convert(self, obj):
element = super().convert(obj)
element.find('Element').set('Symbol', obj.symbol) # manually add symbol
return element
| feigaochn/leetcode | p62_unique_paths.py | Python | mit | 892 | 0 |
# author: Fei Gao
#
# Unique Paths
#
# A robot is located at the top-left corner of a m x n grid (marked 'Start' in
# the diagram below).
# The robot can only move either down or right at any point in time. The robot
# is trying to reach the bottom-right corner of the grid (marked 'Finish' in
# the diagram below).
# How many possible unique paths are there?
# Above is a 3 x 7 grid. How many possible unique paths are there?
# Note: m and n will be at most 100.
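# (added note) The robot makes (m-1) + (n-1) moves in total and only chooses
# which of them go down, so the answer is the binomial coefficient
# C(m+n-2, min(m, n)-1); for the 3 x 7 grid above that is C(8, 2) = 28.
# The loop below computes exactly this product incrementally so that every
# intermediate value stays an integer.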
class Solution:
# @return an integer
def uniquePaths(self, m, n):
mm = m - 1
nn = n - 1
if mm > nn:
mm, nn = nn, mm
res = 1
for i in range(mm):
res = res * (nn + mm - i)
res = res // (i + 1)
return res
def main():
solver = Solution()
print(solver.uniquePaths(2, 3))
pass
if __name__ == '__main__':
main()
pass
| DrSpaceMonkey/script.pseudotv.live | resources/lib/EPGWindow.py | Python | gpl-3.0 | 58,386 | 0.008512 |
# Copyright (C) 2013 Lunatixz
#
#
# This file is part of PseudoTV.
#
# PseudoTV is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PseudoTV is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PseudoTV. If not, see <http://www.gnu.org/licenses/>.
import xbmc, xbmcgui, xbmcaddon
import subprocess, os
import time, threading
import datetime, traceback
import sys, re
import urllib
import urllib2
import fanarttv
from Playlist import Playlist
from Globals import *
from Channel import Channel
from ChannelList import ChannelList
from FileAccess import FileLock, FileAccess
from xml.etree import ElementTree as ET
from fanarttv import *
from Downloader import *
class EPGWindow(xbmcgui.WindowXMLDialog):
def __init__(self, *args, **kwargs):
self.focusRow = 0
self.focusIndex = 0
self.focusTime = 0
self.focusEndTime = 0
self.shownTime = 0
self.centerChannel = 0
self.rowCount = 6
self.channelButtons = [None] * self.rowCount
self.buttonCache = []
self.buttonCount = 0
self.actionSemaphore = threading.BoundedSemaphore()
self.lastActionTime = time.time()
self.channelLogos = ''
self.textcolor = "FFFFFFFF"
self.focusedcolor = "FF7d7d7d"
self.clockMode = 0
self.textfont = "font14"
self.startup = time.time()
self.showingInfo = False
self.infoOffset = 0
self.infoOffsetV = 0
self.Downloader = Downloader()
self.log('Using EPG Coloring = ' + str(REAL_SETTINGS.getSetting('EPGcolor_enabled')))
self.AltmediaPath = xbmc.translatePath(os.path.join(ADDON_INFO, 'resources', 'skins', 'default', 'media')) + '/'
#Set skin media folder, else default
if os.path.exists(xbmc.translatePath(os.path.join(ADDON_INFO, 'resources', 'skins', Skin_Select, 'media'))):
self.mediaPath = xbmc.translatePath(os.path.join(ADDON_INFO, 'resources', 'skins', Skin_Select, 'media')) + '/'
else:
self.mediaPath = self.AltmediaPath
self.log('Mediapath is ' + self.mediaPath)
# Use the given focus and non-focus textures if they exist. Otherwise use the defaults.
if os.path.exists(self.mediaPath + BUTTON_FOCUS):
self.textureButtonFocus = self.mediaPath + BUTTON_FOCUS
elif xbmc.skinHasImage(self.mediaPath + BUTTON_FOCUS):
self.textureButtonFocus = self.mediaPath + BUTTON_FOCUS
else:
self.textureButtonFocus = 'pstvlButtonFocus.png'
if os.path.exists(self.mediaPath + BUTTON_NO_FOCUS):
self.textureButtonNoFocus = self.mediaPath + BUTTON_NO_FOCUS
elif xbmc.skinHasImage(self.mediaPath + BUTTON_NO_FOCUS):
self.textureButtonNoFocus = self.mediaPath + BUTTON_NO_FOCUS
else:
self.textureButtonNoFocus = 'pstvlButtonNoFocus.png'
for i in range(self.rowCount):
self.channelButtons[i] = []
self.clockMode = ADDON_SETTINGS.getSetting("ClockMode")
self.toRemove = []
def onFocus(self, controlid):
pass
# set the time labels
def setTimeLabels(self, thetime):
self.log('setTimeLabels')
now = datetime.datetime.fromtimestamp(thetime)
self.getControl(104).setLabel(now.strftime('%A, %b %d'))
delta = datetime.timedelta(minutes=30)
for i in range(3):
if self.clockMode == "0":
self.getControl(101 + i).setLabel(now.strftime("%I:%M%p").lower())
else:
self.getControl(101 + i).setLabel(now.strftime("%H:%M"))
now = now + delta
self.log('setTimeLabels return')
self.log('thetime ' + str(now))
def log(self, msg, level = xbmc.LOGDEBUG):
log('EPGWindow: ' + msg, level)
def logDebug(self, msg, level = xbmc.LOGDEBUG):
if REAL_SETTINGS.getSetting('enable_Debug') == "true":
log('EPGWindow: ' + msg, level)
def onInit(self):
self.log('onInit')
timex, timey = self.getControl(120).getPosition()
timew = self.getControl(120).getWidth()
timeh = self.getControl(120).getHeight()
#Set timebar path, else use alt. path
if os.path.exists(xbmc.translatePath(os.path.join(ADDON_INFO, 'resources', 'skins', Skin_Select, 'media', TIME_BAR))):
self.currentTimeBar = xbmcgui.ControlImage(timex, timey, timew, timeh, self.mediaPath + TIME_BAR)
else:
self.currentTimeBar = xbmcgui.ControlImage(timex, timey, timew, timeh, self.AltmediaPath + TIME_BAR)
self.log('Mediapath Time_Bar = ' + self.mediaPath + TIME_BAR)
self.addControl(self.currentTimeBar)
### Skin labels, Set textcolor, focusedcolor and font. Rowcount todo ###
try:
textcolor = int(self.getControl(100).getLabel(), 16)
if textcolor > 0:
self.textcolor = hex(textcolor)[2:]
self.logDebug("onInit.Self.textcolor = " + str(self.textcolor))
except:
pass
try:
            focusedcolor = int(self.getControl(99).getLabel(), 16)
if focusedcolor > 0:
self.focusedcolor = hex(focusedcolor)[2:]
self.logDebug("onInit.Self.focusedcolor = " + str(self.focusedcolor))
except:
pass
try:
            self.textfont = self.getControl(105).getLabel()
self.logDebug("onInit.Self.textfont = " + str(self.textfont))
except:
pass
# try:
# self.rowCount = self.getControl(106).getLabel()
# self.logDebug("onInit, Self.rowCount = " + str(self.rowCount))
# except:
# pass
##################################################################
try:
if self.setChannelButtons(time.time(), self.MyOverlayWindow.currentChannel) == False:
self.log('Unable to add channel buttons')
return
curtime = time.time()
self.focusIndex = -1
basex, basey = self.getControl(113).getPosition()
baseh = self.getControl(113).getHeight()
basew = self.getControl(113).getWidth()
# set the button that corresponds to the currently playing show
for i in range(len(self.channelButtons[2])):
left, top = self.channelButtons[2][i].getPosition()
width = self.channelButtons[2][i].getWidth()
left = left - basex
starttime = self.shownTime + (left / (basew / 5400.0))
endtime = starttime + (width / (basew / 5400.0))
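                # (added note) basew pixels span 5400 seconds (90 minutes, the
                # three 30-minute columns of the grid), so basew / 5400.0 is
                # pixels-per-second and dividing a pixel offset by it converts
                # the offset back into seconds.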
if curtime >= starttime and curtime <= endtime:
self.focusIndex = i
self.setFocus(self.channelButtons[2][i])
self.focusTime = int(time.time())
self.focusEndTime = endtime
break
# If nothing was highlighted, just select the first button
if self.focusIndex == -1:
self.focusIndex = 0
self.setFocus(self.channelButtons[2][0])
left, top = self.channelButtons[2][0].getPosition()
width = self.channelButtons[2][0].getWidth()
left = left - basex
starttime = self.shownTime + (left / (basew / 5400.0))
endtime = starttime + (width / (basew / 5400.0))
self.focusTime = int(starttime + 30)
self.focusEndTime = endtime
self.focusRow = 2
self.setSh
| cmvac/demagorgon.repository | plugin.video.irmaospiologo/default.py | Python | gpl-2.0 | 1,296 | 0.013117 |
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# http://www.youtube.com/user/irmaospiologo
#------------------------------------------------------------
# License: GPL (http://www.gnu.org/licenses/gpl-3.0.html)
# Based on code from youtube addon
#------------------------------------------------------------
import os
import sys
import plugintools
import xbmc,xbmcaddon
from addon.common.addon import Addon
addonID = 'plugin.video.irmaospiologo'
addon = Addon(addonID, sys.argv)
local = xbmcaddon.Addon(id=addonID)
icon = local.getAddonInfo('icon')
YOUTUBE_CHANNEL_ID = "irmaospiologo"
# Entry point
def run():
plugintools.log("irmaospiologo.run")
# Get params
params = plugintools.get_params()
if params.get("action") is None:
main_list(params)
else:
action = params.get("action")
exec action+"(params)"
plugintools.close_item_list()
# Main menu
def main_list(params):
plugintools.log("irmaospiologo.main_list "+r
|
epr(p
|
arams))
plugintools.add_item(
#action="",
title="Cuidado! Improprio para menores! Entrar para comecar a zueira! XD",
url="plugin://plugin.video.youtube/user/"+YOUTUBE_CHANNEL_ID+"/",
thumbnail=icon,
folder=True )
run()
| Mavrikant/WikiBots | deneme.py | Python | mit | 826 | 0.007472 |
# -*- coding: utf-8 -*-
# !/usr/bin/python
# load the mavri library
import mavri
# load the json library
import json
wiki='tr.wikipedia'
username= 'Mavrikant Bot'
# log in the user
xx = mavri.login(wiki, username)
# print the result of the login attempt to the screen, formatted as JSON
print json.dumps(json.loads(xx.text), sort_keys=True, indent=4)
# separate the two sections from each other
print "\n-------------------------------------------------------------------------\n"
# add a message to the sandbox page.
sonuc = mavri.appendtext_on_page('tr.wikipedia', 'Vikipedi:Deneme tahtası', '\n== mavribot test ==\nDeneme deneme 123 --~~~~', 'mavribot ile test yapıldı.', xx)
# print the result to the screen, formatted as JSON
print json.dumps(json.loads(sonuc.text), sort_keys=True, indent=4)
# close the program
exit(0)
| fbradyirl/home-assistant | tests/components/litejet/test_scene.py | Python | apache-2.0 | 1,928 | 0.001037 |
"""The tests for the litejet component."""
import logging
import unittest
from unittest import mock
from homeassistant import setup
from homeassistant.components import litejet
from tests.common import get_test_home_assistant
from tests.components.scene import common
_LOGGER = logging.getLogger(__name__)
ENTITY_SCENE = "scene.mock_scene_1"
ENTITY_SCENE_NUMBER = 1
ENTITY_OTHER_SCENE = "scene.mock_scene_2"
ENTITY_OTHER_SCENE_NUMBER = 2
class TestLiteJetScene(unittest.TestCase):
"""Test the litejet component."""
@mock.patch("pylitejet.LiteJet")
def setup_method(self, method, mock_pylitejet):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.hass.start()
def get_scene_name(number):
            return "Mock Scene #" + str(number)
self.mock_lj = mock_pylitejet.return_value
        self.mock_lj.loads.return_value = range(0)
self.mock_lj.button_switches.return_value = range(0)
self.mock_lj.all_switches.return_value = range(0)
self.mock_lj.scenes.return_value = range(1, 3)
self.mock_lj.get_scene_name.side_effect = get_scene_name
assert setup.setup_component(
self.hass, litejet.DOMAIN, {"litejet": {"port": "/tmp/this_will_be_mocked"}}
)
self.hass.block_till_done()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def scene(self):
"""Get the current scene."""
return self.hass.states.get(ENTITY_SCENE)
def other_scene(self):
"""Get the other scene."""
return self.hass.states.get(ENTITY_OTHER_SCENE)
def test_activate(self):
"""Test activating the scene."""
common.activate(self.hass, ENTITY_SCENE)
self.hass.block_till_done()
self.mock_lj.activate_scene.assert_called_once_with(ENTITY_SCENE_NUMBER)
| KamilSzot/365_programs | 2017-01-27/input_data.py | Python | unlicense | 6,497 | 0.000154 |
#!/usr/bin/env python
# This file comes from here: https://github.com/llSourcell/tensorflow_demo/blob/master/input_data.py
"""Functions for downloading and reading MNIST data."""
import gzip
import os
from six.moves.urllib.request import urlretrieve
import numpy
SOURCE_URL = 'http://yann.lecun.com/exdb/mnist/'
def maybe_download(filename, work_directory):
"""Download the data from Yann's website, unless it's already here."""
if not os.path.exists(work_directory):
os.mkdir(work_directory)
filepath = os.path.join(work_directory, filename)
if not os.path.exists(filepath):
filepath, _ = urlretrieve(SOURCE_URL + filename, filepath)
statinfo = os.stat(filepath)
    print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
return filepath
def _read32(bytestream):
dt = numpy.dtype(numpy.uint32).newbyteorder('>')
return numpy.frombuffer(bytestream.read(4), dtype=dt)[0]
def extract_images(filename):
"""Extract the images into a 4D uint8 numpy array [index, y, x, depth]."""
print('Extracting', filename)
with gzip.open(filename) as bytestream:
magic = _read32(bytestream)
if magic != 2051:
raise ValueError(
'Invalid magic number %d in MNIST image file: %s' %
(magic, filename))
num_images = _read32(bytestream)
rows = _read32(bytestream)
cols = _read32(bytestream)
buf = bytestream.read(rows * cols * num_images)
data = numpy.frombuffer(buf, dtype=numpy.uint8)
data = data.reshape(num_images, rows, cols, 1)
return data
def dense_to_one_hot(labels_dense, num_classes=10):
"""Convert class labels from scalars to one-hot vectors."""
num_labels = labels_dense.shape[0]
index_offset = numpy.arange(num_labels) * num_classes
labels_one_hot = numpy.zeros((num_labels, num_classes))
labels_one_hot.flat[index_offset + labels_dense.ravel()] = 1
return labels_one_hot
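# (added note) A tiny worked example of the flat-index trick above, with
# hypothetical labels: for labels_dense = [1, 3] and num_classes = 10,
# index_offset = [0, 10], so flat positions 1 and 13 are set -- row 0 gets
# its 1 in column 1 and row 1 gets its 1 in column 3.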
def extract_labels(filename, one_hot=False):
"""Extract the labels into a 1D uint8 numpy array [index]."""
print('Extracting', filename)
with gzip.open(filename) as bytestream:
magic = _read32(bytestream)
if magic != 2049:
raise ValueError(
'Invalid magic number %d in MNIST label file: %s' %
(magic, filename))
num_items = _read32(bytestream)
buf = bytestream.read(num_items)
labels = numpy.frombuffer(buf, dtype=numpy.uint8)
if one_hot:
return dense_to_one_hot(labels)
return labels
class DataSet(object):
    def __init__(self, images, labels, fake_data=False):
if fake_data:
self._num_examples = 10000
else:
assert images.shape[0] == labels.shape[0], (
"images.shape: %s labels.shape: %s" % (images.shape,
labels.shape))
self._num_examples = images.shape[0]
# Convert shape from [num examples, rows, columns, depth]
# to [num examples, rows*columns] (assuming depth == 1)
assert images.shape[3] == 1
images = images.reshape(images.shape[0],
images.shape[1] * images.shape[2])
# Convert from [0, 255] -> [0.0, 1.0].
images = images.astype(numpy.float32)
images = numpy.multiply(images, 1.0 / 255.0)
self._images = images
self._labels = labels
self._epochs_completed = 0
self._index_in_epoch = 0
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
@property
def num_examples(self):
return self._num_examples
@property
def epochs_completed(self):
return self._epochs_completed
def next_batch(self, batch_size, fake_data=False):
"""Return the next `batch_size` examples from this data set."""
if fake_data:
fake_image = [1.0 for _ in xrange(784)]
fake_label = 0
return [fake_image for _ in xrange(batch_size)], [
fake_label for _ in xrange(batch_size)]
start = self._index_in_epoch
self._index_in_epoch += batch_size
if self._index_in_epoch > self._num_examples:
# Finished epoch
self._epochs_completed += 1
# Shuffle the data
perm = numpy.arange(self._num_examples)
numpy.random.shuffle(perm)
self._images = self._images[perm]
self._labels = self._labels[perm]
# Start next epoch
start = 0
self._index_in_epoch = batch_size
assert batch_size <= self._num_examples
end = self._index_in_epoch
return self._images[start:end], self._labels[start:end]
def read_data_sets(train_dir, fake_data=False, one_hot=False):
class DataSets(object):
pass
data_sets = DataSets()
if fake_data:
data_sets.train = DataSet([], [], fake_data=True)
data_sets.validation = DataSet([], [], fake_data=True)
data_sets.test = DataSet([], [], fake_data=True)
return data_sets
TRAIN_IMAGES = 'train-images-idx3-ubyte.gz'
TRAIN_LABELS = 'train-labels-idx1-ubyte.gz'
TEST_IMAGES = 't10k-images-idx3-ubyte.gz'
TEST_LABELS = 't10k-labels-idx1-ubyte.gz'
VALIDATION_SIZE = 5000
local_file = maybe_download(TRAIN_IMAGES, train_dir)
train_images = extract_images(local_file)
local_file = maybe_download(TRAIN_LABELS, train_dir)
train_labels = extract_labels(local_file, one_hot=one_hot)
local_file = maybe_download(TEST_IMAGES, train_dir)
test_images = extract_images(local_file)
local_file = maybe_download(TEST_LABELS, train_dir)
test_labels = extract_labels(local_file, one_hot=one_hot)
validation_images = train_images[:VALIDATION_SIZE]
validation_labels = train_labels[:VALIDATION_SIZE]
train_images = train_images[VALIDATION_SIZE:]
train_labels = train_labels[VALIDATION_SIZE:]
data_sets.train = DataSet(train_images, train_labels)
data_sets.validation = DataSet(validation_images, validation_labels)
data_sets.test = DataSet(test_images, test_labels)
return data_sets
| nacc/autotest | client/net/net_utils_mock.py | Python | gpl-2.0 | 2,952 | 0.004743 |
"""Set of Mocks and stubs for network utilities unit tests.
Implement a set of mocks and stubs use to implement unit tests
for the network libraries.
"""
import socket
from autotest.client.shared.test_utils import mock
from autotest.client.net import net_utils
def os_open(*args, **kwarg):
return os_stub('open')
class os_stub(mock.mock_function):
def __init__(self, symbol, **kwargs):
mock.mock_function.__init__(self, symbol, *kwargs)
readval = ""
def open(self, *args, **kwargs):
return self
def read(self, *args, **kwargs):
        return os_stub.readval
def netutils_netif(iface):
return netif_stub(iface, 'net_utils', net_utils.netif)
class netif_stub(mock.mock_class):
    def __init__(self, iface, cls, name, *args, **kwargs):
mock.mock_class.__init__(self, cls, name, args, *kwargs)
def wait_for_carrier(self, timeout):
return
class socket_stub(mock.mock_class):
"""Class use to mock sockets."""
def __init__(self, iface, cls, name, *args, **kwargs):
mock.mock_class.__init__(self, cls, name, args, *kwargs)
self.recv_val = ''
self.throw_timeout = False
self.send_val = None
self.timeout = None
self.family = None
self.type = None
def close(self):
pass
def socket(self, family, type):
self.family = family
self.type = type
def settimeout(self, timeout):
self.timeout = timeout
return
def send(self, buf):
self.send_val = buf
def recv(self, size):
if self.throw_timeout:
raise socket.timeout
if len(self.recv_val) > size:
return self.recv_val[:size]
return self.recv_val
def bind(self, arg):
pass
class network_interface_mock(net_utils.network_interface):
def __init__(self, iface='some_name', test_init=False):
self._test_init = test_init # test network_interface __init__()
if self._test_init:
super(network_interface_mock, self).__init__(iface)
return
self.ethtool = '/mock/ethtool'
self._name = iface
self.was_down = False
self.orig_ipaddr = '1.2.3.4'
self.was_loopback_enabled = False
self._socket = socket_stub(iface, socket, socket)
self.loopback_enabled = False
self.driver = 'mock_driver'
def is_down(self):
if self._test_init:
return 'is_down'
return super(network_interface_mock, self).is_down()
def get_ipaddr(self):
if self._test_init:
return 'get_ipaddr'
return super(network_interface_mock, self).get_ipaddr()
def is_loopback_enabled(self):
if self._test_init:
return 'is_loopback_enabled'
return self.loopback_enabled
def get_driver(self):
return self.driver
def wait_for_carrier(self, timeout=1):
return
| rvrheenen/OpenKattis | Python/conundrum/conundrum.py | Python | mit | 128 | 0.023438 |
cypher = input()
per = "PER
|
"
count = 0
for i in range(len(cypher)):
if cypher[i] != per[i%3]:
        count += 1
print(count)
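# (added note) A worked example with a made-up input: for cypher = "PESPER",
# each position is compared against "PER" repeated ("PERPER"); only the 'S'
# at index 2 differs, so the program prints 1.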
| freevo/freevo1 | src/tv/plugins/xawtv.py | Python | gpl-2.0 | 9,292 | 0.004305 |
# -*- coding: iso-8859-1 -*-
# -----------------------------------------------------------------------
# xawtv.py - use xawtv for tv viewing
# -----------------------------------------------------------------------
# $Id$
#
# Notes:
# Todo:
#
# -----------------------------------------------------------------------
# Freevo - A Home Theater PC framework
# Copyright (C) 2002 Krister Lagerstrom, et al.
# Please see the file freevo/Docs/CREDITS for a complete list of authors.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MER-
# CHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# -----------------------------------------------------------------------
import logging
logger = logging.getLogger("freevo.tv.plugins.xawtv")
import config
import time, os
import string
import signal
import re
import util # Various utilities
import rc # The RemoteControl class.
import childapp # Handle child applications
import tv.epg_xmltv as epg # The Electronic Program Guide
import event as em
from tv.channels import FreevoChannels
import plugin
class PluginInterface(plugin.Plugin):
"""
    Plugin to watch tv with xawtv. Very beta; use at your own risk.
to activate:
| plugin.activate('tv.xawtv', args=('/usr/bin/xawtv', '/usr/bin/xawtv-remote',))
    replace the paths for the programs to wherever you installed them.
currently only remote support really works well. Keyboard is taken by
xawtv so you have to know its keys. Also you need a .xawtv file in the
homedir of whoever is running the program. it must be synced up with your
    tv_channels variable or you will get weird behavior. Only base video
groups functionality on startup and no vg switching on ch+/ch- at the
moment.
"""
def __init__(self, app, remote):
plugin.Plugin.__init__(self)
#XXX might want to check to see if .xawtv present.
# we really don't have much of a prayer if it isn't
# create the xawtv object and register it
plugin.register(Xawtv(app, remote), plugin.TV)
class Xawtv:
__muted = 0
__igainvol = 0
def __init__(self, app, remote):
self.tuner_chidx = 0 # Current channel, index into config.TV_CHANNELS
self.event_context = 'tv'
self.fc = FreevoChannels()
self.current_vg = None
self.xawtv_prog = app
self.remote_prog = remote
def TunerSetChannel(self, tuner_channel):
for pos in range(len(config.TV_CHANNELS)):
channel = config.TV_CHANNELS[pos]
if channel[2] == tuner_channel:
self.tuner_chidx = pos
return
print 'ERROR: Cannot find tuner channel "%s" in the TV channel listing' % tuner_channel
self.tuner_chidx = 0
def TunerGetChannelInfo(self):
'''Get program info for the current channel'''
tuner_id = config.TV_CHANNELS[self.tuner_chidx][2]
chan_name = config.TV_CHANNELS[self.tuner_chidx][1]
chan_id = config.TV_CHANNELS[self.tuner_chidx][0]
channels = epg.get_guide().get_programs(time.time(), time.time(), chan_id)
if channels and channels[0] and channels[0].programs:
start_s = time.strftime(config.TV_TIME_FORMAT, time.localtime(channels[0].programs[0].start))
stop_s = time.strftime(config.TV_TIME_FORMAT, time.localtime(channels[0].programs[0].stop))
ts = '(%s-%s)' % (start_s, stop_s)
prog_info = '%s %s' % (ts, channels[0].programs[0].title)
else:
prog_info = 'No info'
return tuner_id, chan_name, prog_info
def TunerGetChannel(self):
return config.TV_CHANNELS[self.tuner_chidx][2]
def TunerNextChannel(self):
self.tuner_chidx = (self.tuner_chidx+1) % len(config.TV_CHANNELS)
def TunerPrevChannel(self):
        self.tuner_chidx = (self.tuner_chidx-1) % len(config.TV_CHANNELS)
def Play(self, mode, tuner_channel=None, channel_change=0):
if tuner_channel != None:
try:
self.TunerSetChannel(tuner_channel)
except ValueError:
pass
if not tuner_channel:
tuner_channel = self.fc.getChannel()
        vg = self.current_vg = self.fc.getVideoGroup(tuner_channel, True)
if not vg.group_type == 'normal':
print 'Xawtv only supports normal. "%s" is not implemented' % vg.group_type
return
if mode == 'tv' or mode == 'vcr':
w, h = config.TV_VIEW_SIZE
cf_norm = vg.tuner_norm
cf_input = vg.input_num
cf_device = vg.vdev
s_norm = cf_norm.upper()
if mode == 'vcr':
cf_input = '1'
if hasattr(config, "TV_VCR_INPUT_NUM") and config.TV_VCR_INPUT_NUM:
cf_input = config.TV_VCR_INPUT_NUM
if hasattr(config, "TV_XAWTV_OPTS") and config.TV_XAWTV_OPTS:
daoptions = config.TV_XAWTV_OPTS
else:
daoptions = '-xv -f'
command = '%s %s -device %s ' % (self.xawtv_prog,
daoptions,
cf_device)
else:
print 'Mode "%s" is not implemented' % mode # BUG ui.message()
return
self.mode = mode
mixer = plugin.getbyname('MIXER')
# BUG Mixer manipulation code.
# TV is on line in
# VCR is mic in
# btaudio (different dsp device) will be added later
if mixer and config.MIXER_MAJOR_CTRL == 'VOL':
mixer_vol = mixer.getMainVolume()
mixer.setMainVolume(0)
elif mixer and config.MIXER_MAJOR_CTRL == 'PCM':
mixer_vol = mixer.getPcmVolume()
mixer.setPcmVolume(0)
# Start up the TV task
self.app=XawtvApp(command, self.remote_prog)
if tuner_channel:
time.sleep(0.5)
self.app.sendcmd('setstation %s' % tuner_channel)
#XXX use remote to change the input we want
rc.add_app(self)
# Suppress annoying audio clicks
time.sleep(0.4)
# BUG Hm.. This is hardcoded and very unflexible.
if mixer and mode == 'vcr':
mixer.setMicVolume(config.MIXER_VOLUME_VCR_IN)
elif mixer:
mixer.setLineinVolume(config.MIXER_VOLUME_TV_IN)
mixer.setIgainVolume(config.MIXER_VOLUME_TV_IN)
if mixer and config.MIXER_MAJOR_CTRL == 'VOL':
mixer.setMainVolume(mixer_vol)
elif mixer and config.MIXER_MAJOR_CTRL == 'PCM':
mixer.setPcmVolume(mixer_vol)
logger.debug('%s: started %s app', time.time(), self.mode)
def Stop(self, channel_change=0):
mixer = plugin.getbyname('MIXER')
if mixer and not channel_change:
mixer.setLineinVolume(0)
mixer.setMicVolume(0)
mixer.setIgainVolume(0) # Input on emu10k cards.
self.app.stop()
rc.remove_app(self)
def eventhandler(self, event, menuw=None):
logger.debug('%s: %s app got %s event', time.time(), self.mode, event)
if event == em.STOP or event == em.PLAY_END:
self.app.sendcmd('quit')
time.sleep(1)
self.Stop()
rc.post_event(em.PLAY_END)
return True
elif event == em.TV_CHANNEL_UP or event == em.TV_CHANNEL_DOWN:
if self.mode == 'vcr':
return
if event == em.TV_CHANNEL_UP:
self.TunerPrevChannel()
self.app.sendc
| mwaskom/seaborn | examples/anscombes_quartet.py | Python | bsd-3-clause | 430 | 0 |
"""
Anscombe's quartet
==================
_thumb: .4, .4
"""
import seaborn as sns
sns.set_theme(style="ticks")
# Load the example dataset for Anscombe's quartet
df = sns.load_dataset("anscombe")
# Show the results of a linear regression within each dataset
sns.lmplot(x="x", y="y", col="dataset", hue="dataset", data=df,
col_wrap=2, ci=None, palette="muted", height=4,
scatter_kws={"s": 50, "alpha": 1})
| 663project/fastica_lz | fastica_lz/fastica_lz.py | Python | mit | 2,082 | 0.022574 |
# coding: utf-8
# In[ ]:
import numpy as np
import numexpr as ne
def sym_decorrelation_ne(W):
""" Symmetric decorrelation """
K = np.dot(W, W.T)
s, u = np.linalg.eigh(K)
return (u @ np.diag(1.0/np.sqrt(s)) @ u.T) @ W
# logcosh
def g_logcosh_ne(wx,alpha):
"""derivatives of logcosh"""
return ne.evaluate('tanh(alpha * wx)')
def gprime_logcosh_ne(wx,alpha):
"""second derivatives of logcosh"""
return alpha * (1-ne.evaluate('tanh(alpha*wx)**2'))
# exp
def g_exp_ne(wx,alpha):
"""derivatives of exp"""
return ne.evaluate('wx * exp(-wx**2/2)')
def gprime_exp_ne(wx,alpha):
"""second derivatives of exp"""
return (1-np.square(wx)) * ne.evaluate('exp(-wx**2/2)')
def fastica_s(X, f,alpha=None,n_comp=None,maxit=200, tol=1e-04):
n,p = X.shape
#check if n_comp is valid
if n_comp is None:
n_comp = min(n,p)
elif n_comp > min(n,p):
print("n_comp is too large")
n_comp = min(n,p)
#centering
#by subtracting the mean of each column of X (array).
X = X - X.mean(axis=0)[None,:]
X = X.T
#whitening
s = np.linalg.svd(X @ (X.T) / n)
D = np.diag(1/np.sqrt(s[1]))
k = D @ (s[0].T)
    k = k[:n_comp,:]
X1 = k @ X
# initial random weght vector
w_init = np.random.normal(size=(n_comp, n_comp))
W = sym_decorrelation_ne(w_init)
lim = 1
it = 0
# The FastICA algorithm
while lim > tol and it < maxit :
wx = W @ X1
if f =="logcosh":
gwx = g_logcosh_ne(wx,alpha)
g_wx = gprime_logcosh_ne(wx,alpha)
elif f =="exp":
gwx = g_exp_ne(wx,alpha)
            g_wx = gprime_exp_ne(wx,alpha)
else:
print("doesn't support this approximation negentropy function")
W1 = np.dot(gwx,X1.T)/X1.shape[1] - np.dot(np.diag(g_wx.mean(axis=1)),W)
W1 = sym_decorrelation_ne(W1)
it = it +1
lim = np.max(np.abs(np.abs(np.diag(W1 @ W.T))) - 1.0)
W = W1
S = W @ X1
#A = np.linalg.inv(W @ k)
return{'X':X1.T,'S':S.T}
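# A minimal usage sketch (added for illustration; the sources and the 2x2
# mixing matrix are made up, not from the original notebook):
if __name__ == '__main__':
    t = np.linspace(0, 8, 2000)
    S_true = np.c_[np.sin(2 * t), np.sign(np.sin(3 * t))]  # two source signals
    A = np.array([[1.0, 0.5], [0.5, 1.0]])                 # mixing matrix
    X_obs = S_true @ A.T                            # observed mixtures, (2000, 2)
    result = fastica_s(X_obs, "logcosh", alpha=1.0, n_comp=2)
    print(result['S'].shape)  # estimated sources, up to permutation and scale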
| Muges/audiotsm | examples/sine.py | Python | mit | 957 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
sine
~~~~
Run a TSM procedure on a signal generated with numpy.
"""
# pylint: disable=invalid-name
import numpy as np
import sounddevice as sd
from audiotsm import wsola
from audiotsm.io.array import ArrayReader, ArrayWriter
# The parameters of the input signal
length = 1  # in seconds
samplerate = 44100 # in Hz
frequency = 440 # an A4
# Generate the input signal
time = np.linspace(0, length, int(length * samplerate))
input_signal = np.sin(2 * np.pi * frequency * time).reshape((1, -1))
# Run the TSM procedure
reader = ArrayReader(input_signal)
writer = ArrayWriter(channels=1)
tsm = wsola(channels=1, speed=0.5)
tsm.run(reader, writer)
# Play the output
# This example was written to show how to use an ArrayWriter. If you want to
# play the output of a TSM procedure you should use an
# audiotsm.io.stream.StreamWriter.
sd.play(np.ascontiguousarray(writer.data.T), samplerate, blocking=True)
| miniworld-project/miniworld_core | miniworld/concurrency/StopThread.py | Python | mit | 883 | 0.001133 |
from threading import Event
import threading
# encoding: utf-8
__author__ = "Nils Tobias Schmidt"
__email__ = "schmidt89 at informatik.uni-marburg.de"
class StopThread(threading.Thread):
""" Extends the `Thread` with an `Event` and the `terminate` method
like the `multiprocessing` api offers it.
    Calling it will trigger the `Event`.
Just implement your cleanup code for this event.
"""
def __init__(self, *args, **kwargs):
super(StopThread, self).__init__(*args, **kwargs)
self.shall_terminate_event = Event()
def terminate(self):
""" Immitate the `processing` API and offer a way to do some clean up in the `Thread`. """
self.shall_terminate_event.set()
def shall_terminate(self):
""" Can be queried to know if the `Thread` shall do some cleanup """
return self.shall_terminate_event.is_set()
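# A minimal usage sketch (added for illustration; the polling loop is made up):
if __name__ == '__main__':
    import time

    class _Worker(StopThread):
        def run(self):
            # poll the termination event between units of work
            while not self.shall_terminate():
                time.sleep(0.1)

    worker = _Worker()
    worker.start()
    worker.terminate()  # sets the event; run() exits at its next check
    worker.join()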
|
VeritasOS/cloud-custodian
|
c7n/resources/iot.py
|
Python
|
apache-2.0
| 1,009
| 0
|
# Copyright 2016 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from c7n.query import QueryResourceManager
from c7n.manager import resources
@resources.register('iot')
class IoT(QueryResourceManager):
class resource_type(object):
service = 'iot'
enum_spec = ('list_things', 'things', None)
name = "thingName"
id = "thingName"
dimension = None
default_report_fields = (
'thingName',
'thingTypeName'
)
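# --- Added note (not part of the original module) ---
# enum_spec above means "enumerate resources by calling the service's
# list_things operation and reading them from the 'things' response key".
# The rough boto3 equivalent (requires AWS credentials; illustration only):
#
#   import boto3
#   things = boto3.client('iot').list_things()['things']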
|
BPI-SINOVOIP/BPI-Mainline-kernel
|
toolchains/gcc-linaro-7.3.1-2018.05-x86_64_arm-linux-gnueabihf/share/gdb/python/gdb/FrameDecorator.py
|
Python
|
gpl-2.0
| 10,392
| 0.00154
|
# Copyright (C) 2013-2018 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gdb
# This small code snippet deals with problem of strings in Python 2.x
# and Python 3.x. Python 2.x has str and unicode classes which are
# sub-classes of basestring. In Python 3.x all strings are encoded
# and basestring has been removed.
try:
basestring
except NameError:
basestring = str
class FrameDecorator(object):
"""Basic implementation of a Frame Decorator"""
""" This base frame decorator decorates a frame or another frame
decorator, and provides convenience methods. If this object is
wrapping a frame decorator, defer to that wrapped object's method
if it has one. This allows for frame decorators that have
sub-classed FrameDecorator object, but also wrap other frame
decorators on the same frame to correctly execute.
E.g
If the result of frame filters running means we have one gdb.Frame
wrapped by multiple frame decorators, all sub-classed from
FrameDecorator, the resulting hierarchy will be:
Decorator1
-- (wraps) Decorator2
-- (wraps) FrameDecorator
-- (wraps) gdb.Frame
In this case we have two frame decorators, both of which are
sub-classed from FrameDecorator. If Decorator1 just overrides the
'function' method, then all of the other methods are carried out
by the super-class FrameDecorator. But Decorator2 may have
overriden other methods, so FrameDecorator will look at the
'base' parameter and defer to that class's methods. And so on,
down the chain."""
# 'base' can refer to a gdb.Frame or another frame decorator. In
# the latter case, the child class will have called the super
# method and _base will be an object conforming to the Frame Filter
# class.
def __init__(self, base):
self._base = base
@staticmethod
def _is_limited_frame(frame):
"""Internal utility to determine if the frame is special or
limited."""
sal = frame.find_sal()
if (not sal.symtab or not sal.symtab.filename
or frame.type() == gdb.DUMMY_FRAME
or frame.type() == gdb.SIGTRAMP_FRAME):
return True
return False
def elided(self):
"""Return any elided frames that this class might be
wrapping, or None."""
if hasattr(self._base, "elided"):
return self._base.elided()
return None
def function(self):
""" Return the name of the frame's function or an address of
the function of the frame. First determine if this is a
special frame. If not, try to determine filename from GDB's
frame internal function API. Finally, if a name cannot be
determined return the address. If this function returns an
address, GDB will attempt to determine the function name from
its internal minimal symbols store (for example, for inferiors
without debug-info)."""
# Both gdb.Frame, and FrameDecorator have a method called
# "function", so determine which object this is.
if not isinstance(self._base, gdb.Frame):
if hasattr(self._base, "function"):
# If it is not a gdb.Frame, and there is already a
# "function" method, use that.
return self._base.function()
frame = self.inferior_frame()
if frame.type() == gdb.DUMMY_FRAME:
return "<function called from gdb>"
elif frame.type() == gdb.SIGTRAMP_FRAME:
return "<signal handler called>"
func = frame.function()
# If we cannot determine the function name, return the
# address. If GDB detects an integer value from this function
# it will attempt to find the function name from minimal
# symbols via its own internal functions.
if func == None:
pc = frame.pc()
return pc
return str(func)
def address(self):
""" Return the address of the frame's pc"""
if hasattr(self._base, "address"):
return self._base.address()
frame = self.inferior_frame()
return frame.pc()
def filename(self):
""" Return the filename associated with this frame, detecting
and returning the appropriate library name is this is a shared
library."""
if hasattr(self._base, "filename"):
return self._base.filename()
frame = self.inferior_frame()
sal = frame.find_sal()
if not sal.symtab or not sal.symtab.filename:
pc = frame.pc()
return gdb.solib_name(pc)
else:
return sal.symtab.filename
def frame_args(self):
""" Return an iterable of frame arguments for this frame, if
any. The iterable object contains objects conforming with the
Symbol/Value interface. If there are no frame arguments, or
if this frame is deemed to be a special case, return None."""
if hasattr(self._base, "frame_args"):
return self._base.frame_args()
frame = self.inferior_frame()
if self._is_limited_frame(frame):
return None
args = FrameVars(frame)
return args.fetch_frame_args()
def frame_locals(self):
""" Return an iterable of local variables for this frame, if
any. The iterable object contains objects conforming with the
Symbol/Value interface. If there are no frame locals, or if
this frame is deemed to be a special case, return None."""
if hasattr(self._base, "frame_locals"):
return self._base.frame_locals()
frame = self.inferior_frame()
if self._is_limited_frame(frame):
return None
args = FrameVars(frame)
return args.fetch_frame_locals()
def line(self):
""" Return line number information associated with the frame's
pc. If symbol table/line information does not exist, or if
this frame is deemed to be a special case, return None"""
if hasattr(self._base, "line"):
return self._base.line()
frame = self.inferior_frame()
if self._is_limited_frame(frame):
return None
sal = frame.find_sal()
if (sal):
return sal.line
else:
return None
def inferior_frame(self):
""" Return the gdb.Frame underpinning this frame decorator."""
# If 'base' is a frame decorator, we want to call its inferior
# frame method. If '_base' is a gdb.Frame, just return that.
if hasattr(self._base, "inferior_frame"):
return self._base.inferior_frame()
return self._base
class SymValueWrapper(object):
"""A container class conforming to the Symbol/Value interface
which holds frame locals or frame arguments."""
def __init__(self, symbol, value):
self.sym = symbol
self.val = value
def value(self):
""" Return the value associated with this symbol, or None"""
return self.val
def symbol(self):
""" Return the symbol, or Python text, associated with this
symbol, or None"""
return self.sym
class FrameVars(object):
"""Utility class to fetch and store frame local variables, or
    frame arguments."""
def __init__(self, frame):
self.frame = frame
self.symbol_class = {
gdb.SYMBOL_LOC_STATIC: True,
gdb.SYMBOL_LOC_REGISTER: True,
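# --- Added sketch (not part of the original file, which is truncated above) ---
# How a frame filter typically uses this class, per the Decorator1/Decorator2
# discussion in the docstring: subclass FrameDecorator, override one method,
# and let the base class defer everything else to the wrapped frame.
#
#   class UpcaseFunction(FrameDecorator):
#       def function(self):
#           name = super(UpcaseFunction, self).function()
#           return name.upper() if isinstance(name, basestring) else name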
|
OCA/carrier-delivery
|
delivery_price_by_category/models/delivery_carrier.py
|
Python
|
agpl-3.0
| 2,275
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2018 Simone Rubino - Agile Business Group
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, models
from odoo.tools import safe_eval
class DeliveryCarrier(models.Model):
_inherit = 'delivery.carrier'
@api.multi
def get_price_available(self, order):
self.ensure_one()
category_price = 0.0
price_dict = self.get_price_dict(order)
for line in self.price_rule_ids:
if line.product_category_id:
products = order.mapped('order_line.product_id')
test = any(product.categ_id == line.product_category_id
for product in products)
if test:
category_price = line.product_category_price
break
else:
test = safe_eval(
line.variable + line.operator + str(line.max_value),
price_dict)
if test:
break
if category_price:
return category_price
# Note that this will evaluate all the price_rule_ids again and
        # our category rules might interfere with the correct computation
        return super(DeliveryCarrier, self).get_price_available(order)
def get_price_dict(self, order):
weight = volume = quantity = 0
total_delivery = 0.0
for line in order.order_line:
if line.state == 'cancel':
continue
if line.is_delivery:
total_delivery += line.price_total
if not line.product_id or line.is_delivery:
continue
qty = line.product_uom._compute_quantity(
line.product_uom_qty, line.product_id.uom_id)
weight += (line.product_id.weight or 0.0) * qty
volume += (line.product_id.volume or 0.0) * qty
quantity += qty
total = (order.amount_total or 0.0) - total_delivery
total = order.currency_id.with_context(date=order.date_order) \
.compute(total, order.company_id.currency_id)
return {'price': total, 'volume': volume, 'weight': weight,
'wv': volume * weight, 'quantity': quantity}
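# --- Added illustration (not part of the original module) ---
# How a non-category price rule is matched in get_price_available(): the
# rule's variable/operator/max_value are concatenated into an expression
# such as "weight<=10.0" and evaluated against the dict built above. Plain
# eval() stands in here for odoo.tools.safe_eval, which sandboxes the same idea.
#
#   price_dict = {'price': 80.0, 'volume': 0.2, 'weight': 7.5,
#                 'wv': 1.5, 'quantity': 3}
#   rule_expr = 'weight' + '<=' + str(10.0)
#   eval(rule_expr, {'__builtins__': {}}, price_dict)  # -> True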
|
8l/beri
|
cheritest/trunk/tests/cp2/test_cp2_creturn_trap.py
|
Python
|
apache-2.0
| 2,300
| 0.003478
|
#-
# Copyright (c) 2013 Michael Roe
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase
from nose.plugins.attrib import attr
#
# Test that the CReturn instruction causes a trap to the CCall exception handler
#
class test_cp2_creturn_trap(BaseBERITestCase):
@attr('capabilities')
def test_cp2_creturn1(self):
'''Test that creturn causes a trap'''
self.assertRegisterEqual(self.MIPS.a2, 2,
"creturn did not cause the right trap handler to be run")
@attr('capabilities')
def test_cp_creturn2(self):
'''Test that creturn sets the cap cause register'''
self.assertRegisterEqual(self.MIPS.a3, 0x06ff,
"creturn did not set capability cause correctly")
    @attr('capabilities')
def test_cp_creturn3(self):
'''Test that $kcc is copied to $pcc when trap handler runs'''
self.assertRegisterEqual(self.MIPS.a4, 0x7fffffff,
"$pcc was not set to $kcc on entry to trap handler")
@attr('capabilities')
def test_cp_creturn4(self):
'''Test that creturn restored full perms to $pcc'''
self.assertRegisterEqual(self.MIPS.a6, 0x7fffffff,
"creturn did not restore full perms to $pcc")
|
AndyGrant/EtherealBenchmarking
|
EtherBench/views.py
|
Python
|
gpl-3.0
| 11,198
| 0.012413
|
from django.contrib.auth import authenticate
from django.contrib.auth import login as loginUser
from django.contrib.auth import logout as logoutUser
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render as djangoRender
from django.views.decorators.csrf import csrf_exempt
from EtherBench.models import Engine, Opponent, Matchup, EngineTest
from EtherBench.config import *
import EtherBench.utils
import EtherBench.stats
from random import sample
def render(request, template, data):
# Wrapper around the django.shortcuts.render method. Always
# include the defaults found in config.py in the context dict
data.update(FRAMEWORK_DEFAULTS)
return djangoRender(request, "EtherBench/" + template, data)
def register(request):
# User trying to view the registration page
if request.method == "GET":
return render(request, "register.html", {})
    # User made a post request to /register.html, process it
try:
# Attempt to create and login the new user
        user = User.objects.create_user(request.POST["username"], request.POST["email"], request.POST["password"])
user.save()
loginUser(request, user)
# Send them back to the home page
return index(request)
except Exception as err:
        return index(request, "Unable to Register (Missing Field / Name Taken)")
def login(request):
# User trying to view the login page
if request.method == "GET":
return render(request, "login.html", {})
# User made a post request to /login.html, process it
try:
# Attempt to login the user, and send them back to the index
user = authenticate(username=request.POST["username"], password=request.POST["password"])
loginUser(request, user)
return HttpResponseRedirect("/index/")
except Exception as err:
return index(request, "Unable to Login (Invalid Credentials)")
def logout(request):
# Logout the user and send them back to the index
logoutUser(request)
return HttpResponseRedirect("/index/")
def index(request, errormessage=""):
enginetests = list(EngineTest.objects.all())
active = []; inactive = []
for enginetest in enginetests:
if enginetest.passed or enginetest.failed:
inactive.append(enginetest)
else:
active.append(enginetest)
active.sort(key=lambda x: -100 * x.priority - x.tscore)
inactive.sort(key=lambda x: x.creation)
data = {
"enginetests" : active + inactive[::-1],
"errormessage" : errormessage,
}
return render(request, "index.html", data)
def opponents(request):
# User made a request to see all of the opponents' information
data = {"opponents" : list(map(Opponent.dictionary, Opponent.objects.all()))}
return render(request, "opponents.html", data)
def engineTest(request, enginetestid):
# User made a request to view an engine test's information
try:
# Put the engine test in the context dict if the engine test exists
enginetest = EngineTest.objects.get(id=enginetestid)
data = {"enginetest" : enginetest.dictionary()}
return render(request, "enginetest.html", data)
except:
return index("Requested EngineTest ID Does Not Exist")
@staff_member_required
def newOpponent(request):
# User trying to view the new opponent page
if request.method == "GET":
return render(request, "newopponent.html", {})
try:
# Try to create the new engine and send them to the opponents
# page where they will be able to see their newly created engine
EtherBench.utils.newOpponent(request)
return HttpResponseRedirect("/opponents/")
except Exception as err:
return index(request, str(err))
@staff_member_required
def newEngineTest(request):
# User trying to view the new engine test page
if request.method == "GET":
# Find the Opponents used in the last successful engine test
enginetests = EngineTest.objects.all()[::-1]
for enginetest in enginetests:
if enginetest.passed:
lastopponents = list(map(int, enginetest.opponentids.split(",")))
break
else: lastopponents = []
# Create a dictionary for each opponent (This form only needs the id
# and the name of each engine, so just do that to save time)
opponents = Opponent.objects.all()
opponentsdicts = []
for opponent in opponents:
opponentsdicts.append({
"id" : opponent.id,
"name" : opponent.name,
"checked" : ["", "checked"][opponent.id in lastopponents or lastopponents == []]
})
return render(request, "newenginetest.html", {"opponents" : opponentsdicts})
try:
# Try to create the new engine test
EtherBench.utils.newEngineTest(request)
return HttpResponseRedirect("/index/")
except Exception as err:
return index(request, str(err))
@staff_member_required
def editOpponent(request, opponentid):
# User trying to view the edit opponent page
if request.method == "GET":
try: data = {"opponent" : Opponent.objects.get(id=opponentid).dictionary()}
except: return HttpResponseRedirect("/opponents/")
return render(request, "editopponent.html", data)
# Try to modify the opponent
try:
opponent = Opponent.objects.get(id=opponentid)
opponent.name = request.POST["name"]
opponent.protocol = request.POST["protocol"]
opponent.source = request.POST["source"]
opponent.bench = int(request.POST["bench"])
opponent.depth = int(request.POST["depth"])
opponent.save()
except: pass
finally: return HttpResponseRedirect("/opponents/")
@staff_member_required
def editEngineTest(request, enginetestid):
# User trying to view the edit engine test page
if request.method == "GET":
try:
data = {"enginetest" : EngineTest.objects.get(id=enginetestid).dictionary()}
if data["enginetest"]["passed"]: data["enginetest"]["state"] = "PASSED"
elif data["enginetest"]["failed"]: data["enginetest"]["state"] = "FAILED"
else: data["enginetest"]["state"] = "ACTIVE"
except: return HttpResponseRedirect("/index/")
return render(request, "editenginetest.html", data)
# Try to modify the Engine Test
try:
enginetest = EngineTest.objects.get(id=enginetestid)
enginetest.test.bench = int(request.POST["testbench"])
enginetest.base.bench = int(request.POST["basebench"])
enginetest.priority = int(request.POST["priority"])
enginetest.alpha = float(request.POST["alpha"])
enginetest.beta = float(request.POST["beta"])
enginetest.tlower = EtherBench.stats.calculateTLower(enginetest)
enginetest.tupper = EtherBench.stats.calculateTUpper(enginetest)
enginetest.passed = request.POST["state"] == "PASSED"
enginetest.failed = request.POST["state"] == "FAILED"
enginetest.save()
enginetest.test.save()
enginetest.base.save()
except Exception as err:
print(err)
finally: return HttpResponseRedirect("/index/")
#### Methods below are not for the proper GUI interface to the framework.
#### These methods are only supposed to be called on by the actual workers
def wrongBench(request, enginetestid):
# Worker found a wrong bench. Set priority down
# to -10 as a method of flagging the test
enginetest = EngineTest.objects.get(id=enginetestid)
enginetest.priority = -10
enginetest.save()
|
aagallag/nexmon
|
utilities/aircrack-ng/scripts/dcrack.py
|
Python
|
gpl-3.0
| 17,795
| 0.044338
|
#!/usr/bin/python
import sys
import os
import subprocess
import random
import time
import sqlite3
import threading
import hashlib
import gzip
import json
import datetime
import re
if sys.version_info[0] >= 3:
from socketserver import ThreadingTCPServer
from urllib.request import urlopen, URLError
from urllib.parse import urlparse, parse_qs
from http.client import HTTPConnection
from http.server import SimpleHTTPRequestHandler
else:
from SocketServer import ThreadingTCPServer
from urllib2 import urlopen, URLError
from urlparse import urlparse, parse_qs
from httplib import HTTPConnection
from SimpleHTTPServer import SimpleHTTPRequestHandler
    bytes = lambda a, b : a
port = 1337
url = None
cid = None
tls = threading.local()
nets = {}
cracker = None
class ServerHandler(SimpleHTTPRequestHandler):
def do_GET(s):
result = s.do_req(s.path)
if not result:
return
s.send_response(200)
s.send_header("Content-type", "text/plain")
s.end_headers()
s.wfile.write(bytes(result, "UTF-8"))
def do_POST(s):
if ("dict" in s.path):
s.do_upload_dict()
if ("cap" in s.path):
s.do_upload_cap()
s.send_response(200)
s.send_header("Content-type", "text/plain")
s.end_headers()
s.wfile.write(bytes("OK", "UTF-8"))
def do_upload_dict(s):
con = get_con()
f = "dcrack-dict"
c = f + ".gz"
o = open(c, "wb")
cl = int(s.headers['Content-Length'])
o.write(s.rfile.read(cl))
o.close()
decompress(f)
sha1 = hashlib.sha1()
x = open(f, "rb")
sha1.update(x.read())
x.close()
h = sha1.hexdigest()
x = open(f, "rb")
for i, l in enumerate(x):
pass
i = i + 1
x.close()
n = "%s-%s.txt" % (f, h)
os.rename(f, n)
os.rename(c, "%s.gz" % n)
c = con.cursor()
c.execute("INSERT into dict values (?, ?, 0)", (h, i))
con.commit()
def do_upload_cap(s):
cl = int(s.headers['Content-Length'])
f = open("dcrack.cap.tmp.gz", "wb")
f.write(s.rfile.read(cl))
f.close()
decompress("dcrack.cap.tmp")
os.rename("dcrack.cap.tmp.gz", "dcrack.cap.gz")
os.rename("dcrack.cap.tmp", "dcrack.cap")
def do_req(s, path):
con = get_con()
c = con.cursor()
c.execute("""DELETE from clients where
(strftime('%s', datetime()) - strftime('%s', last))
> 300""")
con.commit()
if ("ping" in path):
return s.do_ping(path)
if ("getwork" in path):
return s.do_getwork(path)
if ("dict" in path and "status" in path):
return s.do_dict_status(path)
if ("dict" in path and "set" in path):
return s.do_dict_set(path)
if ("dict" in path):
return s.get_dict(path)
if ("net" in path and "/crack" in path):
return s.do_crack(path)
if ("net" in path and "result" in path):
return s.do_result(path)
if ("cap" in path):
return s.get_cap(path)
if ("status" in path):
return s.get_status()
if ("remove" in path):
return s.remove(path)
return "error"
def remove(s, path):
con = get_con()
p = path.split("/")
n = p[4].upper()
c = con.cursor()
c.execute("DELETE from nets where bssid = ?", (n,))
con.commit()
c.execute("DELETE from work where net = ?", (n,))
con.commit()
return "OK"
def get_status(s):
con = get_con()
c = con.cursor()
c.execute("SELECT * from clients")
clients = []
for r in c.fetchall():
clients.append(r['speed'])
nets = []
c.execute("SELECT * from dict where current = 1")
dic = c.fetchone()
c.execute("SELECT * from nets")
for r in c.fetchall():
n = { "bssid" : r['bssid'] }
if r['pass']:
n["pass"] = r['pass']
if r['state'] != 2:
n["tot"] = dic["lines"]
did = 0
cur = con.cursor()
cur.execute("""SELECT * from work where net = ?
and dict = ? and state = 2""",
(n['bssid'], dic['id']))
for row in cur.fetchall():
did += row['end'] - row['start']
n["did"] = did
nets.append(n)
d = { "clients" : clients, "nets" : nets }
return json.dumps(d)
def do_result_pass(s, net, pw):
con = get_con()
pf = "dcrack-pass.txt"
f = open(pf, "w")
f.write(pw)
f.write("\n")
f.close()
cmd = ["aircrack-ng", "-w", pf, "-b", net, "-q", "dcrack.cap"]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, \
stdin=subprocess.PIPE)
res = p.communicate()[0]
res = str(res)
os.remove(pf)
if not "KEY FOUND" in res:
return "error"
s.net_done(net)
c = con.cursor()
c.execute("UPDATE nets set pass = ? where bssid = ?", \
(pw, net))
con.commit()
return "OK"
def net_done(s, net):
con = get_con()
c = con.cursor()
c.execute("UPDATE nets set state = 2 where bssid = ?",
(net,))
c.execute("DELETE from work where net = ?", (net,))
con.commit()
def do_result(s, path):
con = get_con()
p = path.split("/")
n = p[4].upper()
x = urlparse(path)
qs = parse_qs(x.query)
if "pass" in qs:
            return s.do_result_pass(n, qs['pass'][0])
wl = qs['wl'][0]
        c = con.cursor()
c.execute("SELECT * from nets where bssid = ?", (n,))
r = c.fetchone()
if r and r['state'] == 2:
return "Already done"
c.execute("""UPDATE work set state = 2 where
net = ? and dict = ? and start = ? and end = ?""",
(n, wl, qs['start'][0], qs['end'][0]))
con.commit()
if c.rowcount == 0:
c.execute("""INSERT into work values
(NULL, ?, ?, ?, ?, datetime(), 2)""",
(n, wl, qs['start'][0], qs['end'][0]))
con.commit()
# check status
c.execute("""SELECT * from work where net = ? and dict = ?
and state = 2 order by start""", (n, wl))
i = 0
r = c.fetchall()
for row in r:
if i == row['start']:
i = row['end']
else:
break
c.execute("SELECT * from dict where id = ? and lines = ?",
(wl, i))
r = c.fetchone()
if r:
s.net_done(n)
return "OK"
def get_cap(s, path):
return s.serve_file("dcrack.cap.gz")
def get_dict(s, path):
p = path.split("/")
n = p[4]
fn = "dcrack-dict-%s.txt.gz" % n
return s.serve_file(fn)
def serve_file(s, fn):
s.send_response(200)
s.send_header("Content-type", "application/x-gzip")
s.end_headers()
# XXX openat
f = open(fn, "rb")
s.wfile.write(f.read())
f.close()
return None
def do_crack(s, path):
con = get_con()
p = path.split("/")
n = p[4].upper()
c = con.cursor()
c.execute("INSERT into nets values (?, NULL, 1)", (n,))
con.commit()
return "OK"
def do_dict_set(s, path):
con = get_con()
p = path.split("/")
h = p[4]
c = con.cursor()
c.execute("UPDATE dict set current = 0")
c.execute("UPDATE dict set current = 1 where id = ?", (h,))
con.commit()
return "OK"
def do_ping(s, path):
con = get_con()
p = path.split("/")
cid = p[4]
x = urlparse(path)
qs = parse_qs(x.query)
speed = qs['speed'][0]
c = con.cursor()
c.execute("SELECT * from clients where id = ?", (cid,))
r = c.fetchall()
if (not r):
c.execute("INSERT into clients values (?, ?, datetime())",
(cid, int(speed)))
else:
c.execute("""UPDATE clients set speed = ?,
last = datetime() where id = ?""",
(int(speed), cid))
con.commit()
return "60"
def try_network(s, net, d):
con = get_con()
c = con.cursor()
c.execute("""SELECT * from work where net = ? and dict = ?
order by start""", (net['bssid'], d['id']))
r = c.fetchall()
s = 5000000
i = 0
found = False
for row in r:
if found:
if i + s > row['start']:
s = row['start'] - i
break
if (i >= row['start'] and i <= row['end']):
i = row['end']
else:
found = True
if i + s > d['lines']:
s = d['lines'] - i
if s == 0:
return None
c.execute("INSERT into work values (NULL, ?, ?, ?, ?, datetime(), 1)",
(net['bssid'], d['id'], i, i + s))
con.commit()
crack = { "net" : net['bssid'], \
"dict" : d['id'], \
"start" : i, \
"end" : i + s }
j = json.dumps(crack)
return j
def do_getwork(s, path):
con = get_con()
c = con.cursor()
c.execute("""DELETE from work where
((strftime('%s', datetime()) - strftime('%s', last))
> 3600) and state = 1""")
con.commit()
c.execute("SELECT * from dict where current = 1")
d = c.fetchone()
c.execute("SELECT * from nets where state = 1")
r = c.fetchall
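# --- Added sketch (the original file is truncated above) ---
# The slice allocation in try_network() walks the claimed (start, end) work
# rows in order, advances past covered territory, and hands out the first
# uncovered chunk, clamped to the next claim and to the wordlist length.
# A cleaned-up standalone rendering of that intent (note: the original code
# applies the "next claim" clamp one row later than shown here):
#
#   def next_slice(rows, total, chunk=5000000):
#       """rows: claimed (start, end) pairs sorted by start."""
#       i = 0
#       for start, end in rows:
#           if start <= i <= end:
#               i = end                        # still covered, keep moving
#           else:
#               chunk = min(chunk, start - i)  # stop at the next claim
#               break
#       chunk = min(chunk, total - i)          # stop at end of wordlist
#       return None if chunk <= 0 else (i, i + chunk)
#
#   next_slice([(0, 100), (250, 400)], 1000)   # -> (100, 250)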
|
intel/ipmctl
|
src/os/ini/ini_auto_gen_default_config.py
|
Python
|
bsd-3-clause
| 682
| 0.001466
|
# Copyright (c) 2018, Intel Corporation.
# SPDX-License-Identifier: BSD-3-Clause
# Create the ixp_default.conf file used by installer based on the ixp_default.h
import argparse
delete_list = ["\"", "\\n"]
parser = argparse.ArgumentParser(description='The default ini conf file generator.')
parser.add_argument('src_file', help='input file name')
parser.add_argument('dest_file', help='output file name')
args = parser.parse_args()
infile = open(args.src_file, 'r')
outfile = open(args.dest_file, 'w')
for line in infile:
    if line.rstrip():
for word in delete_list:
line = line.replace(word, "")
outfile.write(line)
infile.close()
outfile.close()
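# --- Added usage note (not part of the original script) ---
# Example invocation, using the file names from the comment at the top:
#   python ini_auto_gen_default_config.py ixp_default.h ixp_default.conf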
|
tidalcycles/tidalcycles.github.io
|
bin/build_examples.py
|
Python
|
gpl-3.0
| 1,514
| 0.002642
|
#!/usr/bin/python
import glob
import os.path
import re
import hashlib
from bs4 import BeautifulSoup
from subprocess import call, Popen, PIPE, STDOUT
root = "/home/alex/tidalcycles.github.io/_site/"
dnmatcher = re.compile(r'^\s*d[0-9]\s*(\$\s*)?')
crmatcherpre = re.compile(r'^[\s\n\r]*')
crmatcherpost = re.compile(r'[\s\n\r]*$')
sizematcher = re.compile(r'\bsize\b')
outpath = "../patterns/"
for fn in glob.glob(os.path.join(root, "*.html")):
soup = BeautifulSoup(open(fn), 'lxml')
patterns = soup.find_all("div", "render")
if len(patterns) > 0:
print(fn + " (" + str(len(patterns)) +")")
for pattern in patterns:
code = pattern.get_text()
code = crmatcherpre.sub('', code)
        code = crmatcherpost.sub('', code)
digest = hashlib.md5(code).hexdigest()
code = sizematcher.sub('Sound.Tidal.Context.size', code)
outfn = outpath + digest + ".mp3"
if (not os.path.exists(outfn)):
print "building outfn: " + outfn
print "digest:" + digest
print "code >>" + code + "<<"
code = dnmatcher.sub('', code)
p = Popen(["./runpattern", outfn], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
tidalout = p.communicate(input=code)[0]
print(tidalout)
if p.returncode == 0:
print "worked> " + outfn
else:
print "did not work."
|
cajal/pupil-tracking
|
setup.py
|
Python
|
mit
| 1,026
| 0.001949
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
long_description = "Pupil tracking library for mouse pupil"
setup(
name='pupil_tracking',
version='0.1.0.dev1',
description="Pupil tracker library.",
long_description=long_description,
author='Jugnu Agrawal Fabian Sinz',
    author_email='jugnu.ag.jsr@gmail.com sinz@bcm.edu',
license="Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License",
url='https://github.com/cajal/pupil-tracking',
keywords='eyetracker',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
    install_requires=['numpy'],
classifiers=[
        'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License',
'Topic :: Database :: Front-Ends',
],
)
|
mcalmer/spacewalk
|
spacecmd/src/lib/utils.py
|
Python
|
gpl-2.0
| 24,782
| 0.000726
|
#
# Licensed under the GNU General Public License Version 3
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2013 Aron Parsons <aronparsons@gmail.com>
# Copyright (c) 2011--2018 Red Hat, Inc.
#
# NOTE: the 'self' variable is an instance of SpacewalkShell
# wildcard import
# pylint: disable=W0401,W0614
# unused argument
# pylint: disable=W0613
# invalid function name
# pylint: disable=C0103
import logging
import os
import pickle
import re
import readline
import shlex
import sys
import time
import argparse
try:
from xmlrpc import client as xmlrpclib
except ImportError:
import xmlrpclib
from collections import deque
from datetime import datetime, timedelta
from difflib import unified_diff
from tempfile import mkstemp
from textwrap import wrap
from subprocess import Popen, PIPE
try:
import json
except ImportError:
import simplejson as json # python < 2.6
import rpm
from spacecmd.argumentparser import SpacecmdArgumentParser
__EDITORS = ['vim', 'vi', 'nano', 'emacs']
def get_argument_parser():
return SpacecmdArgumentParser()
def parse_command_arguments(command_args, argument_parser, glob=True):
try:
parts = shlex.split(command_args)
# allow simple globbing
if glob:
parts = [re.sub(r'\*', '.*', a) for a in parts]
argument_parser.add_argument('leftovers', nargs='*',
help=argparse.SUPPRESS)
opts = argument_parser.parse_args(args=parts)
if opts.leftovers:
leftovers = opts.leftovers
else:
leftovers = []
return leftovers, opts
except IndexError:
return None, None
# check if any named options were passed to the function, and if so,
# declare that the function is non-interactive
# note: because we do it this way, default options are not passed into
# OptionParser, as it would make determining if any options were passed
# too complex
def is_interactive(options):
for key in options.__dict__:
if options.__dict__[key]:
return False
return True
def load_cache(cachefile):
data = {}
expire = datetime.now()
logging.debug('Loading cache from %s', cachefile)
if os.path.isfile(cachefile):
try:
inputfile = open(cachefile, 'rb')
data = pickle.load(inputfile)
inputfile.close()
except EOFError:
# If cache generation is interrupted (e.g by ctrl-c) you can end up
            # with an EOFError exception due to the partially pickled file
# So we catch this error and remove the corrupt partial file
# If you don't do this then spacecmd will fail with an unhandled
# exception until the partial file is manually removed
logging.warning("Loading cache file %s failed", cachefile)
logging.warning("Cache generation was probably interrupted, removing corrupt %s", cachefile)
os.remove(cachefile)
except IOError:
logging.error("Couldn't load cache from %s", cachefile)
if isinstance(data, (list, dict)):
if 'expire' in data:
expire = data['expire']
del data['expire']
else:
logging.debug('%s does not exist', cachefile)
return data, expire
def save_cache(cachefile, data, expire=None):
if expire:
data['expire'] = expire
try:
output = open(cachefile, 'wb')
pickle.dump(data, output, pickle.HIGHEST_PROTOCOL)
output.close()
except IOError:
logging.error("Couldn't write to %s", cachefile)
if 'expire' in data:
del data['expire']
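# --- Added illustration (not part of spacecmd) ---
# Round-trip of the two helpers above: save_cache() pickles the dict with an
# 'expire' timestamp tucked inside, and load_cache() returns (data, expire)
# with the 'expire' key stripped back out of the data.
#
#   when = datetime.now() + timedelta(hours=1)
#   save_cache('/tmp/spacecmd.demo', {'systems': ['web01']}, expire=when)
#   data, expire = load_cache('/tmp/spacecmd.demo')
#   # data == {'systems': ['web01']} and expire == when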
def tab_completer(options, text):
return [o for o in options if re.match(text, o)]
def filter_results(items, patterns, search=False):
matches = []
compiled_patterns = []
for pattern in patterns:
if search:
compiled_patterns.append(re.compile(pattern, re.I))
else:
# If in "match" mode, we don't want to match substrings
compiled_patterns.append(re.compile("^" + pattern + "$", re.I))
for item in items:
for pattern in compiled_patterns:
if search:
result = pattern.search(item)
else:
result = pattern.match(item)
if result:
matches.append(item)
break
return matches
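# --- Added example (not part of spacecmd) ---
# In "match" mode the pattern is anchored (^pattern$), so only whole-name,
# case-insensitive matches pass; with search=True substrings match too.
#
#   filter_results(['web01', 'web02', 'db01'], ['web.*'])
#   # -> ['web01', 'web02']
#   filter_results(['web01', 'web02', 'db01'], ['01'], search=True)
#   # -> ['web01', 'db01']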
def editor(template='', delete=False):
# create a temporary file
(descriptor, file_name) = mkstemp(prefix='spacecmd.')
if template and descriptor:
try:
handle = os.fdopen(descriptor, 'w')
handle.write(template)
handle.close()
except IOError:
logging.warning('Could not open the temporary file')
# use the user's specified editor
if 'EDITOR' in os.environ:
if __EDITORS[0] != os.environ['EDITOR']:
__EDITORS.insert(0, os.environ['EDITOR'])
success = False
for editor_cmd in __EDITORS:
try:
exit_code = os.spawnlp(os.P_WAIT, editor_cmd,
editor_cmd, file_name)
if exit_code == 0:
success = True
break
else:
logging.error('Editor exited with code %i', exit_code)
except OSError:
pass
if not success:
logging.error('No editors found')
return ('', '')
if os.path.isfile(file_name) and exit_code == 0:
try:
# read the session (format = username:session)
handle = open(file_name, 'r')
contents = handle.read()
handle.close()
if delete:
try:
os.remove(file_name)
file_name = ''
except OSError:
logging.error('Could not remove %s', file_name)
return (contents, file_name)
except IOError:
logging.error('Could not read %s', file_name)
return ('', '')
return ('', '')
def prompt_user(prompt, noblank=False, multiline=False):
try:
while True:
if multiline:
print(prompt)
userinput = sys.stdin.read()
else:
try:
# python 2 must call raw_input() because input()
# also evaluates the user input and that causes
# problems.
userinput = raw_input('%s ' % prompt)
except NameError:
# python 3 replaced raw_input() with input()...
                # it no longer evaluates the user input.
userinput = input('%s ' % prompt)
if noblank:
if userinput != '':
break
else:
break
except EOFError:
print()
return ''
if userinput != '':
last = readline.get_current_history_length() - 1
if last >= 0:
readline.remove_history_item(last)
return userinput
# parse time input from the user and return xmlrpclib.DateTime
def parse_time_input(userinput=''):
timestamp = None
if userinput == '' or re.match('now', userinput, re.I):
timestamp = datetime.now()
    # handle YYYYMMDDHHMM times
if not timestamp:
match = re.match(r'^(\d{4})(\d{2})(\d{2})(\d{2})?(\d{2})?(\d{2})?$', userinput)
if match:
date_format =
|
pulinagrawal/nupic
|
examples/opf/experiments/opfrunexperiment_test/simpleOPF/hotgym/description.py
|
Python
|
agpl-3.0
| 16,183
| 0.003399
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'~/nupic/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
#             _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
#                    w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
config = {
# Type of model that the rest of these parameters apply to.
'model': "CLA",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': { 'fields': [ ('numericFieldNameA', 'mean'),
('numericFieldNameB', 'sum'),
('categoryFieldNameC', 'first')],
'hours': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalNextStep',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
# at each step 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 0,
# Example:
# dsEncoderSchema = [
# DeferredDictLookup('__field_name_encoder'),
# ],
#
# (value generated from DS_ENCODER_SCHEMA)
'encoders': {
'timestamp_timeOfDay': dict(fieldname='timestamp', type='DateEncoder',timeOfDay=(21,8)),
'timestamp_dayOfWeek': dict(fieldname='timestamp', type='DateEncoder',dayOfWeek=(21,3)),
'consumption': dict(fieldname='consumption',type='ScalarEncoder',
name='consumption', minval=0,maxval=200,
clipInput=True, n=153, w=21, ) #resolution=5),
},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
      # Valid keys are any desired combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : None,
},
'spEnable': True,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActiveColumnsPerInhArea': 40,
'seed': 1956,
# potentialPct
# What percent of the columns's receptive field is available
# for potential synapses. At initialization time, we will
# choose potentialPct * (2*potentialRadius+1)^2
'potentialPct': 0.5,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10. Cells whose activity
# level before inhibition falls below minDutyCycleBeforeInh
# will have their own internal synPermConnectedCell
# threshold set below this default value.
# (This concept applies to both SP and TP and so 'cells'
# is correct here as opposed to 'columns')
'synPermConnected': 0.1,
'synPermActiveInc': 0.1,
'synPermInactiveDec': 0.01,
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tmEnable' : True,
'tmParams': {
# TP diagnostic output verbosity control;
|
596acres/django-livinglots-groundtruth
|
livinglots_groundtruth/forms.py
|
Python
|
bsd-3-clause
| 569
| 0
|
from django import forms
from django.utils.translation import ugettext_lazy as _
class GroundtruthRecordFormMixin(forms.ModelForm):
def clean(self):
cleaned_data = super(GroundtruthRecordFormMixin, self).clean()
contact_email = cleaned_data.get('contact_email', None)
contact_phone = cleaned_data.get('contact_phone', None)
if not (contact_email or contact_phone):
raise forms.ValidationError(_('Please enter an email address or '
'phone number'))
return cleaned_data
|
hiuwo/acq4
|
acq4/analysis/modules/MapCombiner/__init__.py
|
Python
|
mit
| 36
| 0
|
from MapCombiner import MapCombiner
|
tdavis/inboxtix
|
inboxtix/home/views.py
|
Python
|
mit
| 594
| 0.006734
|
from django.http import HttpResponse, HttpResponseForbidden
from django.core.cache import cache
from inboxtix.util import get_api_tree
def autocomplete_category(request):
if not request.is_ajax():
return HttpResponseForbidden()
name = request.GET.get('q',None)
limit = request.GET.get('limit', 10)
if not name:
return HttpResponse('')
    tree = get_api_tree('category', 'search', **{'name':name, 'limit':limit})
matches = []
for cat in tree.iter('category'):
matches.append(cat.find('name').text)
return HttpResponse('\n'.join(matches))
|
btbonval/DieStatistician
|
tests/testseries.py
|
Python
|
mit
| 1,440
| 0.000694
|
'''
Bryan Bonvallet
2013
This file tests functionality of series.py (and PMF.py).
'''
from testbase import FinitePMF
from testbase import InfinitePMF
from testbase import TestSeriesPMF
class TestSeries(TestSeriesPMF):
# Test functions in series.py
def test_finite_str(self):
# Test that a string is returned.
obj = self._build_finite_obj()
test = str(obj)
        self.assertTrue(isinstance(test, str) or isinstance(test, unicode))
def test_infinite_str(self):
# Test that a string is returned.
obj = self._build_infinite_obj()
test = str(obj)
self.assertTrue(isinstance(test, str) or isinstance(test, unicode))
def test_infinite_str_maxterms(self):
# Test that maxterms is respected
obj = self._build_infinite_obj()
self.assertTrue(hasattr(obj, 'maxterms'))
terms = obj.maxterms
        # Triple the number of terms to guarantee it won't display them all.
obj = self._build_infinite_obj(terms*3)
test = str(obj)
# This test makes strong assumptions about the string representation.
# Assume terms are listed in rows
# Grab the first row of terms between square brackets
nums = test.split('[')[-1].split(']')[0]
# Assume numbers are white space separated
nums = nums.split()
# There should be #terms not #terms*3
self.assertEqual(len(nums), terms)
|
edoburu/django-fluent-contents
|
fluent_contents/plugins/oembeditem/__init__.py
|
Python
|
apache-2.0
| 199
| 0
|
VERSION = (0, 1, 0)
# Do some version checking
try:
import micawber
except ImportError:
raise ImportError(
"The 'micawber' package is required to use the 'oembeditem' plugin."
)
|
Csega/PythonCAD3
|
Interface/cadscene.py
|
Python
|
gpl-2.0
| 15,874
| 0.006678
|
#
# Copyright (c) 2010 Matteo Boscolo, Gertwin Groen
#
# This file is part of PythonCAD.
#
# PythonCAD is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# PythonCAD is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PythonCAD; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
# This module the graphics scene class
#
#
import math
from PyQt5 import QtCore, QtGui, QtWidgets
from Generic.application import Application
from Interface.pycadapp import PyCadApp
from Interface.Entity.base import BaseEntity
from Interface.Entity.segment import Segment
from Interface.Entity.arc import Arc
from Interface.Entity.text import Text
from Interface.Entity.ellipse import Ellipse
from Interface.Entity.arrowitem import ArrowItem
from Interface.Entity.actionhandler import PositionHandler
from Interface.cadinitsetting import *
from Interface.dinamicentryobject import DinamicEntryLine
from Interface.Preview.base import BaseQtPreviewItem
from Kernel.pycadevent import PyCadEvent
from Kernel.GeoEntity.point import Point
from Kernel.exception import *
class CadScene(QtWidgets.QGraphicsScene):
def __init__(self, document, parent=None):
super(CadScene, self).__init__(parent)
# drawing limits
self.setSceneRect(-10000, -10000, 20000, 20000)
# scene custom event
# self.pyCadScenePressEvent=PyCadEvent() <<<<this seems unuseful
self.updatePreview = PyCadEvent()
self.zoomWindows = PyCadEvent()
self.fireCommandlineFocus = PyCadEvent()
self.fireKeyShortcut = PyCadEvent()
self.fireKeyEvent = PyCadEvent()
self.fireWarning = PyCadEvent()
self.fireCoords = PyCadEvent()
# fire Pan and Zoom events to the view
self.firePan = PyCadEvent()
self.fireZoomFit = PyCadEvent()
self.__document = document
self.__oldClickPoint = None
self.needPreview = False
self.forceDirection = None
self.__lastPickedEntity = None
self.isInPan = False
self.forceSnap = None
self._cmdZoomWindow = None
self.showHandler = False
self.posHandler = None
#
# new command implementation
#
self.__activeKernelCommand = None
self.activeICommand = None
#
self.__grapWithd = 20.0
#
        # Input implementation by Carlo
#
self.fromPoint = None # frompoint is assigned in icommand.getClickedPoint() and deleted by applycommand and cancelcommand, is needed for statusbar coordinates dx,dy
self.mouseOnSceneX = 0.0
self.mouseOnSceneY = 0.0
self.selectionAddMode = False
# scene aspect
r, g, b = BACKGROUND_COLOR #defined in cadinitsetting
self.setBackgroundBrush(QtGui.QBrush(QtGui.QColor(r, g, b), QtCore.Qt.SolidPattern))
@property
def activeKernelCommand(self):
"""
return the active command
"""
return self.__activeKernelCommand
@activeKernelCommand.setter
def activeKernelCommand(self, value):
self.__activeKernelCommand = value
def setActiveSnap(self, value):
if self.activeICommand != None:
self.activeICommand.activeSnap = value
def _qtInputPopUpReturnPressed(self):
self.forceDirection = "F" + self.qtInputPopUp.text
# ###############################################MOUSE EVENTS
# ##########################################################
def mouseMoveEvent(self, event):
scenePos = event.scenePos()
self.mouseOnSceneX = scenePos.x()
self.mouseOnSceneY = scenePos.y() * -1.0
#
#This event manages middle mouse button PAN
#
if self.isInPan:
self.firePan(None, event.scenePos())
#
#This event manages the status bar coordinates display (relative or absolute depending on self.fromPoint)
#
else:
if self.fromPoint == None:
self.fireCoords(scenePos.x(), (scenePos.y() * -1.0), "abs")
else:
x = scenePos.x() - self.fromPoint.getx()
y = scenePos.y() * -1.0 - self.fromPoint.gety()
self.fireCoords(x, y, "rel")
#
#This seems needed to preview commands
#
if self.activeICommand:
# scenePos = event.scenePos()
distance = None
point = Point(scenePos.x(), scenePos.y() * -1.0)
qtItem = [self.itemAt(scenePos, QtGui.QTransform())]
if self.__oldClickPoint:
distance = self.getDistance(event)
self.activeICommand.updateMauseEvent(point, distance, qtItem)
# self.updatePreview(self,point, distance)
#
# path=QtGui.QPainterPath()
# path.addRect(scenePos.x()-self.__grapWithd/2, scenePos.y()+self.__grapWithd/2, self.__grapWithd, self.__grapWithd)
# self.setSelectionArea(path)
#
super(CadScene, self).mouseMoveEvent(event)
return
    def mousePressEvent(self, event):
if event.button() == QtCore.Qt.MidButton:
self.isInPan = True
self.firePan(True, event.scenePos())
if not self.isInPan:
qtItem = self.itemAt(event.scenePos(), QtGui.QTransform())
p = QtCore.QPointF(event.scenePos().x(), event.scenePos().y())
if qtItem:
qtItem.setSelected(True)
self.updateSelected()
# else:
# print "No item selected"
# re fire the event
super(CadScene, self).mousePressEvent(event)
def mouseReleaseEvent(self, event):
if event.button() == QtCore.Qt.MidButton:
self.isInPan = False
self.firePan(False, None)
if not self.isInPan:
self.updateSelected()
qtItems = [item for item in self.selectedItems() if isinstance(item, BaseEntity)]
if self.activeICommand:
if event.button() == QtCore.Qt.RightButton:
try:
self.activeICommand.applyCommand()
except PyCadWrongImputData:
self.fireWarning("Wrong input value")
super(CadScene, self).mouseReleaseEvent(event)
return
if event.button() == QtCore.Qt.LeftButton:
point = None
if self.showHandler:
if self.posHandler == None:
self.posHandler = PositionHandler(event.scenePos())
self.addItem(self.posHandler)
return
else:
self.posHandler.show()
return
if point == None:
point = Point(event.scenePos().x(), event.scenePos().y() * -1.0)
# fire the mouse to the ICommand class
self.activeICommand.addMauseEvent(point=point, entity=qtItems, force=self.forceDirection)
else:
self.hideHandler()
if self._cmdZoomWindow:
self.zoomWindows(self.selectionArea().boundingRect())
self._cmdZoomWindow = None
            self.clearSelection()  # clear the selection after the window zoom; the zoom window selects entities, which is unwanted
super(CadScene, self).mouseReleaseEvent(event)
return
def hanhlerDoubleClick(self):
"""
event add from the handler
|
GroestlCoin/electrum-grs
|
electrum_grs/gui/qt/exception_window.py
|
Python
|
gpl-3.0
| 7,308
| 0.001505
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import html
from typing import TYPE_CHECKING, Optional, Set
from PyQt5.QtCore import QObject
import PyQt5.QtCore as QtCore
from PyQt5.QtWidgets import (QWidget, QLabel, QPushButton, QTextEdit,
QMessageBox, QHBoxLayout, QVBoxLayout)
from electrum_grs.i18n import _
from electrum_grs.base_crash_reporter import BaseCrashReporter
from electrum_grs.logging import Logger
from electrum_grs import constants
from electrum_grs.network import Network
from .util import MessageBoxMixin, read_QIcon, WaitingDialog
if TYPE_CHECKING:
from electrum_grs.simple_config import SimpleConfig
from electrum_grs.wallet import Abstract_Wallet
class Exception_Window(BaseCrashReporter, QWidget, MessageBoxMixin, Logger):
_active_window = None
def __init__(self, config: 'SimpleConfig', exctype, value, tb):
BaseCrashReporter.__init__(self, exctype, value, tb)
self.network = Network.get_instance()
self.config = config
QWidget.__init__(self)
self.setWindowTitle('Electrum-GRS - ' + _('An Error Occurred'))
self.setMinimumSize(600, 300)
Logger.__init__(self)
main_box = QVBoxLayout()
heading = QLabel('<h2>' + BaseCrashReporter.CRASH_TITLE + '</h2>')
main_box.addWidget(heading)
main_box.addWidget(QLabel(BaseCrashReporter.CRASH_MESSAGE))
main_box.addWidget(QLabel(BaseCrashReporter.REQUEST_HELP_MESSAGE))
collapse_info = QPushButton(_("Show report contents"))
collapse_info.clicked.connect(
lambda: self.msg_box(QMessageBox.NoIcon,
self, _("Report contents"), self.get_report_string(),
rich_text=True))
main_box.addWidget(collapse_info)
main_box.addWidget(QLabel(BaseCrashReporter.DESCRIBE_ERROR_MESSAGE))
self.description_textfield = QTextEdit()
self.description_textfield.setFixedHeight(50)
self.description_textfield.setPlaceholderText(self.USER_COMMENT_PLACEHOLDER)
main_box.addWidget(self.description_textfield)
main_box.addWidget(QLabel(BaseCrashReporter.ASK_CONFIRM_SEND))
buttons = QHBoxLayout()
report_button = QPushButton(_('Send Bug Report'))
report_button.clicked.connect(self.send_report)
report_button.setIcon(read_QIcon("tab_send.png"))
buttons.addWidget(report_button)
never_button = QPushButton(_('Never'))
never_button.clicked.connect(self.show_never)
buttons.addWidget(never_button)
close_button = QPushButton(_('Not Now'))
close_button.clicked.connect(self.close)
buttons.addWidget(close_button)
main_box.addLayout(buttons)
self.setLayout(main_box)
self.show()
def send_report(self):
def on_success(response):
# note: 'response' coming from (remote) crash reporter server.
# It contains a URL to the GitHub issue, so we allow rich text.
self.show_message(parent=self,
title=_("Crash report"),
msg=response,
rich_text=True)
self.close()
def on_failure(exc_info):
e = exc_info[1]
self.logger.error('There was a problem with the automatic reporting', exc_info=exc_info)
self.show_critical(parent=self,
msg=(_('There was a problem with the automatic reporting:') + '<br/>' +
repr(e)[:120] + '<br/><br/>' +
_("Please report this issue manually") +
f' <a href="{constants.GIT_REPO_ISSUES_URL}">on GitHub</a>.'),
rich_text=True)
proxy = self.network.proxy
task = lambda: BaseCrashReporter.send_report(self, self.network.asyncio_loop, proxy)
msg = _('Sending crash report...')
WaitingDialog(self, msg, task, on_success, on_failure)
def on_close(self):
Exception_Window._active_window = None
self.close()
def show_never(self):
self.config.set_key(BaseCrashReporter.config_key, False)
self.close()
def closeEvent(self, event):
self.on_close()
event.accept()
def get_user_description(self):
return self.description_textfield.toPlainText()
def get_wallet_type(self):
wallet_types = Exception_Hook._INSTANCE.wallet_types_seen
return ",".join(wallet_types)
def _get_traceback_str_to_display(self) -> str:
# The msg_box that shows the report uses rich_text=True, so
# if traceback contains special HTML characters, e.g. '<',
# they need to be escaped to avoid formatting issues.
traceback_str = super()._get_traceback_str_to_display()
return html.escape(traceback_str)
def _show_window(*args):
if not Exception_Window._active_window:
Exception_Window._active_window = Exception_Window(*args)
class Exception_Hook(QObject, Logger):
_report_exception = QtCore.pyqtSignal(object, object, object, object)
_INSTANCE = None # type: Optional[Exception_Hook] # singleton
def __init__(self, *, config: 'SimpleConfig'):
QObject.__init__(self)
Logger.__init__(self)
assert self._INSTANCE is None, "Exception_Hook is supposed to be a singleton"
self.config = config
self.wallet_types_seen = set() # type: Set[str]
sys.excepthook = self.handler
self._report_exception.connect(_show_window)
@classmethod
def maybe_setup(cls, *, config: 'SimpleConfig', wallet: 'Abstract_Wallet' = None) -> None:
if not config.get(BaseCrashReporter.config_key, default=True):
return
if not cls._INSTANCE:
cls._INSTANCE = Exception_Hook(config=config)
if wallet:
cls._INSTANCE.wallet_types_seen.add(wallet.wallet_type)
def handler(self, *exc_info):
self.logger.error('exception caught by crash reporter', exc_info=exc_info)
self._report_exception.emit(self.config, *exc_info)
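# Usage sketch (not part of the original module; the import path is assumed):
# an application installs the hook once at startup, after which any uncaught
# exception that reaches sys.excepthook is logged and, unless the user chose
# "Never", surfaced in an Exception_Window offering to send a crash report.
#
#   from electrum_grs.gui.qt.exception_window import Exception_Hook
#   Exception_Hook.maybe_setup(config=config, wallet=wallet)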
|
Alshootfa/I.K.R.A
|
app.py
|
Python
|
mit
| 3,051
| 0.004916
|
from flask import Flask, url_for, redirect, request, render_template, send_from_directory
from flask.ext.sqlalchemy import SQLAlchemy
from model import *
from werkzeug import secure_filename
import os
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'mp4'])
if not os.path.exists('data/media'):
os.makedirs('data/media')
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'data/media'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///data/test.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
with app.app_context():
db.init_app(app)
db.drop_all()
db.create_all()
# sample data
sample_courses = [
Course('chemistry', 'This is a thing that has to do with chemistry', 'chemistry.png'),
Course('physics', 'This is a thing that has to do with physics', 'physics.png'),
Course('programming', 'This is a thing that has to do with programming', 'programming.png'),
]
for course in sample_courses:
db.session.add(course)
db.session.commit()
@app.route("/")
def home():
return redirect('/courses')
@app.route('/courses')
def list_courses():
courses = Course.query.all()
return render_template('list_courses.html', courses=courses)
@app.route('/courses/<course_id>/media/add', methods=['GET', 'POST'])
def add_media(course_id):
if request.method == 'POST':
file = request.files['file']
if not file or not is_allowed_file(file.filename):
            return redirect(url_for('add_media', course_id=course_id))
upload_media(course_id, file)
return redirect(url_for('get_course', course_id=course_id))
course = Course.query.filter_by(ID=course_id).first()
    return render_template('add_media.html', course=course)
def get_courses_dir(course_id):
result = os.path.join('courses', course_id)
try:
os.makedirs(result)
except OSError:
pass
return result
@app.route('/courses/<course_id>')
def get_course(course_id):
course = Course.query.filter_by(ID=course_id).first()
media = Media.query.filter_by(course_id=course_id)
return render_template('course.html', course=course, media=media)
@app.route('/courses/<course_id>/media/<name>')
def get_media(course_id, name):
media = Media.query.filter_by(course_id=course_id, name=name).first()
return send_from_directory(app.config['UPLOAD_FOLDER'], media.location)
def upload_media(course_id, file):
filename = secure_filename(file.filename)
new_media = Media(course_id, filename, -1, None, get_file_extension(filename))
new_media.location = os.path.join(get_courses_dir(course_id), filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], new_media.location))
db.session.add(new_media)
db.session.commit()
def get_file_extension(filename):
return filename.rsplit('.', 1)[1]
def is_allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
if __name__ == "__main__":
app.run(host="0.0.0.0", debug=True)
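# Minimal smoke-test sketch (not part of the original file), using Flask's
# built-in test client against the routes defined above:
#
#   with app.test_client() as client:
#       assert client.get('/').status_code == 302        # redirects to /courses
#       assert client.get('/courses').status_code == 200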
|
gratefulfrog/ArduGuitar
|
Ardu2/design/POC-3_MAX395/pyboard/V1_WithHMI/PyHMI/oClasses.py
|
Python
|
gpl-2.0
| 5,003
| 0.017789
|
#oClasses.py
import Classes
class LCDMgr:
display = 0
editing = 1
eoEdit = 2
error = 3
confirmAbortMode = 4
#modes = [display, editing, eoEdit, error,confirmAbortMode]
cursorOff =-1
cursorStart = 0
eoLine = 15
lineLength = 16
letters = ['A','a','B','b','C','c','D','d'] # lower case => inverted
symbols = ['(','|','+',')']
def initChars():
#print('initChars')
        return LCDMgr.letters + LCDMgr.symbols
def setDisplayMode(self):
#print('setDisplayMode')
self.lastClick='r'
self.mode = LCDMgr.display
self.cursor = LCDMgr.cursorOff
self.displayCharList = list(self.stateString.ljust(LCDMgr.lineLength))
self.lcd.setLn(0,self.stateString)
self.lcd.setLn(1,self.stateName)
def loadConf(self):
print('in lcd mgr loading conf...')
self.stateString = self.stateDict[self.sKey]
self.stateName = self.stateDict[self.nKey][:16]
self.lcd.setLn(0,self.stateString)
self.lcd.setLn(1,self.stateName)
self.setDisplayMode()
def __init__(self,(stateDict,sKey,nKey),lcdd, q, validateFunc):
self.stateDict = stateDict
self.sKey = sKey
self.nKey = nKey
self.lcd = lcdd
self.validateFunc = validateFunc
self.lcdPba = Classes.LCDPBArray(q)
def display(self):
self.lcd.display()
self.lcdPba.display()
def setSList(self):
#print('setSList')
self.sList = [' '] + LCDMgr.symbols + self.lettersLeft
def setEditingMode(self, special = None):
#print('setEditingMode')
self.mode=LCDMgr.editing
self.lastClick='r'
self.cursor = 0
self.lettersLeft = LCDMgr.letters
self.charPtr = 0
self.setSList()
self.displayCharList = list(' ' * (LCDMgr.lineLength))
self.updateEditDisplay(special)
def updateEditDisplay(self, special=None):
# this is a stub
msg = ''.join(self.displayCharList)
self.lcd.setLn(0,msg)
#print(msg)
if self.cursor>=0:
msg = ' '* self.cursor + '^' + ' Error!' if special else ' '* self.cursor + '^'
#print(msg)
self.lcd.setLn(1,msg)
def setConfirmAbortMode(self):
#print('setConfirmAbortMode')
self.mode = LCDMgr.confirmAbortMode
def doConfirm(self):
# '0123456789ABCDEF'
msg = 'Abort - Confirm'
self.lcd.setLn(1,msg)
#print(msg)
self.setConfirmAbortMode()
def setEOEMode(self):
#print('setEOEMode')
self.mode = LCDMgr.eoEdit
#self.cursor = LCDMgr.cursorOff
self.doConfirm()
def advanceCursor(self):
#print('advanceCursor')
self.cursor +=1
self.charPtr = 0
self.lettersLeft = [x for x in self.lettersLeft
if x.upper() not in self.displayCharList[0:self.cursor] and
x.lower() not in self.displayCharList[0:self.cursor]]
self.setSList()
self.updateEditDisplay()
def onLeftButton(self):
#print('onLeftButton')
if self.mode == LCDMgr.display:
# set edit mode
self.setEditingMode()
elif self.cursor == LCDMgr.eoLine:
#end of theline, set eoline mode
self.setEOEMode()
elif self.mode == LCDMgr.eoEdit:
# return to editing mode
self.setEditingMode()
elif self.mode == LCDMgr.error:
self.setEditingMode()
elif self.mode == LCDMgr.confirmAbortMode:
self.setDisplayMode()
elif self.lastClick == 'l':
self.doConfirm()
else:
self.advanceCursor()
self.lastClick ='l'
#self.updateDisplay()
def incAtCursor(self):
#print('incAtCursor')
self.charPtr = (self.charPtr+1) % len(self.sList)
        self.displayCharList[self.cursor] = self.sList[self.charPtr]
self.updateEditDisplay()
def confirmed(self):
if self.validateFunc(self.lcd.getLn(0)): # put a real test here for the display Char list
            self.stateString = ''.join([c for c in self.displayCharList if c != ' '])
self.lcd.setLn(0,self.stateString)
self.lcd.setLn(1,self.stateName)
self.setDisplayMode()
else:
self.setEditingMode(True)
def onRightButton(self):
#print('onRightButton')
if self.mode == LCDMgr.display:
return
elif self.mode == LCDMgr.eoEdit:
self.doConfirm()
elif self.mode ==LCDMgr.error:
self.setDisplayMode()
elif self.mode == LCDMgr.confirmAbortMode:
self.confirmed()
else:
self.incAtCursor()
self.lastClick='r'
#self.updateDisplay()
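# Interaction summary (descriptive note, not in the original file): the right
# button cycles the character under the cursor through sList; a single left
# click advances the cursor, while a second consecutive left click (or
# reaching column 15) raises the "Abort - Confirm" prompt, where right
# confirms via validateFunc and left aborts back to display mode.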
|
artefactual/archivematica-storage-service
|
storage_service/locations/api/urls.py
|
Python
|
agpl-3.0
| 865
| 0
|
from __future__ import absolute_import
from django.conf.urls import include, url
from tastypie.api import Api
from locations.api import v1, v2
from locations.api.sword import views
v1_api = Api(api_name="v1")
v1_api.register(v1.SpaceResource())
v1_api.register(v1.LocationResource())
v1_api.register(v1.PackageResource())
v1_api.register(v1.PipelineResource())
v1_api.register(v1.AsyncResource())
v2_api = Api(api_name="v2")
v2_api.register(v2.SpaceResource())
v2_api.register(v2.LocationResource())
v2_api.register(v2.PackageResource())
v2_api.register(v2.PipelineResource())
v2_api.register(v2.AsyncResource())
urlpatterns = [
url(r"", include(v1_api.urls)),
url(r"v
|
1/sword/$", views.service_document, name="sword_service_document"),
url(r"", include(v2_api.urls)),
url(r"v2/sword/$", views.service_document, name="sword_service_document"),
]
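# For reference (a sketch, not in the original file): tastypie prefixes each
# resource with its Api's api_name, so, assuming default resource names, the
# patterns above expose endpoints such as /v1/space/, /v1/location/,
# /v1/package/, /v1/pipeline/, /v1/async/ and their /v2/ counterparts, plus
# /v1/sword/ and /v2/sword/ for the SWORD service document.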
|
dana-i2cat/felix
|
expedient/doc/plugins/samples/plugin/sample_resource/controller/GUIdispatcher.py
|
Python
|
apache-2.0
| 7,485
| 0.011089
|
"""
Graphical user interface functionalities for the
SampleResource Aggregate Manager.
@date: Jun 12, 2013
@author: CarolinaFernandez
"""
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import get_object_or_404
from expedient.clearinghouse.aggregate.models import Aggregate
from expedient.clearinghouse.slice.models import Slice
from expedient.common.messaging.models import DatedMessage
from expedient.common.utils.plugins.plugincommunicator import *
from expedient.common.utils.plugins.resources.link import Link
from expedient.common.utils.plugins.resources.node import Node
from expedient.common.utils.views import generic_crud
from sample_resource.controller.resource import SampleResource as SampleResourceController
from sample_resource.forms.SampleResource import SampleResource as SampleResourceModelForm
from sample_resource.models import SampleResource as SampleResourceModel,\
SampleResourceAggregate as SampleResourceAggregateModel
import copy
import logging
import xmlrpclib
def create_resource(request, slice_id, agg_id):
"""Show a page that allows user to add a SampleResource to the aggregate."""
if request.method == "POST":
        # Show an error message when the aggregate is unreachable, disable SampleResource creation and go back to the slice detail page
agg = Aggregate.objects.get(id = agg_id)
if agg.check_status() == False:
DatedMessage.objects.post_message_to_user(
"SampleResource Aggregate '%s' is not available" % agg.name,
request.user, msg_type=DatedMessage.TYPE_ERROR,)
return HttpResponseRedirect(reverse("slice_detail", args=[slice_id]))
if 'create_resource' in request.POST:
return HttpResponseRedirect(reverse("sample_resource_resource_crud", args=[slice_id, agg_id]))
else:
return HttpResponseRedirect(reverse("slice_detail", args=[slice_id]))
def resource_crud(request, slice_id, agg_id, resource_id = None):
"""
Show a page that allows user to create/edit SampleResource's to the Aggregate.
"""
slice = get_object_or_404(Slice, id = slice_id)
aggregate = Aggregate.objects.get(id = agg_id)
error_crud = ""
def pre_save(instance, created):
"""
Fills SampleResource instance prior to its saving.
Used within the scope of the generic_crud method.
"""
instance = SampleResourceController.fill(instance, slice, agg_id, resource_id)
try:
return generic_crud(request, obj_id=resource_id, model=SampleResourceModel,
form_class=SampleResourceModelForm,
template="sample_resource_resource_crud.html",
redirect=lambda inst: reverse("slice_detail", args=[slice_id]),
extra_context={"agg": aggregate, "slice": slice, "exception": error_crud, "breadcrumbs": (
("Home", reverse("home")),
("Project %s" % slice.project.name, reverse("project_detail", args=[slice.project.id])),
("Slice %s" % slice.name, reverse("slice_detail", args=[slice_id])),
("%s SampleResource" % "Update" if resource_id else "Create", reverse("sample_resource_resource_crud", args=[slice_id, agg_id])),)
}, extra_form_params={}, template_object_name="object", pre_save=pre_save,
post_save=None, success_msg=None)
except ValidationError as e:
# Django exception message handling is different to Python's...
error_crud = ";".join(e.messages)
except Exception as e:
print "[WARNING] Could not create resource in plugin 'sample_resource'. Details: %s" % str(e)
DatedMessage.objects.post_message_to_user(
"SampleResource might have been created, but some problem ocurred: %s" % str(e),
request.user, msg_type=DatedMessage.TYPE_ERROR)
return HttpResponseRedirect(reverse("slice_detail", args=[slice_id]))
def manage_resource(request, resource_id, action_type):
"""
Manages the actions executed over SampleResource's.
"""
if action_type == "delete":
SampleResourceController.delete(resource_id)
# Go to manage resources again
return HttpResponse("")
###
# Topology to show in the Expedient
#
def get_sr_list(slice):
return SampleResourceModel.objects.filter(slice_id = slice.uuid)
def get_sr_aggregates(slice):
sr_aggs = []
try:
sr_aggs = slice.aggregates.filter(leaf_name=SampleResourceAggregateModel.__name__.lower())
except:
pass
return sr_aggs
def get_node_description(node):
description = "<strong>Sample Resource: " + node.name + "</strong><br/><br/>"
description += "• Temperature: %s (°%s)" % (str(node.get_temperature()), str(node.get_temperature_scale()))
connections = ""
node_connections = node.get_connections()
for i, connection in enumerate(node_connections):
connections += connection.name
if i < len(node_connections)-1:
connections += ", "
description += "<br/>• Connected to: %s" % str(connections)
return description
def get_nodes_links(slice, chosen_group=None):
nodes = []
links = []
sr_aggs = get_sr_aggregates(slice)
# Getting image for the nodes
# FIXME: avoid to ask the user for the complete name of the method here! he should NOT know it
try:
image_url = reverse('img_media_sample_resource', args=("sensor-tiny.png",))
except:
image_url = 'sensor-tiny.png'
# For every SampleResource AM
for i, sr_agg in enumerate(sr_aggs):
sr_agg = sr_agg.sampleresourceaggregate
# Iterates over every SampleResource contained within the slice
for sr in sr_agg.get_resources():
sr = sr.sampleresource
nodes.append(Node(
# Users shall not be left the choice to choose group/island; otherwise collision may arise
name = sr.name, value = sr.id, aggregate = sr.aggregate, type = "Sample resource",
description = get_node_description(sr), image = image_url)
)
for connection in sr.get_connections():
# Two-ways link
links.append(
Link(
target = str(sr.id), source = str(connection.id),
value = "rsc_id_%s-rsc_id_%s" % (connection.id, sr.id)
),
)
links.append(
Link(
target = str(sr.id), source = str(connection.id),
value = "rsc_id_%s-rsc_id_%s" % (sr.id, connection.id)
),
)
return [nodes, links]
#from expedient.common.utils.plugins.plugininterface import PluginInterface
#
#class Plugin(PluginInterface):
# @staticmethod
def get_ui_data(slice):
"""
Hook method. Use this very same name so Expedient can get the resources for every plugin.
"""
ui_context = dict()
try:
ui_context['sr_list'] = get_sr_list(slice)
        ui_context['sr_aggs'] = get_sr_aggregates(slice)
ui_context['nodes'], ui_context['links'] = get_nodes_links(slice)
except Exception as e:
print "[ERROR] Problem loading UI data for p
|
lugin 'sample_resource'. Details: %s" % str(e)
return ui_context
|
DougFirErickson/neon
|
neon/backends/tests/test_tensor.py
|
Python
|
apache-2.0
| 4,727
| 0.000635
|
# ----------------------------------------------------------------------------
# Copyright 2015 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
# pylint: skip-file
"""
Test of basic math operations on the Tensors and compare with numpy results
The Tensor types includes GPU and CPU Tensors
"""
import numpy as np
import itertools as itt
from neon.backends.nervanagpu import NervanaGPU
from neon.backends.nervanacpu import NervanaCPU
from neon.backends.tests.utils import assert_tensors_allclose
def init_helper(lib, inA, inB, dtype):
A = lib.array(inA, dtype=dtype)
B = lib.array(inB, dtype=dtype)
C = lib.empty(inB.shape, dtype=dtype)
return A, B, C
def math_helper(lib, op, inA, inB, dtype):
A, B, C = init_helper(lib, inA, inB, dtype)
if op == '+':
C[:] = A + B
elif op == '-':
C[:] = A - B
elif op == '*':
C[:] = A * B
elif op == '/':
C[:] = A / B
elif op == '>':
C[:] = A > B
elif op == '>=':
C[:] = A >= B
elif op == '<':
C[:] = A < B
elif op == '<=':
C[:] = A <= B
return C
def compare_helper(op, inA, inB, dtype):
numpy_result = math_helper(np, op, inA, inB, dtype=np.float32)
if np.dtype(dtype).kind == 'i' or np.dtype(dtype).kind == 'u':
numpy_result = np.around(numpy_result)
numpy_result = numpy_result.clip(
np.iinfo(dtype).min, np.iinfo(dtype).max)
numpy_result = numpy_result.astype(dtype)
if dtype in (np.float32, np.float16):
gpu = NervanaGPU(default_dtype=dtype)
nervanaGPU_result = math_helper(gpu, op, inA, inB, dtype=dtype)
nervanaGPU_result = nervanaGPU_result.get()
        assert np.allclose(numpy_result, nervanaGPU_result, rtol=0, atol=1e-5)
cpu = NervanaCPU(default_dtype=dtype)
nervanaCPU_result = math_helper(cpu, op, inA, inB, dtype=dtype)
nervanaCPU_result = nervanaCPU_result.get()
    assert np.allclose(numpy_result, nervanaCPU_result, rtol=0, atol=1e-5)
def rand_unif(dtype, dims):
    if np.dtype(dtype).kind == 'f':
return np.random.uniform(-1, 1, dims).astype(dtype)
else:
iinfo = np.iinfo(dtype)
return np.around(np.random.uniform(iinfo.min, iinfo.max, dims)).clip(iinfo.min, iinfo.max)
def pytest_generate_tests(metafunc):
"""
Build a list of test arguments.
"""
dims = [(64, 327),
(64, 1),
(1, 1023),
(4, 3),
]
dtypes = [np.float32, np.float16]
if 'fargs_tests' in metafunc.fixturenames:
fargs = itt.product(dims, dtypes)
metafunc.parametrize("fargs_tests", fargs)
def test_math(fargs_tests):
dims, dtype = fargs_tests
randA = rand_unif(dtype, dims)
randB = rand_unif(dtype, dims)
compare_helper('+', randA, randB, dtype)
compare_helper('-', randA, randB, dtype)
compare_helper('*', randA, randB, dtype)
compare_helper('>', randA, randB, dtype)
compare_helper('>=', randA, randB, dtype)
compare_helper('<', randA, randB, dtype)
compare_helper('<=', randA, randB, dtype)
def test_slicing(fargs_tests):
dims, dtype = fargs_tests
gpu = NervanaGPU(default_dtype=dtype)
cpu = NervanaCPU(default_dtype=dtype)
array_np = np.random.uniform(-1, 1, dims).astype(dtype)
array_ng = gpu.array(array_np, dtype=dtype)
array_nc = cpu.array(array_np, dtype=dtype)
assert_tensors_allclose(array_ng[0], array_nc[0], rtol=0, atol=1e-3)
assert_tensors_allclose(array_ng[-1], array_nc[-1], rtol=0, atol=1e-3)
assert_tensors_allclose(array_ng[0, :], array_nc[0, :], rtol=0, atol=1e-3)
assert_tensors_allclose(array_ng[0:], array_nc[0:], rtol=0, atol=1e-3)
assert_tensors_allclose(array_ng[:-1], array_nc[:-1], rtol=0, atol=1e-3)
assert_tensors_allclose(array_ng[:, 0], array_nc[:, 0], rtol=0, atol=1e-3)
assert_tensors_allclose(array_ng[:, 0:1], array_nc[:, 0:1], rtol=0, atol=1e-3)
assert_tensors_allclose(array_ng[-1, 0:], array_nc[-1:, 0:], rtol=0, atol=1e-3)
array_ng[0] = 0
array_nc[0] = 0
assert_tensors_allclose(array_ng, array_nc, rtol=0, atol=1e-3)
del(gpu)
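# Usage sketch (not part of the original file): the tests are parametrized by
# pytest_generate_tests above, so each test function runs once per
# (dims, dtype) pair from itt.product(dims, dtypes), e.g.:
#
#   py.test neon/backends/tests/test_tensor.py -v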
|
lucidlylogicole/monkey
|
monkey.py
|
Python
|
gpl-3.0
| 11,008
| 0.014353
|
from PyQt4 import QtGui, QtCore, QtWebKit, Qsci
import os, sys, pickle
from monkey_ui import Ui_Monkey
sys.path.append('../')
import settings
class Monkey(QtGui.QWidget):
def __init__(self,parent=None):
QtGui.QWidget.__init__(self,parent)
self.ui = Ui_Monkey()
self.ui.setupUi(self)
# Connect Signals
QtCore.QObject.connect(self.ui.b_run,QtCore.SIGNAL("clicked()"),self.run)
QtCore.QObject.connect(self.ui.b_export,QtCore.SIGNAL("clicked()"),self.export)
QtCore.QObject.connect(self.ui.b_save,QtCore.SIGNAL("clicked()"),self.saveProject)
QtCore.QObject.connect(self.ui.b_new,QtCore.SIGNAL("clicked()"),self.newProject)
QtCore.QObject.connect(self.ui.li_projects,QtCore.SIGNAL("itemDoubleClicked(QListWidgetItem*)"),self.openProject)
QtCore.QObject.connect(self.ui.tab_bottom,QtCore.SIGNAL("currentChanged(int)"),self.codeTabChange)
self.setup()
def setup(self):
self.currentProject = None
# Setup Preview
self.ui.wv_preview.settings().setAttribute(QtWebKit.QWebSettings.PluginsEnabled,True)
self.ui.wv_preview.settings().setAttribute(QtWebKit.QWebSettings.JavaEnabled,True)
self.ui.wv_preview.settings().setAttribute(QtWebKit.QWebSettings.JavascriptEnabled,True)
self.ui.wv_preview.settings().setAttribute(QtWebKit.QWebSettings.JavascriptCanOpenWindows,True)
self.setupInspector()
self.ui.split_wv.addWidget(self.webInspector)
# Setup QSci's
settings.setupHTML(self.ui.te_html,Qsci.QsciLexerHTML())
settings.setupJs(self.ui.te_js,Qsci.QsciLexerJavaScript())
settings.setupCSS(self.ui.te_css,Qsci.QsciLexerCSS())
self.ui.split_main.setSizes([500,500])
# Javascript Plugins
self.loadPlugins()
# Load Projects
self.projPath = settings.getProjectPath()
self.loadProjects()
# Setup Project Title And Save
self.ui.fr_proj = QtGui.QFrame()
layout = QtGui.QGridLayout()
layout.setContentsMargins(0,0,0,0)
layout.setSpacing(1)
        self.ui.fr_proj.setLayout(layout)
        self.ui.le_project.setPlaceholderText('Project Name')
self.ui.fr_proj.layout().addWidget(self.ui.b_new ,0,0,1,1)
self.ui.fr_proj.layout().addWidget(self.ui.le_project ,0,1,1,1)
self.ui.fr_proj.layout().addWidget(self.ui.b_save ,0,2,1,1)
self.ui.tab_top.setCornerWidget(self.ui.fr_proj)
self.ui.le_project.show()
#---Preview
def run(self):
# Check Plugins
phtml = ''
angular = '' # Check for angular
if os.name =='nt':
pfx="file:///"
else:
pfx="file://"
for i in range(self.ui.li_plugins.count()):
itm = self.ui.li_plugins.item(i)
if itm.checkState() > 0:
jsfile = unicode(itm.data(33).toString())
if jsfile.startswith('http'): # IF HTTP don't add file prefix
rpfx = ''
else:
rpfx = pfx
if jsfile.endswith('.js'): # Javascript
phtml+='<script src="'+rpfx+jsfile+'" type="text/javascript"></script>'
else: # CSS
phtml+='<link href="'+rpfx+jsfile+'" rel="stylesheet" media="screen">'
if 'angular.' in jsfile:
angular = 'ng-app'
# Build HTML
html = '''<!DOCTYPE html>
<html '''+angular+'''>
<!--HEAD-->
<head>'''
html += phtml
# CSS
html += '<style>'
html += self.ui.te_css.text()
html += '</style>'
# Javascript
html += '<script>'+self.ui.te_js.text()+'</script>'
html += '</head>'
html += '<body>'
# Html
html += self.ui.te_html.text()
self.ui.wv_preview.setHtml(html)
def export(self):
cpth = QtGui.QFileDialog.getExistingDirectory()
pname = unicode(self.ui.le_project.text())
if not cpth.isEmpty() and pname != '':
cpth += '/'
# Check Plugins
phtml = ''
angular = '' # Check for angular
if os.name =='nt':
pfx="file:///"
else:
pfx="file://"
for i in range(self.ui.li_plugins.count()):
itm = self.ui.li_plugins.item(i)
if itm.checkState() > 0:
jsfile = unicode(itm.data(33).toString())
if jsfile.startswith('http'): # IF HTTP don't add file prefix
rpfx = ''
else:
rpfx = pfx
if jsfile.endswith('.js'): # Javascript
phtml+='<script src="'+rpfx+jsfile+'" type="text/javascript"></script>'
else: # CSS
phtml+='<link href="'+rpfx+jsfile+'" rel="stylesheet" media="screen">'
if 'angular.' in jsfile:
angular = 'ng-app'
# Build HTML
html = '''<!DOCTYPE html>
<html '''+angular+'''>
<!--HEAD-->
<head>'''
html += phtml
# CSS
css = unicode(self.ui.te_css.text())
if css !='':
html += '<link href="'+pfx+cpth+pname+'.css" rel="stylesheet" media="screen">'
f = open(cpth+pname+'.css','w')
f.write(css)
f.close()
# Javascript
js = unicode(self.ui.te_js.text())
if js != '':
html += '<script src="'+rpfx+cpth+pname+'.js" type="text/javascript"></script>'
f = open(cpth+pname+'.js','w')
f.write(js)
f.close()
html += '</head>'
html += '<body>'
# Html
html += self.ui.te_html.text()
f = open(cpth+pname+'.html','w')
f.write(html)
f.close()
#---Code
def codeTabChange(self,ind):
pass
#---Webview Setup
def setupInspector(self):
page = self.ui.wv_preview.page()
page.settings().setAttribute(QtWebKit.QWebSettings.DeveloperExtrasEnabled, True)
self.webInspector = QtWebKit.QWebInspector(self)
self.webInspector.setPage(page)
shortcut = QtGui.QShortcut(self)
shortcut.setKey(QtCore.Qt.Key_F12)
shortcut.activated.connect(self.toggleInspector)
self.webInspector.setVisible(False)
def toggleInspector(self):
self.webInspector.setVisible(not self.webInspector.isVisible())
#---Projects
def loadProjects(self):
self.ui.li_projects.clear()
if os.path.exists(self.projPath):
for p in sorted(os.listdir(self.projPath)):
itm = QtGui.QListWidgetItem(p[:-7])
itm.setToolTip(self.projPath+p)
self.ui.li_projects.addItem(itm)
else:
os.mkdir(self.projPath)
def saveProject(self):
proj_nm = unicode(self.ui.le_project.text())
if proj_nm != '':
proj = self.buildProject()
pickle.dump(proj,open(self.projPath+proj_nm+'.monkey','wb'))
self.loadProjects()
self.currentProject = proj
def buildProject(self):
proj_nm = unicode(self.ui.le_project.text())
plugs = {}
# Load Plugins
for i in range(self.ui.li_plugins.count()):
itm = self.ui.li_plugins.item(i)
if itm.checkState() > 0:
jsfile = unicode(itm.data(33).toString())
jsname = unicode(itm.text())
plugs[jsname]=jsfile
# Project Object
proj = {
'name':proj_nm,
|
rogerthat-platform/rogerthat-backend
|
src/rogerthat/service/api/app.py
|
Python
|
apache-2.0
| 17,482
| 0.002402
|
# -*- coding: utf-8 -*-
# Copyright 2017 GIG Technology NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.3@@
import json
from Queue import Queue, Empty
from threading import Thread
from types import NoneType
from google.appengine.ext import db
from mcfw.consts import MISSING
from mcfw.rpc import returns, arguments
from rogerthat.bizz import app as bizz_app
from rogerthat.bizz.app import get_app_statistics
from rogerthat.bizz.look_and_feel import put_app_look_and_feel, delete_app_look_and_feel, list_app_look_and_feel, \
test_app_look_and_feel
from rogerthat.bizz.service import validate_app_admin, get_and_validate_service_identity_user, InvalidValueException, \
get_and_validate_app_id_for_service_identity_user
from rogerthat.dal.app import get_app_translations
from rogerthat.dal.friend import get_friends_map_key_by_user
from rogerthat.dal.profile import get_user_profiles_by_app_id
from rogerthat.dal.registration import list_installations_by_app, get_installation_logs_by_installation, \
get_mobiles_and_profiles_for_installations
from rogerthat.dal.service import get_default_service_identity
from rogerthat.exceptions.look_and_feel import LookAndFeelNotFoundException, StyleNotFoundInNavigationItemsException
from rogerthat.models import ServiceIdentity, Installation, ServiceProfile
from rogerthat.models.apps import AppLookAndFeel
from rogerthat.models.properties.app import AutoConnectedService
from rogerthat.rpc import users
from rogerthat.rpc.models import ServiceAPICallback
from rogerthat.rpc.rpc import mapping
from rogerthat.rpc.service import service_api, service_api_callback
from rogerthat.to.app import AppInfoTO, AppUserListResultTO, AppUserTO, AppSettingsTO, PutLoyaltyUserResultTO, \
AppLookAndFeelTO, NavigationItemTO, HomeScreenSettingsTO, AppTranslationTO
from rogerthat.to.installation import InstallationListTO, InstallationLogTO, InstallationTO
from rogerthat.to.messaging import BaseMemberTO
from rogerthat.to.statistics import AppServiceStatisticsTO
from rogerthat.utils import colorscale
from rogerthat.utils.app import create_app_user, get_app_user_tuple
from rogerthat.utils.crypto import decrypt, encrypt
from rogerthat.utils.service import add_slash_default
#############################################
# DO NOT DOCUMENT THIS SERVICE API FUNCTION #
@service_api(function=u'app.get_info')
@returns(AppInfoTO)
@arguments(app_id=unicode)
def get_info(app_id):
app = bizz_app.get_app(app_id)
return AppInfoTO.fromModel(app) if app else None
@service_api(function=u'app.put_user_regexes')
@returns()
@arguments(app_id=unicode, regexes=[unicode])
def put_user_regexes(app_id, regexes):
service_user = users.get_current_user()
validate_app_admin(service_user, [app_id])
bizz_app.put_user_regexes(service_user, app_id, regexes)
@service_api(function=u'app.del_user_regexes')
@returns()
@arguments(app_id=unicode, regexes=[unicode])
def del_user_regexes(app_id, regexes):
service_user = users.get_current_user()
validate_app_admin(service_user, [app_id])
bizz_app.del_user_regexes(service_user, app_id, regexes)
@service_api(function=u'app.add_auto_connected_services')
@returns(NoneType)
@arguments(app_id=unicode, services=[AutoConnectedService], auto_connect_now=bool)
def add_auto_connected_services(app_id, services, auto_connect_now=True):
service_user = users.get_current_user()
validate_app_admin(service_user, [app_id])
bizz_app.add_auto_connected_services(app_id, services, auto_connect_now)
@service_api(function=u'app.delete_auto_connected_service')
@returns(NoneType)
@arguments(app_id=unicode, service_identity_email=unicode)
def delete_auto_connected_service(app_id, service_identity_email):
service_user = users.get_current_user()
validate_app_admin(service_user, [app_id])
bizz_app.delete_auto_connected_service(service_user, app_id, service_identity_email)
@service_api(function=u'app.put_profile_data')
@returns(NoneType)
@arguments(email=unicode, profile_data=unicode, app_id=unicode)
def put_profile_data(email, profile_data, app_id):
from rogerthat.bizz.profile import set_profile_data
service_user = users.get_current_user()
validate_app_admin(service_user, [app_id])
set_profile_data(service_user, create_app_user(users.User(email), app_id), profile_data)
@service_api(function=u'app.del_profile_data')
@returns(NoneType)
@arguments(email=unicode, profile_data_keys=[unicode], app_id=unicode)
def del_profile_data(email, profile_data_keys, app_id):
from rogerthat.bizz.profile import set_profile_data
service_user = users.get_current_user()
validate_app_admin(service_user, [app_id])
set_profile_data(service_user,
create_app_user(users.User(email), app_id),
json.dumps(dict(((key, None) for key in profile_data_keys))))
@service_api(function=u'app.list_users')
@returns(AppUserListResultTO)
@arguments(app_id=unicode, cursor=unicode)
def list_users(app_id, cursor):
service_user = users.get_current_user()
validate_app_admin(service_user, [app_id])
if cursor:
cursor = decrypt(service_user, cursor)
query = get_user_profiles_by_app_id(app_id)
query.with_cursor(cursor)
user_profiles = query.fetch(1000)
cursor = query.cursor()
extra_key = query.fetch(1)
result = AppUserListResultTO()
result.cursor = unicode(encrypt(service_user, cursor)) if len(extra_key) > 0 else None
work = Queue()
results = Queue()
for items in [user_profiles[x:x + 50] for x in xrange(0, len(user_profiles), 50)]:
work.put(items)
def slave():
while True:
try:
user_profiles = work.get_nowait()
except Empty:
break # No more work, goodbye
try:
friendMaps = db.get([get_friends_map_key_by_user(user_profile.user) for user_profile in user_profiles])
for user_profile, friendMap in zip(user_profiles, friendMaps):
results.put(AppUserTO(user_profile, friendMap))
except Exception, e:
results.put(e)
threads = list()
for _ in xrange(10):
t = Thread(target=slave)
t.start()
threads.append(t)
for t in threads:
t.join()
result.users = list()
while not results.empty():
app_user = results.get()
if isinstance(app_user, AppUserTO):
result.users.append(app_user)
else:
raise app_user
return result
@service_api(function=u'app.get_settings')
@returns(AppSettingsTO)
@arguments(app_id=unicode)
def get_settings(app_id=None):
"""
Args:
app_id (unicode)
Returns:
AppSettingsTO
"""
service_user = users.get_current_user()
if not app_id:
app_id = get_default_service_identity(service_user).app_id
validate_app_admin(service_user, [app_id])
return AppSettingsTO.from_model(bizz_app.get_app_settings(app_id))
@service_api(function=u'app.put_settings')
@returns(AppSettingsTO)
@arguments(settings=AppSettingsTO, app_id=unicode)
def put_settings(settings, app_id=None):
"""
Args:
        settings (AppSettingsTO)
app_id (unicode)
Returns:
AppSettingsTO
"""
service_user = users.get_current_user()
if not app_id:
app_id = get_default_service_identity(service_user).app_id
validate_app_admin(service_user, [app_id])
return AppSettingsTO.from_model(bizz_app.put_settings(app_id, settings))
@service_api(function=u'app.put_loyalty_user')
@returns(PutLoyaltyUserResultTO)
@arguments(url=unicode, email=unicode)
def put_lo
|
bwind/propeller
|
propeller/response.py
|
Python
|
bsd-2-clause
| 4,589
| 0.000436
|
from datetime import timedelta
from propeller.cookie import Cookie
from propeller.options import Options
from propeller.template import Template
from propeller.util.dict import MultiDict
from urllib import quote
import httplib
import propeller
class Response(object):
def __init__(self, body='', status_code=200, content_type='text/html'):
self.body = body
self.status_code = status_code
self.headers = MultiDict()
self.cookies = []
self.headers['Content-Type'] = content_type
def _get_status_code(self):
return self._status_code
def _set_status_code(self, status_code):
assert status_code >= 200 and status_code <= 500, \
'status_code must be an int between 200 and 500'
self._status_code = status_code
def _get_body(self):
return self._body
def _set_body(self, body):
assert isinstance(body, basestring) or isinstance(body, Template), \
'body must be an instance of basestring or Template'
if isinstance(body, basestring):
self._body = body
elif isinstance(body, Template):
self._body = str(body)
def _build_headers(self):
self.headers['Content-Length'] = len(self.body)
if 'Content-Type' not in self.headers or not \
self.headers['Content-Type'][0]:
self.headers['Content-Type'] = 'text/html; charset=utf-8'
status = 'HTTP/1.0 %d %s' % (self.status_code,
httplib.responses[self.status_code])
headers = ['%s: %s' % (k, v) for k, v in self.headers.items()]
headers += ['Set-Cookie: %s' % str(c) for c in self.cookies]
headers = '\r\n'.join([status] + headers) + '\r\n\r\n'
        return headers
    def _error_page(self, title, subtitle='', traceback=None):
        t = Options.tpl_env.get_template('error.html')
return t.render(
title=title,
subtitle=subtitle,
traceback=traceback,
version=propeller.__version__
)
def set_cookie(self, name, value, domain=None, expires=None, path=None,
secure=False):
self.cookies.append(Cookie(name=name, value=value, domain=domain,
expires=expires, path=path, secure=secure))
def __str__(self):
return self._build_headers() + self.body
status_code = property(_get_status_code, _set_status_code)
body = property(_get_body, _set_body)
class RedirectResponse(Response):
def __init__(self, redirect_url, permanent=False, *args, **kwargs):
status_code = 301 if permanent else 302
super(RedirectResponse, self).__init__(status_code=status_code, *args,
**kwargs)
self.redirect_url = redirect_url
def __str__(self):
if 'Location' not in self.headers:
self.headers['Location'] = self.redirect_url
return super(RedirectResponse, self).__str__()
class BadRequestResponse(Response):
def __init__(self, *args, **kwargs):
super(BadRequestResponse, self).__init__(status_code=400, *args,
**kwargs)
def __str__(self):
if not self.body and Options.debug:
self.body = self._error_page(httplib.responses[self.status_code])
return super(BadRequestResponse, self).__str__()
class NotFoundResponse(Response):
def __init__(self, url=None, *args, **kwargs):
super(NotFoundResponse, self).__init__(status_code=404, *args,
**kwargs)
self.url = url
def __str__(self):
if not self.body and Options.debug:
self.body = self._error_page(httplib.responses[self.status_code],
self.url)
return super(NotFoundResponse, self).__str__()
class InternalServerErrorResponse(Response):
def __init__(self, title, subtitle, traceback, *args, **kwargs):
super(InternalServerErrorResponse, self).__init__(status_code=500,
*args, **kwargs)
self.title = title
self.subtitle = subtitle
self.traceback = traceback
def __str__(self):
if not self.body and Options.debug:
self.body = self._error_page(self.title,
self.subtitle,
self.traceback)
return super(InternalServerErrorResponse, self).__str__()
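# Usage sketch (not part of the original module): serializing a Response
# yields a complete HTTP/1.0 message, headers first, then the body (cookie
# rendering is delegated to propeller.cookie.Cookie):
#
#   r = Response(body='hello', status_code=200)
#   r.set_cookie('session', 'abc123', path='/')
#   raw = str(r)  # "HTTP/1.0 200 OK\r\n...\r\n\r\nhello"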
|
chengduoZH/Paddle
|
python/paddle/fluid/profiler.py
|
Python
|
apache-2.0
| 13,034
| 0.003683
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from . import core
from .wrapped_decorator import signature_safe_contextmanager
import os
import six
__all__ = [
'cuda_profiler', 'reset_profiler', 'profiler', 'start_profiler',
'stop_profiler'
]
NVPROF_CONFIG = [
"gpustarttimestamp",
"gpuendtimestamp",
"gridsize3d",
"threadblocksize",
"streamid",
"enableonstart 0",
"conckerneltrace",
]
@signature_safe_contextmanager
def cuda_profiler(output_file, output_mode=None, config=None):
"""
The CUDA profiler.
    This function is used to profile CUDA program by CUDA runtime application
programming interface. The profiling result will be written into
`output_file`. The users can set the output mode by `output_mode` argument
    and set the nvidia profiling config by `config` argument.
After getting the profiling result file, users can use
`NVIDIA Visual Profiler <https://developer.nvidia.com/nvidia-visual-profiler>`_
to load this output file to visualize results.
Args:
output_file (str) : The output file name, the result will be
written into this file.
        output_mode (str, optional) : The output mode has Key-Value pair format ('kvp')
            and Comma separated values format ('csv', default).
config (list<str>, optional) : Nvidia profile config. Default config is
['gpustarttimestamp', 'gpuendtimestamp', 'gridsize3d', 'threadblocksize',
'streamid', 'enableonstart 0', 'conckerneltrace']. For more details, please
refer to `Compute Command Line Profiler User Guide <https://developer.download.nvidia.cn/compute/DevZone/docs/html/C/doc/Compute_Command_Line_Profiler_User_Guide.pdf>`_ .
Raises:
ValueError: If `output_mode` is not in ['kvp', 'csv'].
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle.fluid.profiler as profiler
import numpy as np
epoc = 8
dshape = [4, 3, 28, 28]
data = fluid.data(name='data', shape=[None, 3, 28, 28], dtype='float32')
conv = fluid.layers.conv2d(data, 20, 3, stride=[1, 1], padding=[1, 1])
place = fluid.CUDAPlace(0)
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
output_file = 'cuda_profiler.txt'
with profiler.cuda_profiler(output_file, 'csv') as nvprof:
for i in range(epoc):
input = np.random.random(dshape).astype('float32')
exe.run(fluid.default_main_program(), feed={'data': input})
# then use NVIDIA Visual Profiler (nvvp) to load this output file
# to visualize results.
"""
if output_mode is None:
output_mode = 'csv'
if output_mode not in ['kvp', 'csv']:
raise ValueError("The output mode must be 'kvp' or 'csv'.")
config = NVPROF_CONFIG if config is None else config
config_file = 'nvprof_config_file'
with open(config_file, 'wb') as fp:
fp.writelines([six.b("%s\n" % item) for item in config])
core.nvprof_init(output_file, output_mode, config_file)
# Enables profiler collection by the active CUDA profiling tool.
core.nvprof_start()
yield
# Disables profiler collection.
core.nvprof_stop()
os.remove(config_file)
def reset_profiler():
"""
Clear the previous time record. This interface does not work for
`fluid.profiler.cuda_profiler`, it only works for
`fluid.profiler.start_profiler`, `fluid.profiler.stop_profiler`,
and `fluid.profiler.profiler`.
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle.fluid.profiler as profiler
with profiler.profiler('CPU', 'total', '/tmp/profile'):
for iter in range(10):
if iter == 2:
profiler.reset_profiler()
# ...
"""
core.reset_profiler()
def start_profiler(state):
"""
    Enable the profiler. Users can use `fluid.profiler.start_profiler` and
`fluid.profiler.stop_profiler` to profile, which is equal to the usage
of `fluid.profiler.profiler` interface.
Args:
state (str) : The profiling state, which should be one of 'CPU', 'GPU'
or 'All'. 'CPU' means only profiling CPU; 'GPU' means profiling
both CPU and GPU; 'All' means profiling both CPU and GPU, and
generates timeline as well.
Raises:
ValueError: If `state` is not in ['CPU', 'GPU', 'All'].
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle.fluid.profiler as profiler
profiler.start_profiler('GPU')
for iter in range(10):
if iter == 2:
profiler.reset_profiler()
# except each iteration
profiler.stop_profiler('total', '/tmp/profile')
"""
if core.is_profiler_enabled():
return
if state not in ['CPU', 'GPU', "All"]:
raise ValueError("The state must be 'CPU' or 'GPU' or 'All'.")
if state == "GPU":
prof_state = core.ProfilerState.kCUDA
elif state == "CPU":
prof_state = core.ProfilerState.kCPU
else:
prof_state = core.ProfilerState.kAll
core.enable_profiler(prof_state)
def stop_profiler(sorted_key=None, profile_path='/tmp/profile'):
"""
    Stop the profiler. Users can use `fluid.profiler.start_profiler` and
`fluid.profiler.stop_profiler` to profile, which is equal to the usage
of `fluid.profiler.profiler` interface.
Args:
sorted_key (str, optional) : The order of profiling results, which
should be one of None, 'calls', 'total', 'max', 'min' or 'ave'.
Default is None, means the profiling results will be printed
in the order of first end time of events.
The `calls` means sorting by the number of calls.
The `total` means sorting by the total execution time.
The `max` means sorting by the maximum execution time.
The `min` means sorting by the minimum execution time.
The `ave` means sorting by the average execution time.
profile_path (str, optional) : If state == 'All', it will generate timeline,
and write it into `profile_path`. The default profile_path is `/tmp/profile`.
Raises:
ValueError: If `sorted_key` is not in
['calls', 'total', 'max', 'min', 'ave'].
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle.fluid.profiler as profiler
profiler.start_profiler('GPU')
for iter in range(10):
if iter == 2:
profiler.reset_profiler()
# except each iteration
profiler.stop_profiler('total', '/tmp/profile')
"""
if not core.is_profiler_enabled():
return
sorted_key = 'default' if sorted_key is None else sorted_key
if sorted_key not in ['default', 'calls', 'total', 'max', 'min', 'ave']:
raise ValueError("The sorted_key must be None or in 'calls', 'total', "
"'max', 'min' and 'ave'")
key_map = {
'default': core.EventSortingKey.kDefault,
'calls': core.EventSortingKey.kCalls,
'total': core.EventSortingKey.kTotal,
'max': core.EventSortingKey.kMax,
'min': cor
|
linefeedse/korjournal
|
www/korjournal/permissions.py
|
Python
|
gpl-2.0
| 2,170
| 0.003687
|
from rest_framework import permissions
from korjournal.models import Driver
class IsOwner(permissions.BasePermission):
"""
Custom permission to only allow owners of an object to see it
"""
def has_object_permission(self, request, view, obj):
        user = request.user
        try:
if (obj.owner == user):
return True
else:
return False
except AttributeError:
try:
if (obj.vehicle.owner == user):
return True
                else:
                    return False
except AttributeError:
try:
if (obj.odometersnap.vehicle.owner == user):
return True
else:
return False
except AttributeError:
return False
return False
class IsDriver(permissions.BasePermission):
"""
Custom permission to only allow drivers/uploaders of an object to see it
"""
def has_object_permission(self, request, view, obj):
if IsOwner.has_object_permission(self, request, view, obj):
return True
user = request.user
try:
if (obj.driver == user):
return True
else:
return False
except AttributeError:
try:
driver = Driver.objects.filter(user=request.user,vehicle=obj)[0]
return True
except IndexError:
return False
return False
class AnythingGoes(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
return True
class DenyAll(permissions.BasePermission):
def has_permission(self,request,view):
return False
def has_object_permission(self, request, view, obj):
return False
class IsAdminOrPartialUpdate(permissions.BasePermission):
def has_permission(self, request, view):
if view.action == 'partial_update':
return True
else:
return permissions.IsAdminUser.has_permission(self, request, view)
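# Usage sketch (not part of the original file; the viewset is hypothetical):
# these classes plug into Django REST Framework views as usual, and
# has_object_permission() is then consulted for every detail-route request.
#
#   from rest_framework import viewsets
#   from korjournal.permissions import IsDriver
#
#   class VehicleViewSet(viewsets.ModelViewSet):
#       permission_classes = (IsDriver,)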
|
marev711/scripts
|
medlemsinput.py
|
Python
|
mit
| 2,300
| 0.006957
|
# Graphical paste and save GUI for adding members
from Tkinter import *
import os
import pdb
import datetime
lday = 04
lmonth = 10
class myDate:
def __init__(self, year, month, day):
self.date = datetime.datetime(year, month, day)
self.updateString()
def getMonthDay(self):
lday = format(self.date.day, '02')
lmonth = format(self.date.month, '02')
return lmonth + lday
def getfilename(self):
lfdate = self.getMonthDay()
lfname = lfdate + "-" + nextfname(lfdate) + ".txt"
return lfname
def updateString(self):
self.datestr = self.date.strftime("%m%d")
def updateDate(self, dt_obj):
self.date = dt_obj
date = myDate(2015, lmonth, lday)
def save(date):
f = open(date.getfilename(), "w")
t = text.get("1.0", END)
f.write(t.encode('utf8'))
f.close()
lfname = date.getfilename()
llabel.configure(text = lfname)
def add_day(date):
dt = datetime.datetime(2015, date.date.month, date.date.day)
dt = dt + datetime.timedelta(days=1)
date.updateDate(dt)
date.updateString()
lfname = date.getfilename()
llabel.configure(text = lfname)
def sub_day(date):
dt = datetime.datetime(2015, date.date.month, date.date.day)
dt = dt - datetime.timedelta(days=1)
date.updateDate(dt)
date.updateString()
lfname = date.getfilename()
llabel.configure(text = lfname)
def select_all(event):
    text.tag_add(SEL, "1.0", END)
text.mark_set(INSERT, "1.0")
text.see(INSERT)
return 'break'
def nextfname(prefix):
first = 1
fstr = format(first, '02')
while os.path.exists(prefix + "-" + fstr + ".txt"):
        first = first + 1
fstr = format(first, '02')
return fstr
root = Tk()
text = Text(root)
text.insert(INSERT, "")
text.bind("<Control-Key-a>", select_all)
text.grid()
bsave = Button(root, text="Save", command=lambda: save(date))
bsave.grid(columnspan=2, column=1, row=0)
dplus = Button(root, text="d+", command=lambda: add_day(date))
dplus.grid(column=1, row=1)
dminus = Button(root, text="d-", command=lambda: sub_day(date))
dminus.grid(column=2, row=1)
lfname = date.getfilename()
llabel = Label(root, text=lfname)
llabel.grid(columnspan=2, column=1, row=2)
root.mainloop()
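# Worked example of the naming scheme (a sketch, not in the original file):
# with the initial date 2015-10-04, getMonthDay() returns "1004", so the
# first save writes "1004-01.txt"; nextfname() then yields "02", "03", ...
# as files accumulate for that date.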
|
ElliottH/tvrename
|
tvrename.py
|
Python
|
mit
| 7,125
| 0.002105
|
#!/usr/bin/env python
from __future__ import print_function
import os
import sys
import argparse
import re
import shutil
class Colour(object):
"""Utility class for colour printing messages"""
RED = '\033[91m'
WHITE = '\033[97m'
END = '\033[0m'
@staticmethod
def print(colour, msg, end="\n"):
"""Print msg in colour"""
print("%s%s%s" % (colour, msg, Colour.END), end=end)
@staticmethod
def red(msg, end="\n"):
"""Print msg in red"""
Colour.print(Colour.RED, msg, end)
@staticmethod
def white(msg, end="\n"):
"""Print msg in white"""
Colour.print(Colour.WHITE, msg, end)
class Renamer(object):
"""
Takes files in the following forms:
* Awesome.Show.S01E01.SOURCE.FMT-GRP.ext
* awesome.show.101.source-grp.ext
It can then either:
* rename the file to S01E01.ext
* move the file to Awesome Show/S01/S01E01.ext
"""
# It's important to test with SE before ALT, because ALT may erroneously
# match SE format series names that contain the year.
SE_REGEX = re.compile(r"^(.+)\.S(\d+)E(\d+).+\.(...)$", re.IGNORECASE)
ALT_REGEX = re.compile(r"^(.+)\.(\d+)\.[^-]+?-[^.]+\.(...)$")
def __init__(self, args):
self.args = args
@staticmethod
def find_candidates(directory, pattern):
"""Find subdirectories of directory that match pattern"""
list_path = [i for i in os.listdir(directory) if
os.path.isdir(os.path.join(directory, i))]
return [os.path.join(directory, j) for j in list_path if
re.match(pattern, j,
re.IGNORECASE)]
@staticmethod
def ask_candidates(candidates):
"""Ask the user to pick from a list"""
Colour.white("Candidates are:")
for (idx, name) in enumerate(candidates):
print("[%d] %s" % (idx + 1, name))
while True:
choice = raw_input("%sChoice? [1-%d/Q]%s " % (Colour.WHITE,
len(candidates),
Colour.END))
if choice.lower() == "q" or choice == "":
return None
elif choice.isdigit():
val = int(choice) - 1
if val < len(candidates) and val >= 0:
return candidates[val]
print("Invalid selection '%s'." % choice)
def move(self, files, directory):
"""
Move files to their proper place
We look for a directory inside the given directory that matches the show
name, and then a directory inside that that matches the season number.
For example, if we have a show awesome.show.s02e02.HDTV.x264-GRP.mp4 we
will try to find a directory matching r"awesome.+show.*" and then a
directory S02 inside that. Something like "Awesome Show/S02/".
"""
for (fname, info) in [(f, self.split_name(f)) for f in files]:
if not info:
print("Unable to parse %s" % fname)
else:
(name, season, episode, extension) = info
new_name = "S%02dE%02d.%s" % (season, episode, extension)
pattern = os.path.basename(name).replace('.', '.+') + '.*'
result = self.find_candidates(directory, pattern)
if len(result) == 0:
Colour.red("Couldn't determine destination for %s." %
os.path.basename(fname))
continue
elif len(result) > 1:
Colour.white("Couldn't determine desination for %s." %
os.path.basename(fname))
destination = self.ask_candidates(result)
if destination == None:
continue
else:
destination = result[0]
destination = os.path.join(destination, "S%02d" % season)
if os.path.isdir(destination):
self.confirm_move(fname, os.path.join(destination, new_name))
else:
print("No directory S%02d in %s" % (season, destination))
def rename(self, files):
"""Rename files to their proper names"""
for (fname, info) in [(f, self.split_name(f)) for f in files]:
if not info:
print("Unable to parse %s", fname)
else:
(name, season, episode, extension) = info
new_name = "S%02dE%02d.%s" % (season, episode, extension)
                new_path = os.path.join(os.path.dirname(name), new_name)
self.confirm_move(fname, new_path)
    def confirm_move(self, src, dest):
        """
Move file from src to dest
        * If args.dry_run is True, then we don't actually do anything.
* If args.confirm is True, then we ask first.
"""
if os.path.isfile(dest):
Colour.red("Not moving %s to %s as file already exists" % (src, dest))
elif os.path.isdir(dest):
Colour.red("Not moving %s to %s as it is a directory" % (src, dest))
else:
if self.args.dry_run:
Colour.white("(Not) ", end="")
print("Moving %s to %s" % (src, dest))
if self.args.dry_run:
return
elif self.args.confirm:
ans = raw_input("%sOK? [Y/n/q] %s" % (Colour.WHITE, Colour.END))
if ans.lower() == "q":
sys.exit(0)
elif ans.lower() != "y" and ans != "":
print("Skipping %s..." % src)
return
shutil.move(src, dest)
def split_name(self, fname):
"""Split a file name, fname, into its consituent parts"""
match = self.SE_REGEX.match(fname)
if match:
season, episode = int(match.group(2)), int(match.group(3))
return (match.group(1), season, episode, match.groups()[-1])
else:
match = self.ALT_REGEX.match(fname)
if match:
digits = match.group(2)
season, episode = int(digits[:-2]), int(digits[-2:])
return (match.group(1), season, episode, match.groups()[-1])
else:
return None
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-n', '--dry-run', help='just print what would be done',
action='store_true', dest='dry_run')
parser.add_argument('-c', '--confirm', help='confirm before doing',
action='store_true')
parser.add_argument('files', nargs='+')
args = parser.parse_args()
renamer = Renamer(args)
if len(args.files) > 1:
if not os.path.isdir(args.files[-1]):
print('Last argument is not a directory')
sys.exit(1)
else:
renamer.move(args.files[0:-1], args.files[-1])
else:
renamer.rename(args.files)
if __name__ == "__main__":
main()
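# Example invocations (a sketch, not in the original script):
#
#   # Rename in place: Awesome.Show.S01E01.HDTV.x264-GRP.mp4 -> S01E01.mp4
#   ./tvrename.py Awesome.Show.S01E01.HDTV.x264-GRP.mp4
#
#   # Dry-run a move into a library tree like "TV/Awesome Show/S01/"
#   ./tvrename.py -n awesome.show.102.hdtv-grp.mp4 TV/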
|
jacoboariza/BotIntendHub
|
yahooWeatherForecast.py
|
Python
|
apache-2.0
| 335
| 0.00597
|
#!/usr/bin/env python
def makeYqlQuery(req):
result = req.get("result")
parameters = result.get("parameters")
city = parameters.get("geo-city")
if city is None:
return None
return "select * from weather.forecast where woeid in (select woeid fro
|
m geo.places(1) where text='" + city + "') and
|
u='c'"
|
mldbai/mldb
|
testing/plugin_delete_test.py
|
Python
|
apache-2.0
| 856
| 0.005841
|
# This file is part of MLDB. Copyright 2015 mldb.ai inc. All rights reserved.
pythonScript = {
"type": "python",
"params": {
"address": "",
"source": """
mldb.log("Constructing plugin!")
def requestHandler(mldb, remaining, verb, resource, restParams, payload, contentType, contentLength, headers):
mldb.log("waqlsajf;lasdf")
print "Handling route in python"
if verb == "GET" and remaining == "/miRoute":
return "bouya!"
mldb.plugin.set_request_handler(requestHandler)
"""
}
}
request.set_return("hoho")
mldb.log("ouin")
mldb.log(str(mldb.perform("PUT", "/v1/plugins/plugToDel", [["sync", "true"]], pythonScript)))
mldb.log(str(mldb.perform("GET", "/v1/plugins", [], {})))
rtn= mldb.perform("GET", "/v1/plugins/plugToDel/routes/miRoute", [], {})
print(rtn)
request.set_return(rtn["response"])
|
noamelf/Open-Knesset
|
laws/management/commands/freeze_bills.py
|
Python
|
bsd-3-clause
| 1,347
| 0
|
from __future__ import print_function
from django.core.management.base import BaseCommand
from optparse import make_option
from laws.models import Bill
from laws.vote_choices import BILL_STAGE_CHOICES
from mks.models import Knesset
class Command(BaseCommand):
help = "Freeze bills staged in previous knessets"
option_list = BaseCommand.option_list + (
make_option(
'-n', action='store_true', dest="dryrun", default=False,
help='Dry run, changes nothing in the db, just display results'
),
)
def handle(self, *args, **options):
start_date = Knesset.objects.current_knesset().start_date
valid_stages = [key for (key, val) in BILL_STAGE_CHOICES
if key.isnumeric() and 1 < int(key) < 6]
bills = Bill.objects.filter(stage_date__lte=start_date,
stage__in=valid_stages)
total = Bill.objects.count()
found = bills.count()
msg = "Found {0} bills of {1} in stages {2} and dated before {3}"
print(msg.format(found, total, u','.join(valid_stages), start_date))
|
if options['dryrun']:
print("Not updating the db, dry run was specified")
else:
            print('Setting {0} bills stage to u"0"'.format(found))
            bills.update(stage=u'0')
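# Usage sketch (not part of the original command): run through manage.py,
# with -n for a dry run that only reports what would change:
#
#   ./manage.py freeze_bills -n
#   ./manage.py freeze_bills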
|
alexeyshulzhenko/OBDZ_Project
|
OnlineAgecy/urls.py
|
Python
|
gpl-3.0
| 3,927
| 0.005348
|
#!python
# log/urls.py
from django.conf.urls import url
from . import views
# We are adding a URL called /home
urlpatterns = [
url(r'^$', views.home, name='home'),
url(r'^clients/$', views.clients, name='clients'),
url(r'^clients/(?P<id>\d+)/$', views.client_detail, name='client_detail'),
url(r'^clients/new/$', views.client_new, name='client_new'),
    url(r'^clients/(?P<id>\d+)/edit/$', views.client_edit, name='client_edit'),
    url(r'^clients/services/$', views.clients_services_count, name='clients_services_count'),
    url(r'^clients/bills/(?P<id>\d+)/$', views.all_clients_bills, name='all_clients_bills'),
url(r'^clients/bills/$', views.fresh_clients, name='fresh_clients'),
url(r'^clients/del/(?P<id>\d+)/$', views.delete_client, name='delete_client'),
url(r'^contracts/$', views.contracts, name='contracts'),
url(r'^contracts/(?P<id>\d+)/$', views.contract_detail, name='contract_detail'),
url(r'^contracts/new/$', views.contract_new, name='contract_new'),
url(r'^contracts/(?P<id>\d+)/edit/$', views.contract_edit, name='contract_edit'),
url(r'^contracts/list/(?P<id>\d+)/$', views.all_clients_contracts, name='all_clients_contracts'),
url(r'^contracts/list/$', views.contracts_services, name='contracts_services'),
url(r'^contracts/del/(?P<id>\d+)/$', views.delete_contract, name='delete_contract'),
url(r'^manager/$', views.managers, name='managers'),
url(r'^manager/(?P<id>\d+)/$', views.manager_detail, name='manager_detail'),
url(r'^manager/new/$', views.manager_new, name='manager_new'),
url(r'^manager/(?P<id>\d+)/edit/$', views.manager_edit, name='manager_edit'),
url(r'^manager/clients/$', views.managers_clients_count, name='managers_clients_count'),
url(r'^managers/del/(?P<id>\d+)/$', views.delete_manager, name='delete_manager'),
url(r'^briefs/$', views.brief, name='briefs'),
url(r'^briefs/(?P<id>\d+)/$', views.brief_detail, name='brief_detail'),
url(r'^briefs/new/$', views.brief_new, name='brief_new'),
url(r'^briefs/(?P<id>\d+)/edit/$', views.brief_edit, name='brief_edit'),
url(r'^briefs/del/(?P<id>\d+)/$', views.delete_brief, name='delete_brief'),
url(r'^briefs/list/(?P<id>\d+)/$', views.all_clients_briefs, name='all_clients_briefs'),
url(r'^services/$', views.services, name='services'),
url(r'^services/(?P<id>\d+)/$', views.service_detail, name='service_detail'),
url(r'^services/new/$', views.services_new, name='services_new'),
url(r'^services/(?P<id>\d+)/edit/$', views.service_edit, name='service_edit'),
url(r'^services/table/(?P<id>\d+)/$', views.service_all_clients, name='service_all_clients'),
url(r'^services/del/(?P<id>\d+)/$', views.delete_service, name='delete_service'),
url(r'^contractors/$', views.contractors, name='contractors'),
url(r'^contractors/(?P<id>\d+)/$', views.contractor_detail, name='contractor_detail'),
url(r'^contractors/new/$', views.contractors_new, name='contractors_new'),
url(r'^contractors/(?P<id>\d+)/edit/$', views.contractor_edit, name='contractor_edit'),
url(r'^contractors/newest/$', views.newest_contractors, name='newest_contractors'),
url(r'^contractors/del/(?P<id>\d+)/$', views.delete_contractor, name='delete_contractor'),
url(r'^acts/$', views.acts, name='acts'),
url(r'^acts/(?P<id>\d+)/$', views.act_detail, name='act_detail'),
url(r'^acts/new/$', views.act_new, name='act_new'),
url(r'^acts/(?P<id>\d+)/edit/$', views.act_edit, name='act_edit'),
url(r'^acts/del/(?P<id>\d+)/$', views.delete_act, name='delete_act'),
url(r'^bills/$', views.bills, name='bills'),
url(r'^bills/(?P<id>\d+)/$', views.bills_detail, name='bills_detail'),
url(r'^bills/new/$', views.bills_new, name='bills_new'),
url(r'^bills/(?P<id>\d+)/edit/$', views.bills_edit, name='bills_edit'),
url(r'^bill/del/(?P<id>\d+)/$', views.delete_bill, name='delete_bill'),
]
|
apache/incubator-cotton
|
mysos/executor/shell_utils.py
|
Python
|
apache-2.0
| 3,152
| 0.011421
|
"""Set of utility functions for working with OS commands.
Functions in this module return the command string. These commands are composed but not executed.
"""
import os
from subprocess import call
HADOOP_CONF_DIR = '/etc/hadoop/conf'
def encrypt(key_file):
"""
Encrypt the data from stdin and write output to stdout.
:param key_file: The key file used to encrypt the stream.
"""
if not os.path.isfile(key_file):
raise ValueError("Cannot find key_file: %" % key_file)
return "openssl aes-256-cbc -salt -pass file:%s" % key_file
def decrypt(key_file):
"""
Decrypt the data from stdin and write output to stdout.
:param key_file: The key file used to decrypt the stream.
"""
if not os.path.isfile(key_file):
raise ValueError("Cannot find key_file: %" % key_file)
return "openssl aes-256-cbc -d -pass file:%s" % key_file
def compress(extension):
"""
Compress the data from stdin and write output to stdout.
:param extension: The compression format identified by the file extension. Allowed values are:
                      'gz' for gzip, 'bz' or 'bz2' for bzip, and 'lzo' for lzop.
"""
if extension == "gz":
cmd = "pigz" if exists("pigz") else "gzip"
elif extension == "bz" or extension == "bz2":
cmd = "bzip2"
elif extension == 'lzo':
cmd = "lzop"
else:
raise ValueError("Unknown compression format/file extension")
return cmd
def decompress(extension):
"""
Decompress the data from stdin and write output to stdout.
:param extension: The compression format identified by the file extension. Allowed values are:
                      'gz' for gzip, 'bz' or 'bz2' for bzip, and 'lzo' for lzop.
"""
if extension == "gz":
cmd = "pigz -d" if exists("pigz") else "gzip -d"
elif extension == "bz" or extension == "bz2":
cmd = "bzip2 -d"
elif extension == 'lzo':
cmd = "lzop -d"
else:
raise ValueError("Unknown compression format/file extension")
return cmd
def hdfs_cat(uri, conf=HADOOP_CONF_DIR):
"""
Fetch the data from the specified uri and write output to stdout.
:param uri: The HDFS URI.
:param conf: The hadoop config directory.
"""
return "hadoop --config %s dfs -cat %s" % (conf, uri)
def pv(size):
"""
Monitor the progress of data through a pipe. If 'pv' is not available, simply 'cat' it.
:param size: The size of the data, to calculate percentage.
"""
if exists('pv'):
return "pv --wait --size %s" % size
else:
return "cat"
def untar(directory):
"""
Untar the data from stdin into the specified directory.
:param directory: The directory to write files to.
"""
return "tar -C %s -x" % directory
def tar(path):
"""
Tar the path and write output to stdout.
:param path: All contents under path are 'tar'ed.
"""
if not os.path.exists(path):
raise ValueError("Invalid argument: 'path' doesn't exist")
path = path.rstrip(os.sep)
parent, base = os.path.split(path)
return "tar -C %s %s" % (parent, base)
def exists(cmd):
"""Return true if 'cmd' exists in $PATH."""
with open(os.devnull, "w") as f:
return call(['which', cmd], stdout=f) == 0 # No stdout.
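# Usage sketch: composing a restore pipeline from the helpers above. The HDFS
# URI, key file and target directory are hypothetical placeholders, not
# defaults shipped with this module.
def example_restore_pipeline():
    """Return a shell pipeline that fetches, decrypts, decompresses and untars a backup."""
    stages = [
        hdfs_cat("hdfs:///backups/db.tar.gz.enc"),  # hypothetical HDFS URI
        decrypt("/etc/mysos/backup.key"),           # hypothetical key file
        decompress("gz"),
        untar("/var/lib/mysql"),
    ]
    return " | ".join(stages)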
|
qsnake/gpaw
|
gpaw/xc/hybridk.py
|
Python
|
gpl-3.0
| 14,413
| 0.002637
|
# Copyright (C) 2010 CAMd
# Please see the accompanying LICENSE file for further information.
"""This module provides all the classes and functions associated with the
evaluation of exact exchange with k-point sampling."""
from math import pi, sqrt
import numpy as np
from ase import Atoms
from gpaw.xc import XC
from gpaw.xc.kernel import XCNull
from gpaw.xc.functional import XCFunctional
from gpaw.utilities import hartree, pack, unpack2, packed_index
from gpaw.lfc import LFC
from gpaw.wavefunctions.pw import PWDescriptor
from gpaw.kpt_descriptor import KPointDescriptor
from gpaw.kpoint import KPoint as KPoint0
from gpaw.mpi import world
class KPoint:
def __init__(self, kd, kpt=None):
"""Helper class for parallelizing over k-points.
Placeholder for wave functions, occupation numbers,
projections, and global k-point index."""
self.kd = kd
if kpt is not None:
self.psit_nG = kpt.psit_nG
self.f_n = kpt.f_n
self.P_ani = kpt.P_ani
self.k = kpt.k
self.s = kpt.s
self.requests = []
def next(self):
"""Create empty object.
Data will be received from other processor."""
kpt = KPoint(self.kd)
# intialize array for receiving:
kpt.psit_nG = np.empty_like(self.psit_nG)
kpt.f_n = np.empty_like(self.f_n)
# Total number of projector functions:
I = sum([P_ni.shape[1] for P_ni in self.P_ani.values()])
kpt.P_In = np.empty((I, len(kpt.f_n)), complex)
kpt.P_ani = {}
I1 = 0
for a, P_ni in self.P_ani.items():
I2 = I1 + P_ni.shape[1]
kpt.P_ani[a] = kpt.P_In[I1:I2].T
I1 = I2
kpt.k = (self.k + 1) % self.kd.nibzkpts
kpt.s = self.s
return kpt
def start_sending(self, rank):
P_In = np.concatenate([P_ni.T for P_ni in self.P_ani.values()])
self.requests += [
self.kd.comm.send(self.psit_nG, rank, block=False, tag=1),
self.kd.comm.send(self.f_n, rank, block=False, tag=2),
self.kd.comm.send(P_In, rank, block=False, tag=3)]
def start_receiving(self, rank):
self.requests += [
self.kd.comm.receive(self.psit_nG, rank, block=False, tag=1),
self.kd.comm.receive(self.f_n, rank, block=False, tag=2),
self.kd.comm.receive(self.P_In, rank, block=False, tag=3)]
def wait(self):
self.kd.comm.waitall(self.requests)
self.requests = []
class HybridXC(XCFunctional):
orbital_dependent = True
def __init__(self, name, hybrid=None, xc=None, finegrid=False,
alpha=None):
"""Mix standard functionals with exact exchange.
name: str
Name of hybrid functional.
hybrid: float
Fraction of exact exchange.
xc: str or XCFunctional object
Standard DFT functional with scaled down exchange.
finegrid: boolean
Use fine grid for energy functional evaluations?
"""
if name == 'EXX':
assert hybrid is None and xc is None
hybrid = 1.0
xc = XC(XCNull())
elif name == 'PBE0':
assert hybrid is None and xc is None
hybrid = 0.25
xc = XC('HYB_GGA_XC_PBEH')
elif name == 'B3LYP':
assert hybrid is None and xc is None
hybrid = 0.2
xc = XC('HYB_GGA_XC_B3LYP')
if isinstance(xc, str):
xc = XC(xc)
self.hybrid = hybrid
self.xc = xc
self.type = xc.type
self.alpha = alpha
self.exx = 0.0
XCFunctional.__init__(self, name)
def get_setup_name(self):
return 'PBE'
def calculate_radial(self, rgd, n_sLg, Y_L, v_sg,
dndr_sLg=None, rnablaY_Lv=None,
tau_sg=None, dedtau_sg=None):
return self.xc.calculate_radial(rgd, n_sLg, Y_L, v_sg,
dndr_sLg, rnablaY_Lv)
def initialize(self, density, hamiltonian, wfs, occupations):
self.xc.initialize(density, hamiltonian, wfs, occupations)
self.nspins = wfs.nspins
self.setups = wfs.setups
self.density = density
self.kpt_u = wfs.kpt_u
self.gd = density.gd
self.kd = wfs.kd
self.bd = wfs.bd
N_c = self.gd.N_c
N = self.gd.N_c.prod()
vol = self.gd.dv * N
if self.alpha is None:
self.alpha = 6 * vol**(2 / 3.0) / pi**2
self.gamma = (vol / (2 * pi)**2 * sqrt(pi / self.alpha) *
self.kd.nbzkpts)
ecut = 0.5 * pi**2 / (self.gd.h_cv**2).sum(1).max()
if self.kd.N_c is None:
self.bzk_kc = np.zeros((1, 3))
            raise NotImplementedError  # placeholder 'dfghdfgh' in the original: branch not implemented
else:
n = self.kd.N_c * 2 - 1
bzk_kc = np.indices(n).transpose((1, 2, 3, 0))
bzk_kc.shape = (-1, 3)
bzk_kc -= self.kd.N_c - 1
self.bzk_kc = bzk_kc.astype(float) / self.kd.N_c
self.pwd = PWDescriptor(ecut, self.gd, self.bzk_kc)
n = 0
for k_c, Gpk2_G in zip(self.bzk_kc[:], self.pwd.G2_qG):
if (k_c > -0.5).all() and (k_c <= 0.5).all(): #XXX???
if k_c.any():
self.gamma -= np.dot(np.exp(-self.alpha * Gpk2_G),
Gpk2_G**-1)
else:
self.gamma -= np.dot(np.exp(-self.alpha * Gpk2_G[1:]),
Gpk2_G[1:]**-1)
n += 1
assert n == self.kd.N_c.prod()
self.ghat = LFC(self.gd,
[setup.ghat_l for setup in density.setups],
dtype=complex
)
self.ghat.set_k_points(self.bzk_kc)
self.fullkd = KPointDescriptor(self.kd.bzk_kc, nspins=1)
class S:
id_a = []
def set_symmetry(self, s): pass
self.fullkd.set_symmetry(Atoms(pbc=True), S(), False)
self.fullkd.set_communicator(world)
self.pt = LFC(self.gd, [setup.pt_j for setup in density.setups],
dtype=complex)
self.pt.set_k_points(self.fullkd.ibzk_kc)
self.interpolator = density.interpolator
def set_positions(self, spos_ac):
self.ghat.set_positions(spos_ac)
self.pt.set_positions(spos_ac)
def calculate(self, gd, n_sg, v_sg=None, e_g=None):
# Normal XC contribution:
exc = self.xc.calculate(gd, n_sg, v_sg, e_g)
# Add EXX contribution:
return exc + self.exx
def calculate_exx(self):
"""Non-selfconsistent calculation."""
kd = self.kd
K = self.fullkd.nibzkpts
assert self.nspins == 1
Q = K // world.size
assert Q * world.size == K
parallel = (world.size > self.nspins)
self.exx = 0.0
self.exx_skn = np.zeros((self.nspins, K, self.bd.nbands))
kpt_u = []
for k in range(world.rank * Q, (world.rank + 1) * Q):
k_c = self.fullkd.ibzk_kc[k]
for k1, k1_c in enumerate(kd.bzk_kc):
if abs(k1_c - k_c).max() < 1e-10:
break
# Index of symmetry related point in the irreducible BZ
ik = kd.kibz_k[k1]
kpt = self.kpt_u[ik]
# KPoint from ground-state calculation
phase_cd = np.exp(2j * pi * self.gd.sdisp_cd * k_c[:, np.newaxis])
            kpt2 = KPoint0(kpt.weight, kpt.s, k, None, phase_cd)
kpt2.psit_nG = np.empty_like(kpt.psit_nG)
kpt2.f_n = kpt.f_n / kpt.weight / K * 2
for n, psit_G in enumerate(kpt2.psit_nG):
psit_G[:] = kd.transform_wave_function(kpt.psit_nG[n], k1)
kpt2.P_ani = self.pt.dict(len(kpt.psit_nG))
self.pt.integrate(kpt2.psit_nG, kpt2.P_ani, k)
            kpt_u.append(kpt2)
|
skarllot/open-tran
|
lib/stem/snowball.py
|
Python
|
gpl-2.0
| 136,128
| 0.004988
|
# -*- coding: utf-8 -*-
#
# Natural Language Toolkit: Snowball Stemmer
#
# Copyright (C) 2001-2010 NLTK Project
# Author: Peter Michael Stahl <pemistahl@gmail.com>
# Algorithms: Dr Martin Porter <martin@tartarus.org>
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
u"""
Snowball stemmers and appendant demo function
This module provides a port of the Snowball stemmers
developed by U{Dr Martin Porter<http://tartarus.org/~martin/>}.
There is also a demo function demonstrating the different
algorithms. It can be invoked directly on the command line.
For more information take a look into the class C{SnowballStemmer}.
@author: Peter Michael Stahl
@contact: pemistahl@gmail.com
@contact: U{http://twitter.com/pemistahl}
"""
from api import *
#from nltk.corpus import stopwords
class SnowballStemmer(StemmerI):
u"""
A word stemmer based on the Snowball stemming algorithms.
At the moment, this port is able to stem words from thirteen
languages: Danish, Dutch, Finnish, French, German,
Hungarian, Italian, Norwegian, Portuguese, Romanian, Russian,
Spanish and Swedish.
The algorithms have been developed by
U{Dr Martin Porter<http://tartarus.org/~martin/>}.
These stemmers are called Snowball, because he invented
a programming language with this name for creating
new stemming algorithms. There is more information available
on the U{Snowball Website<http://snowball.tartarus.org/>}.
The stemmer is invoked as shown below:
>>> from snowball import SnowballStemmer
>>> SnowballStemmer.languages # See which languages are supported
('danish', 'dutch', 'finnish', 'french', 'german', 'hungarian',
    'italian', 'norwegian', 'portuguese', 'romanian', 'russian',
'spanish', 'swedish')
>>> stemmer = SnowballStemmer("german") # Choose a language
>>> stemmer.stem(u"Autobahnen") # Stem a word
u'autobahn'
@author: Peter Michael Stahl
@contact: pemistahl@gmail.com
@contact: U{http://twitter.com/pemistahl}
@cvar languages: A tuple that contains the available language names
@type languages: C{tuple}
    @ivar stopwords: A list that contains stopwords for the respective language
                     in Unicode format.
@type stopwords: C{list}
"""
languages = ("danish", "dutch", "finnish", "french", "german", "hungarian",
"italian", "norwegian", "portuguese", "romanian", "russian",
"spanish", "swedish")
def __new__(cls, language, **kwargs):
u"""
Override the constructor of class L{SnowballStemmer} in order to create
an instance of the language's respective subclass.
@param language: The language whose subclass is instantiated.
@type language: C{str, unicode}
@param kwargs: An arbitrary argument list for keyword arguments.
@type kwargs: C{dict}
@return: An instance of the language's respective subclass.
@rtype: C{class}
@raise ValueError: If there is no stemmer for the specified
language, a C{ValueError} is raised.
"""
if language == "danish":
return StemmerI.__new__(DanishStemmer)
elif language == "dutch":
return StemmerI.__new__(DutchStemmer)
elif language == "finnish":
return StemmerI.__new__(FinnishStemmer)
elif language == "french":
return StemmerI.__new__(FrenchStemmer)
elif language == "german":
return StemmerI.__new__(GermanStemmer)
elif language == "hungarian":
return StemmerI.__new__(HungarianStemmer)
elif language == "italian":
return StemmerI.__new__(ItalianStemmer)
elif language == "norwegian":
return StemmerI.__new__(NorwegianStemmer)
elif language == "portuguese":
return StemmerI.__new__(PortugueseStemmer)
elif language == "romanian":
return StemmerI.__new__(RomanianStemmer)
elif language == "russian":
return StemmerI.__new__(RussianStemmer)
elif language == "spanish":
return StemmerI.__new__(SpanishStemmer)
elif language == "swedish":
return StemmerI.__new__(SwedishStemmer)
else:
raise ValueError(u"The language '%s' is not supported."
% language)
def __init__(self, language, ignore_stopwords=False):
u"""
Create an instance of the Snowball stemmer.
@param language: The language that is applied for stemming.
@type language: C{str, unicode}
@param ignore_stopwords: If set to C{True}, stopwords are
not stemmed and returned unchanged.
Set to C{False} by default.
@type ignore_stopwords: C{bool}
"""
# if ignore_stopwords:
# if language == "romanian":
# raise ValueError(u"The Romanian stemmer has not yet" +
# u" a list of stopwords. Please set" +
# u" u'ignore_stopwords' to u'False'.")
# else:
# self.stopwords = [word.decode("utf-8") for word in
# stopwords.words(language)]
# else:
self.stopwords = set()
def __repr__(self):
u"""
Print out the string representation of the respective class.
"""
return "<%s>" % type(self).__name__
class _ScandinavianStemmer(SnowballStemmer):
u"""
This subclass encapsulates a method for defining the string region R1.
It is used by the Danish, Norwegian, and Swedish stemmer.
"""
def _r1_scandinavian(self, word, vowels):
u"""
Return the region R1 that is used by the Scandinavian stemmers.
R1 is the region after the first non-vowel following a vowel,
or is the null region at the end of the word if there is no
such non-vowel. But then R1 is adjusted so that the region
before it contains at least three letters.
@param word: The word whose region R1 is determined.
@type word: C{str, unicode}
@param vowels: The vowels of the respective language that are
used to determine the region R1.
@type vowels: C{unicode}
@return: C{r1}, the region R1 for the respective word.
@rtype: C{unicode}
@note: This helper method is invoked by the respective stem method of
the subclasses L{DanishStemmer}, L{NorwegianStemmer}, and
L{SwedishStemmer}. It is not to be invoked directly!
"""
r1 = u""
for i in xrange(1, len(word)):
if word[i] not in vowels and word[i-1] in vowels:
if len(word[:i+1]) < 3 and len(word[:i+1]) > 0:
r1 = word[3:]
elif len(word[:i+1]) >= 3:
r1 = word[i+1:]
else:
return word
break
return r1
class _StandardStemmer(SnowballStemmer):
u"""
This subclass encapsulates two methods for defining the standard versions
of the string regions R1, R2, and RV.
"""
def _r1r2_standard(self, word, vowels):
u"""
Return the standard interpretations of the string regions R1 and R2.
R1 is the region after the first non-vowel following a vowel,
or is the null region at the end of the word if there is no
such non-vowel.
R2 is the region after the first non-vowel following a vowel
in R1, or is the null region at the end of the word if there
is no such non-vowel.
@param word: The word whose regions R1 and R2 are determined.
@type word: C{str, unicode}
@param vowels: The vowels of the respective language that are
used to determine the regions R1 and R2.
@type vowels: C{unicode}
@ret
|
hguemar/cinder
|
cinder/zonemanager/drivers/cisco/cisco_fc_zone_driver.py
|
Python
|
apache-2.0
| 23,043
| 0
|
# (c) Copyright 2014 Cisco Systems Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Cisco Zone Driver is responsible to manage access control using FC zoning
for Cisco FC fabrics.
This is a concrete implementation of FCZoneDriver interface implementing
add_connection and delete_connection interfaces.
**Related Flags**
:zone_activate: Used by: class: 'FCZoneDriver'. Defaults to True
:zone_name_prefix: Used by: class: 'FCZoneDriver'. Defaults to 'openstack'
"""
from oslo.utils import excutils
from oslo.utils import importutils
from oslo_concurrency import lockutils
from oslo_config import cfg
import six
from cinder import exception
from cinder.i18n import _, _LE, _LI
from cinder.openstack.common import log as logging
from cinder.zonemanager.drivers.cisco import cisco_fabric_opts as fabric_opts
from cinder.zonemanager.drivers.fc_zone_driver import FCZoneDriver
from cinder.zonemanager.utils import get_formatted_wwn
LOG = logging.getLogger(__name__)
cisco_opts = [
cfg.StrOpt('cisco_sb_connector',
default='cinder.zonemanager.drivers.cisco'
'.cisco_fc_zone_client_cli.CiscoFCZoneClientCLI',
help='Southbound connector for zoning operation'),
]
CONF = cfg.CONF
CONF.register_opts(cisco_opts, 'fc-zone-manager')
class CiscoFCZoneDriver(FCZoneDriver):
"""Cisco FC zone driver implementation.
OpenStack Fibre Channel zone driver to manage FC zoning in
Cisco SAN fabrics.
Version history:
1.0 - Initial Cisco FC zone driver
"""
VERSION = "1.0.0"
def __init__(self, **kwargs):
super(CiscoFCZoneDriver, self).__init__(**kwargs)
self.configuration = kwargs.get('configuration', None)
if self.configuration:
self.configuration.append_config_values(cisco_opts)
# Adding a hack to handle parameters from super classes
# in case configured with multi backends.
fabric_names = self.configuration.safe_get('fc_fabric_names')
activate = self.configuration.safe_get('cisco_zone_activate')
prefix = self.configuration.safe_get('cisco_zone_name_prefix')
base_san_opts = []
if not fabric_names:
base_san_opts.append(
cfg.StrOpt('fc_fabric_names', default=None,
help='Comma separated list of fibre channel '
'fabric names. This list of names is used to'
' retrieve other SAN credentials for connecting'
' to each SAN fabric'
))
if not activate:
base_san_opts.append(
cfg.BoolOpt('cisco_zone_activate',
default=True,
help='Indicates whether zone should '
'be activated or not'))
if not prefix:
base_san_opts.append(
cfg.StrOpt('cisco_zone_name_prefix',
default="openstack",
help="A prefix to be used when naming zone"))
if len(base_san_opts) > 0:
CONF.register_opts(base_san_opts)
self.configuration.append_config_values(base_san_opts)
fabric_names = [x.strip() for x in self.
configuration.fc_fabric_names.split(',')]
# There can be more than one SAN in the network and we need to
# get credentials for each SAN.
if fabric_names:
self.fabric_configs = fabric_opts.load_fabric_configurations(
fabric_names)
@lockutils.synchronized('cisco', 'fcfabric-', True)
def add_connection(self, fabric, initiator_target_map):
"""Concrete implementation of add_connection.
Based on zoning policy and state of each I-T pair, list of zone
members are created and pushed to the fabric to add zones. The
new zones created or zones updated are activated based on isActivate
flag set in cinder.conf returned by volume driver after attach
operation.
:param fabric: Fabric name from cinder.conf file
:param initiator_target_map: Mapping of initiator to list of targets
"""
LOG.debug("Add connection for Fabric:%s", fabric)
LOG.info(_LI("CiscoFCZoneDriver - Add connection "
"for I-T map: %s"), initiator_target_map)
fabric_ip = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_address')
fabric_user = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_user')
fabric_pwd = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_password')
fabric_port = self.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_port')
zoning_policy = self.configuration.zoning_policy
zoning_policy_fab = self.fabric_configs[fabric].safe_get(
'cisco_zoning_policy')
if zoning_policy_fab:
zoning_policy = zoning_policy_fab
zoning_vsan = self.fabric_configs[fabric].safe_get('cisco_zoning_vsan')
LOG.info(_LI("Zoning policy for Fabric %s"), zoning_policy)
statusmap_from_fabric = self.get_zoning_status(
fabric_ip, fabric_user, fabric_pwd, fabric_port, zoning_vsan)
if statusmap_from_fabric.get('session') == 'none':
cfgmap_from_fabric = self.get_active_zone_set(
fabric_ip, fabric_user, fabric_pwd, fabric_port, zoning_vsan)
zone_names = []
if cfgmap_from_fabric.get('zones'):
zone_names = cfgmap_from_fabric['zones'].keys()
# based on zoning policy, create zone member list and
# push changes to fabric.
for initiator_key in initiator_target_map.keys():
zone_map = {}
initiator = initiator_key.lower()
t_list = initiator_target_map[initiator_key]
if zoning_policy == 'initiator-target':
for t in t_list:
target = t.lower()
zone_members = [get_formatted_wwn(initiator),
get_formatted_wwn(target)]
zone_name = (self.
configuration.cisco_zone_name_prefix
+ initiator.replace(':', '')
+ target.replace(':', ''))
if (len(cfgmap_from_fabric) == 0 or (
zone_name not in zone_names)):
zone_map[zone_name] = zone_members
else:
# This is I-T zoning, skip if zone exists.
LOG.info(_LI("Zone exists in I-T mode. "
"Skipping zone creation %s"),
zone_name)
elif zoning_policy == 'initiator':
zone_members = [get_formatted_wwn(initiator)]
for t in t_list:
target = t.lower()
zone_members.append(get_formatted_wwn(target))
zone_name = self.configuration.cisco_zone_name_prefix \
+ initiator.replace(':', '')
|
wkmanire/StructuresAndAlgorithms
|
pythonpractice/bubblesort.py
|
Python
|
gpl-3.0
| 819
| 0
|
# -*- coding:utf-8; mode:python -*-
"""
A terrible sorting algorithm
|----------------------+-----------------------+------------------|
| Best Time Complexity | Worst Time Complexity | Space Complexity |
|----------------------+-----------------------+------------------|
| O(n^2)               | O(n^2)                | O(1)             |
|----------------------+-----------------------+------------------|
"""
from random import randint
from helpers import random_array, print_array, swap
def bubble_sort(array):
for i in range(len(array) - 1):
for j in range(i, len(array)):
if array[j] < array[i]:
swap(array, i, j)
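# Variant sketch: a textbook bubble sort with adjacent swaps and the
# early-exit optimization; that optimization is what gives bubble sort its
# O(n) best case on already-sorted input (the version above always performs
# ~n^2/2 comparisons regardless of input order).
def bubble_sort_early_exit(array):
    n = len(array)
    for i in range(n - 1):
        swapped = False
        for j in range(n - 1 - i):
            if array[j + 1] < array[j]:
                swap(array, j, j + 1)
                swapped = True
        if not swapped:  # a full pass with no swaps: already sorted
            break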
def main():
array = random_array(25)
print_array(array)
bubble_sort(array)
print_array(array)
if __name__ == "__main__":
main()
|
Erethon/synnefo
|
snf-cyclades-app/synnefo/logic/rapi.py
|
Python
|
gpl-3.0
| 54,880
| 0.003243
|
#
#
# Copyright (C) 2010, 2011 Google Inc.
# Copyright (C) 2013, GRNET S.A.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Ganeti RAPI client."""
# No Ganeti-specific modules should be imported. The RAPI client is supposed to
# be standalone.
import requests
import logging
import simplejson
import time
GANETI_RAPI_PORT = 5080
GANETI_RAPI_VERSION = 2
HTTP_DELETE = "DELETE"
HTTP_GET = "GET"
HTTP_PUT = "PUT"
HTTP_POST = "POST"
HTTP_OK = 200
HTTP_NOT_FOUND = 404
HTTP_APP_JSON = "application/json"
REPLACE_DISK_PRI = "replace_on_primary"
REPLACE_DISK_SECONDARY = "replace_on_secondary"
REPLACE_DISK_CHG = "replace_new_secondary"
REPLACE_DISK_AUTO = "replace_auto"
NODE_EVAC_PRI = "primary-only"
NODE_EVAC_SEC = "secondary-only"
NODE_EVAC_ALL = "all"
NODE_ROLE_DRAINED = "drained"
NODE_ROLE_MASTER_CANDIATE = "master-candidate"
NODE_ROLE_MASTER = "master"
NODE_ROLE_OFFLINE = "offline"
NODE_ROLE_REGULAR = "regular"
JOB_STATUS_QUEUED = "queued"
JOB_STATUS_WAITING = "waiting"
JOB_STATUS_CANCELING = "canceling"
JOB_STATUS_RUNNING = "running"
JOB_STATUS_CANCELED = "canceled"
JOB_STATUS_SUCCESS = "success"
JOB_STATUS_ERROR = "error"
JOB_STATUS_FINALIZED = frozenset([
JOB_STATUS_CANCELED,
JOB_STATUS_SUCCESS,
JOB_STATUS_ERROR,
])
JOB_STATUS_ALL = frozenset([
JOB_STATUS_QUEUED,
JOB_STATUS_WAITING,
JOB_STATUS_CANCELING,
JOB_STATUS_RUNNING,
]) | JOB_STATUS_FINALIZED
# Legacy name
JOB_STATUS_WAITLOCK = JOB_STATUS_WAITING
# Internal constants
_REQ_DATA_VERSION_FIELD = "__version__"
_QPARAM_DRY_RUN = "dry-run"
_QPARAM_FORCE = "force"
# Feature strings
INST_CREATE_REQV1 = "instance-create-reqv1"
INST_REINSTALL_REQV1 = "instance-reinstall-reqv1"
NODE_MIGRATE_REQV1 = "node-migrate-reqv1"
NODE_EVAC_RES1 = "node-evac-res1"
# Old feature constant names in case they're references by users of this module
_INST_CREATE_REQV1 = INST_CREATE_REQV1
_INST_REINSTALL_REQV1 = INST_REINSTALL_REQV1
_NODE_MIGRATE_REQV1 = NODE_MIGRATE_REQV1
_NODE_EVAC_RES1 = NODE_EVAC_RES1
#: Not enough resources (iallocator failure, disk space, memory, etc.)
ECODE_NORES = "insufficient_resources"
#: Temporarily out of resources; operation can be tried again
ECODE_TEMP_NORES = "temp_insufficient_resources"
class Error(Exception):
"""Base error class for this module.
"""
pass
class GanetiApiError(Error):
"""Generic error raised from Ganeti API.
"""
def __init__(self, msg, code=None):
Error.__init__(self, msg)
        self.code = code
class CertificateError(GanetiApiError):
"""Raised when a problem is found with the SSL certificate.
"""
pass
def _AppendIf(container, condition, value):
"""Appends to a list if a conditi
|
on evaluates to truth.
"""
if condition:
container.append(value)
return condition
def _AppendDryRunIf(container, condition):
"""Appends a "dry-run" parameter if a condition evaluates to truth.
"""
return _AppendIf(container, condition, (_QPARAM_DRY_RUN, 1))
def _AppendForceIf(container, condition):
"""Appends a "force" parameter if a condition evaluates to truth.
"""
return _AppendIf(container, condition, (_QPARAM_FORCE, 1))
def _SetItemIf(container, condition, item, value):
"""Sets an item if a condition evaluates to truth.
"""
if condition:
container[item] = value
return condition
class GanetiRapiClient(object): # pylint: disable=R0904
"""Ganeti RAPI client.
"""
USER_AGENT = "Ganeti RAPI Client"
_json_encoder = simplejson.JSONEncoder(sort_keys=True)
def __init__(self, host, port=GANETI_RAPI_PORT,
username=None, password=None, logger=logging):
"""Initializes this class.
@type host: string
@param host: the ganeti cluster master to interact with
@type port: int
@param port: the port on which the RAPI is running (default is 5080)
@type username: string
@param username: the username to connect with
@type password: string
@param password: the password to connect with
@param logger: Logging object
"""
self._logger = logger
self._base_url = "https://%s:%s" % (host, port)
if username is not None:
if password is None:
raise Error("Password not specified")
elif password:
raise Error("Specified password without username")
self._auth = (username, password)
def _SendRequest(self, method, path, query, content):
"""Sends an HTTP request.
This constructs a full URL, encodes and decodes HTTP bodies, and
handles invalid responses in a pythonic way.
@type method: string
@param method: HTTP method to use
@type path: string
@param path: HTTP URL path
@type query: list of two-tuples
@param query: query arguments to pass to urllib.urlencode
@type content: str or None
@param content: HTTP body content
@rtype: str
@return: JSON-Decoded response
@raises CertificateError: If an invalid SSL certificate is found
@raises GanetiApiError: If an invalid response is returned
"""
assert path.startswith("/")
url = "%s%s" % (self._base_url, path)
headers = {}
if content is not None:
encoded_content = self._json_encoder.encode(content)
headers = {"content-type": HTTP_APP_JSON,
"accept": HTTP_APP_JSON}
else:
encoded_content = ""
if query is not None:
query = dict(query)
self._logger.debug("Sending request %s %s (query=%r) (content=%r)",
method, url, query, encoded_content)
req_method = getattr(requests, method.lower())
r = req_method(url, auth=self._auth, headers=headers, params=query,
data=encoded_content, verify=False)
http_code = r.status_code
if r.content is not None:
response_content = simplejson.loads(r.content)
else:
response_content = None
if http_code != HTTP_OK:
if isinstance(response_content, dict):
msg = ("%s %s: %s" %
(response_content["code"],
response_content["message"],
response_content["explain"]))
else:
msg = str(response_content)
raise GanetiApiError(msg, code=http_code)
return response_content
def GetVersion(self):
"""Gets the Remote API version running on the cluster.
@rtype: int
@return: Ganeti Remote API version
"""
return self._SendRequest(HTTP_GET, "/version", None, None)
def GetFeatures(self):
"""Gets the list of optional features supported by RAPI server.
@rtype: list
@return: List of optional features
"""
try:
return self._SendRequest(HTTP_GET, "/%s/features" % GANETI_RAPI_VERSION,
None, None)
except GanetiApiError, err:
# Older RAPI servers don't support this resource
if err.code == HTTP_NOT_FOUND:
return []
raise
def GetOperatingSystems(self):
"""Gets the Operating Systems running in the Ganeti cluster.
@rtype: list of str
@return: operating systems
"""
return self._SendRequest(HTTP_GET, "/%s/os" % GANETI_RAPI_VERSION,
None, None)
def GetInfo(self):
"""Gets info about the cluster.
@rtype: dict
@return: information about the cluster
"""
return self._SendRequest(HTTP_GET, "/%s/info" % GANETI_RAPI_VERSION,
None, None)
def RedistributeConfig(self):
"""Tells the cluster to redistribute its configuration files.
@rtype: string
@return: job id
"""
        return self._SendRequest(HTTP_PUT,
                                 "/%s/redistribute-config" %
                                 GANETI_RAPI_VERSION, None, None)
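# Usage sketch: a typical read-only round trip with the client. The host and
# credentials below are placeholders, not values used by this module.
def _example_usage():
    client = GanetiRapiClient("master.example.com",
                              username="rapi", password="secret")
    print(client.GetVersion())   # e.g. 2
    print(client.GetFeatures())  # optional RAPI features; [] on old servers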
|
naphthalene/fabric-bolt
|
fabric_bolt/launch_window/migrations/0001_initial.py
|
Python
|
mit
| 1,427
| 0.006307
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'LaunchWindow'
db.create_table(u'launch_window_launchwindow', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
('description', self.gf('django.db.models.fields.TextField')()),
('cron_format', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
))
db.send_create_signal(u'launch_window', ['LaunchWindow'])
def backwards(self, orm):
# Deleting model 'LaunchWindow'
db.delete_table(u'launch_window_launchwindow')
models = {
u'launch_window.launchwindow': {
'Meta': {'object_name': 'LaunchWindow'},
'cron_format': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
complete_apps = ['launch_window']
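# Usage sketch: with South installed, this migration is applied from the
# shell with `./manage.py migrate launch_window` and rolled back with
# `./manage.py migrate launch_window zero`.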
|
OstapHEP/ostap
|
ostap/frames/tree_reduce.py
|
Python
|
bsd-3-clause
| 12,435
| 0.052352
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# =============================================================================
## @file ostap/frames/tree_reduce.py
# Helper module to "Reduce" tree using frames
# @see Ostap::DataFrame
# @see ROOT::RDataFrame
# @author Vanya BELYAEV Ivan.Belyaev@itep.ru
# @date 2018-06-16
# =============================================================================
"""Helper module to ``reduce'' tree using frames
- see Ostap.DataFrame
- see ROOT.ROOT.RDataFrame
"""
# =============================================================================
__version__ = "$Revision$"
__author__ = "Vanya BELYAEV Ivan.Belyaev@itep.ru"
__date__ = "2011-06-07"
__all__ = (
'ReduceTree' ,
'reduce' ,
)
# =============================================================================
import ROOT, os
# =============================================================================
# logging
# =============================================================================
from ostap.logger.logger import getLogger
if '__main__' == __name__ : logger = getLogger( 'ostap.frames.tree_reduce' )
else : logger = getLogger( __name__ )
# =============================================================================
logger.debug ( "``Reduce'' TTree using ROOT::RDataFrame object")
# =============================================================================
import ostap.trees.trees
from ostap.core.core import cpp, Ostap
from ostap.utils.cleanup import CleanUp
# =============================================================================
## @class ReduceTree
#  Reduce TTree object using an intermediate (temporary) file
# @code
# tree = ...
# r = ReduceTree ( tree , cuts , [ 'px', 'py', 'pz' ] , 'new_file.root' )
#  reduced = r.tree
# @endcode
class ReduceTree(CleanUp):
"""Reduce ROOT.TTree object
>>> tree = ...
    >>> r = ReduceTree ( tree , cuts , [ 'px', 'py', 'pz' ] )
>>> reduced = r.tree
"""
def __init__ ( self ,
chain , ## input TChain/TTree
selection = {} , ## selection/cuts
save_vars = () , ## list of variables to save
new_vars = {} , ## new variables
no_vars = () , ## exclude these variables
##
output = '' , ## output file name
name = '' , ## the name
                   addselvars = False , ## add variables from selections?
tmp_keep = False , ## keep the temporary file
silent = False ): ## silent processing
from ostap.frames.frames import DataFrame
frame = DataFrame ( chain )
report = None
self.__frame_main = frame
if not silent :
pbar = frame.ProgressBar ( len ( chain ) )
nvars = []
## new variables
for nv in new_vars :
frame = frame.Define ( nv , new_vars [ nv] )
nvars.append ( nv )
from ostap.core.ostap_types import ( string_types ,
listlike_types ,
dictlike_types )
cut_types = string_types + ( ROOT.TCut , )
Lmax = 30
selections = []
if selection and isinstance ( selection , cut_types ) :
ss = str ( selection ).strip()
if len ( ss ) < Lmax : filter_name = ss
else : filter_name = 'SELECTION'
frame = frame.Filter ( ss , filter_name )
selections.append ( ss )
elif selection and isinstance ( selection , dictlike_types ) :
for filter_name in selection :
s = selection [ filter_name ]
assert isinstance ( s , cut_types ),\
'Invalid selection type %s/%s' % ( s , type ( s ) )
ss = str ( s ).strip()
frame = frame.Filter ( ss , str ( filter_name ) )
selections.append ( ss )
elif selection and isinstance ( selection , listlike_types ) :
for i , s in enumerate ( selection ) :
assert isinstance ( s , cut_types ),\
'Invalid selection type %s/%s' % ( s , type ( s ) )
ss = str( s ).strip()
##
if len ( ss ) < Lmax : filter_name = ss
else : filter_name = 'SELECTION%d' % i
#
frame = frame.Filter ( ss , filter_name )
selections.append ( ss )
elif selection :
raise TypeError('Invalid selection type %s/%s' % ( selection , type ( selection ) ) )
if not output :
output = self.tempfile ( prefix = 'ostap-frame-' , suffix = '.root' )
## logger.debug ( 'ReduceTree: output file is %s' % output )
if not tmp_keep : self.trash.add ( output )
## if selections : report = frame.Report()
if selections and addselvars :
bvars = chain.the_variables ( selections )
save_vars = list ( bvars ) + [ v for v in save_vars if not v in bvars ]
save_vars = tuple ( save_vars )
## exclude some variables
if no_vars and not save_vars :
bvars = list ( chain.branches () )
all_vars = list ( bvars ) + [ v for v in nvars if not v in bvars ]
save_vars = tuple ( [ v for v in all_vars if not v in no_vars ] )
elif no_vars :
bvars = chain.the_variables ( *save_vars )
all_vars = list ( bvars ) + [ v for v in nvars if not v in bvars ]
save_vars = tuple ( [ v for v in all_vars if not v in no_vars ] )
nb_ = len ( chain.branches () )
ne_ = len ( chain )
## chain name:
## FIXME!
# cname = chain.GetName() ## produces ROOT error
if not name :
_ , _ , cname = chain.GetName().rpartition ( '/' )
name = '%s_reduced' % cname
self.__name = name
if not save_vars :
snapshot = frame.Snapshot ( name , output )
else :
bvars = chain.the_variables ( *save_vars )
all_vars = list ( bvars ) + [ v for v in nvars if not v in bvars ]
from ostap.core.core import strings as _strings
all_vars = _strings ( all_vars )
snapshot = frame.Snapshot ( name , output , all_vars )
assert os.path.exists ( output ) and\
               os.path.isfile ( output ) , 'Invalid file %s' % output
self.__chain = ROOT.TChain ( name )
self.__chain.Add ( output )
self.__output = output
self.__report = 'Tree -> Frame -> Tree filter/transformation'
self.__table = []
if report :
from ostap.frames.frames import report_print, report_as_table
title = self.__report
self.__report += '\n%s' % report_print ( report , title , '# ')
self.__table = report_as_table ( report )
fs = os.path.getsize ( self.__output )
gb , r = divmod ( fs , 1024 * 1024 * 1024 )
mb , r = divmod ( r , 1024 * 1024 )
kb , r = divmod ( r , 1024 )
if gb : fs = '%.1fGB' % ( float ( fs ) / 1024 / 1024 / 1024 )
elif mb : fs = '%.1fMB' % ( float ( fs ) / 1024 / 1024 )
elif kb : fs = '%.1fkB' % ( float ( fs ) / 1024 )
else : fs = '%sB' % fs
nb = len ( self.__chain.branches () )
ne = len ( self.__chain )
self.__report += '\n# Reduce %d -> %d branches, %d -> %d entries' % ( nb_ , nb , ne_ , ne )
self.__report += '\n# Output:%s size:%s' % ( self.__output , fs )
        self.__report += '\n# %s' % str ( self.__chain )
|
luo2chun1lei2/AgileEditor
|
ve/src/VeUtils.py
|
Python
|
gpl-2.0
| 503
| 0.014458
|
# -*- coding:utf-8 -*-
# Assorted convenience utilities.
import os, logging, threading
def is_empty(string):
    # Check whether the string is empty; whitespace-only strings count as empty too.
    # return: Bool: True if empty, False otherwise
if string is None or len(string) == 0:
return True
elif string.isspace():
return True
else:
return False
def is_not_empty(string):
    # Check whether the string is non-empty.
    # return: Bool: True if non-empty, False otherwise
    return not is_empty(string)
|
annarev/tensorflow
|
tensorflow/compiler/xla/python/xla_client.py
|
Python
|
apache-2.0
| 24,551
| 0.005539
|
# Lint as: python3
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""An XLA client in Python."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import atexit
import collections
import contextlib
import enum # pylint: disable=g-bad-import-order
import gzip
import inspect
import os
from typing import List, Sequence, Tuple, Union
from . import xla_extension as _xla
from absl import logging
import numpy as np
# Note this module does *not* depend on any Python protocol buffers. The XLA
# Python bindings are currently packaged both as part of jaxlib and as part
# of TensorFlow. If we use protocol buffers here, then importing both jaxlib
# and TensorFlow may fail with duplicate protocol buffer message definitions.
# Most functions are snake_case for consistency with other modules, some
# method names are CamelCase for consistency with XLA.
# pylint: disable=invalid-name
# Pylint has false positives for ty
|
pe annotations.
# pylint: disable=invalid-sequence-index
ops = _xla.ops
profiler = _xla.profiler
# Just an internal arbitrary increasing number to help with backward-compatible
# changes.
_version = 5
xla_platform_names = {
'cpu': 'Host',
'gpu': 'CUDA',
}
def _interpreter_backend_factory():
return _xla.get_interpreter_client()
def _cpu_backend_factory():
return _xla.get_cpu_client(asynchronous=True)
def _gpu_backend_factory(distributed_client=None, node_id=0):
"""Returns a GPU backend. BFC allocator is used by default."""
allocator = os.getenv('XLA_PYTHON_CLIENT_ALLOCATOR', 'default').lower()
memory_fraction = os.getenv('XLA_PYTHON_CLIENT_MEM_FRACTION')
preallocate = os.getenv('XLA_PYTHON_CLIENT_PREALLOCATE')
if allocator not in ('default', 'platform', 'bfc'):
raise ValueError(
'XLA_PYTHON_CLIENT_ALLOCATOR env var must be "default", "platform", or '
'"bfc", got "%s"' % allocator)
config = _xla.GpuAllocatorConfig()
if allocator == 'default':
config.kind = _xla.GpuAllocatorConfig.Kind.DEFAULT
if allocator == 'platform':
config.kind = _xla.GpuAllocatorConfig.Kind.PLATFORM
if allocator == 'bfc':
config.kind = _xla.GpuAllocatorConfig.Kind.BFC
if memory_fraction:
config.memory_fraction = float(memory_fraction)
config.preallocate = preallocate not in ('0', 'false', 'False')
return _xla.get_gpu_client(
asynchronous=True,
allocator_config=config,
distributed_client=distributed_client,
node_id=node_id)
def _tpu_backend_factory():
return _xla.get_tpu_client(asynchronous=True)
# Backend factories, keyed by user-visible name, in increasing priority order.
_local_backend_factories = collections.OrderedDict([
('interpreter', _interpreter_backend_factory),
('cpu', _cpu_backend_factory),
('gpu', _gpu_backend_factory),
('tpu', _tpu_backend_factory),
])
def register_local_backend_factory(name, factory):
_local_backend_factories[name] = factory
_local_backends = None
def _get_local_backends():
"""Instantiates all known local backends."""
global _local_backends
if _local_backends is not None:
return _local_backends
_local_backends = collections.OrderedDict()
for name, factory in _local_backend_factories.items():
logging.vlog(1, "Initializing backend '%s'" % name)
try:
backend = factory()
except RuntimeError as err:
if name == 'cpu':
# We always expect CPU to initialize successfully.
raise
else:
# If the backend isn't built into the binary, or if it has no devices,
# we expect a RuntimeError.
logging.vlog(1, "Error initializing backend '%s': %s" % (name, err))
continue
_local_backends[name] = backend
return _local_backends
def get_local_backend(name=None):
"""Returns a local backend.
Args:
name: the backend name. If `None`, a default local backend is returned,
typically `gpu` if one is present, or `cpu` if not. If a string, the named
backend is returned or an exception raised.
Returns:
A LocalBackend object.
"""
backends = _get_local_backends()
if name is not None:
try:
return backends[name]
except KeyError:
raise RuntimeError(
'Unknown backend %s. Available: %s' % (name, list(backends.keys())))
return list(backends.values())[-1]
class OpMetadata(object):
"""Python representation of a xla.OpMetadata protobuf."""
__slots__ = ('op_type', 'op_name', 'source_file', 'source_line')
def __init__(self, op_type='', op_name='', source_file='', source_line=0):
self.op_type = op_type
self.op_name = op_name
self.source_file = source_file
self.source_line = source_line
def CurrentSourceInfoMetadata(op_type=None, op_name=None, skip_frames=1):
"""Helper for use in source mapping that returns an OpMetadata object."""
full_filename, lineno = inspect.stack()[skip_frames][1:3]
filename = os.path.basename(full_filename)
return OpMetadata(
op_type=op_type,
op_name=op_name,
source_file=filename,
source_line=lineno)
PrimitiveType = _xla.PrimitiveType
bfloat16 = _xla.bfloat16_dtype()
XLA_ELEMENT_TYPE_TO_DTYPE = {
PrimitiveType.PRED: np.dtype('bool'),
PrimitiveType.S8: np.dtype('int8'),
PrimitiveType.S16: np.dtype('int16'),
PrimitiveType.S32: np.dtype('int32'),
PrimitiveType.S64: np.dtype('int64'),
PrimitiveType.U8: np.dtype('uint8'),
PrimitiveType.U16: np.dtype('uint16'),
PrimitiveType.U32: np.dtype('uint32'),
PrimitiveType.U64: np.dtype('uint64'),
PrimitiveType.BF16: np.dtype(bfloat16),
PrimitiveType.F16: np.dtype('float16'),
PrimitiveType.F32: np.dtype('float32'),
PrimitiveType.F64: np.dtype('float64'),
PrimitiveType.C64: np.dtype('complex64'),
PrimitiveType.C128: np.dtype('complex128'),
PrimitiveType.TUPLE: np.dtype(np.object_),
PrimitiveType.TOKEN: np.dtype(np.object_),
}
# Note the conversion on the key. Numpy has a known issue wherein dtype hashing
# doesn't work as expected (https://github.com/numpy/numpy/issues/7242). Thus,
# when keying by dtype in this dict, we use the string form of dtypes.
DTYPE_TO_XLA_ELEMENT_TYPE = {
str(dt): et for et, dt in XLA_ELEMENT_TYPE_TO_DTYPE.items()
}
def dtype_to_etype(dtype):
"""Convenience function for reading DTYPE_TO_XLA_ELEMENT_TYPE."""
return DTYPE_TO_XLA_ELEMENT_TYPE[str(np.dtype(dtype))]
Shape = _xla.Shape
Shape.__doc__ = """
A Shape is an object defined in C++ that duck types like the following class:
class Shape(object):
'''Represents an XLA shape.
A shape is either an array shape, having rank-many integer
dimensions and an element type (represented by a Numpy dtype), or it
is a tuple shape, having a shape for every tuple component:
type shape =
TupleShape of shape list
| ArrayShape of { dimensions: int list; element_type: dtype }
'''
@staticmethod
def tuple_shape(tuple_shapes) -> Shape:
"Construct a tuple shape."
@staticmethod
def array_shape(element_type, dimensions, minor_to_major=None) -> Shape:
@staticmethod
def from_pyval(pyval) -> Shape:
"Returns a Shape that describes a tuple-tree of Numpy arrays."
def __init__(self, str) -> Shape:
"Parses a shape string."
def __eq__(self, other: Shape) -> bool:
def __ne__(self, other: Shape) -> bool:
def __hash__(self):
def __repr__(self):
def is_tuple(self) -> bool:
def is_array(self) -> bool:
def tuple_shapes(self) -> [Shape]:
def numpy_dtype(self) -> np.dtype:
"Lik
|
AiAeGames/DaniBot
|
dispatcher.py
|
Python
|
gpl-3.0
| 4,840
| 0.000207
|
import re
import asyncio
import threading
from collections import defaultdict
def connector(bot, dispatcher, NICK, CHANNELS, PASSWORD=None):
@bot.on('client_connect')
async def connect(**kwargs):
bot.send('USER', user=NICK, realname=NICK)
if PASSWORD:
bot.send('PASS', password=PASSWORD)
bot.send('NICK', nick=NICK)
# Don't try to join channels until the server has
# sent the MOTD, or signaled that there's no MOTD.
done, pending = await asyncio.wait(
[bot.wait("RPL_ENDOFMOTD"),
bot.wait("ERR_NOMOTD")],
loop=bot.loop,
return_when=asyncio.FIRST_COMPLETED
)
# Cancel whichever waiter's event didn't come in.
for future in pending:
future.cancel()
for channel in CHANNELS:
bot.send('JOIN', channel=channel)
@bot.on('client_disconnect')
async def reconnect(**kwargs):
        # Wait a few seconds so we don't flood the server with reconnects
        await asyncio.sleep(5, loop=bot.loop)
# Schedule a connection when the loop's next available
bot.loop.create_task(bot.connect())
# Wait until client_connect has triggered
await bot.wait("client_connect")
@bot.on('ping')
def keepalive(message, **kwargs):
bot.send('PONG', message=message)
@bot.on('privmsg')
def message(host, target, message, **kwargs):
if host == NICK:
# don't process messages from the bot itself
return
if target == NICK:
# private message
dispatcher.handle_private_message(host, message)
else:
# channel message
dispatcher.handle_channel_message(host, target, message)
class Dispatcher(object):
def __init__(self, client):
self.client = client
self._callbacks = []
self.register_callbacks()
def _register_callbacks(self, callbacks):
"""\
Hook for registering custom callbacks for dispatch patterns
"""
self._callbacks.extend(callbacks)
def register_callbacks(self):
"""\
Hook for registering callbacks with connection -- handled by __init__()
"""
self._register_callbacks((
(re.compile(pattern), callback)
for pattern, callback in self.command_patterns()
))
def _process_command(self, nick, message, channel):
results = []
for pattern, callback in self._callbacks:
match = pattern.search(message) or pattern.search('/privmsg')
if match:
results.append(
callback(nick, message, channel, **match.groupdict()))
return results
def handle_private_message(self, nick, message):
for result in self._process_command(nick, message, None):
if result:
self.respond(result, nick=nick)
def handle_channel_message(self, nick, channel, message):
for result in self._process_command(nick, message, channel):
if result:
self.respond(result, channel=channel)
def command_patterns(self):
"""\
Hook for defining callbacks, stored as a tuple of 2-tuples:
return (
('/join', self.room_greeter),
            ('!find (\S+)', self.handle_find),
)
"""
raise NotImplementedError
def respond(self, message, channel=None, nick=None):
"""\
Multipurpose method for sending responses to channel or via message to
a single user
"""
if channel:
if not channel.startswith('#'):
channel = '#%s' % channel
self.client.send('PRIVMSG', target=channel, message=message)
elif nick:
self.client.send('PRIVMSG', target=nick, message=message)
class Locker(object):
def __init__(self, delay=None, user=""):
self.delay = delay if delay or delay == 0 and type(delay) == int else 5
self.locked = False
def lock(self):
if not self.locked:
if self.delay > 0:
self.locked = True
t = threading.Timer(self.delay, self.unlock, ())
t.daemon = True
t.start()
return self.locked
def unlock(self):
self.locked = False
return self.locked
def cooldown(delay):
def decorator(func):
if not hasattr(func, "__cooldowns"):
func.__cooldowns = defaultdict(lambda: Locker(delay))
def inner(*args, **kwargs):
nick = args[1]
user_cd = func.__cooldowns[nick]
if user_cd.locked:
return
ret = func(*args, **kwargs)
user_cd.lock()
return ret
return inner
return decorator
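# Usage sketch: a minimal Dispatcher subclass wiring the hooks above together.
# The trigger word and reply text are illustrative, not from the original bot.
class EchoDispatcher(Dispatcher):
    def command_patterns(self):
        return (
            ('^!ping', self.handle_ping),
        )

    @cooldown(10)  # at most one reply per user every 10 seconds
    def handle_ping(self, nick, message, channel, **kwargs):
        return '%s: pong' % nick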
|
deepmind/rlax
|
rlax/_src/nonlinear_bellman.py
|
Python
|
apache-2.0
| 7,670
| 0.004824
|
# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Transformed value functions.
Canonical value functions map states onto the expected discounted sum of rewards
that may be collected by an agent from any starting state. Value functions may
also be defined as the fixed points of certain linear recursive relations known
as Bellman equations. It is sometimes useful to consider transformed values that
are the solution to non-linear generalization of traditional Bellman equations.
In this subpackage we provide a general utility for wrapping bootstrapped return
calculations to construct regression targets for these transformed values.
We also use this to implement different learning algorithms from the literature.
"""
import collections
import functools
import chex
import jax.numpy as jnp
from rlax._src import base
from rlax._src import multistep
from rlax._src import transforms
Array = chex.Array
TxPair = collections.namedtuple('TxPair', ['apply', 'apply_inv'])
# Example transform pairs; these typically consist of a monotonically increasing
# squashing fn `apply` and its inverse `apply_inv`. Other choices are possible.
IDENTITY_PAIR = TxPair(
transforms.identity, transforms.identity)
SIGNED_LOGP1_PAIR = TxPair(
transforms.signed_logp1, transforms.signed_expm1)
SIGNED_HYPERBOLIC_PAIR = TxPair(
transforms.signed_hyperbolic, transforms.signed_parabolic)
HYPERBOLIC_SIN_PAIR = TxPair(
transforms.hyperbolic_arcsin, transforms.hyperbolic_sin)
def transform_values(build_targets, *value_argnums):
"""Decorator to convert targets to use transformed value function."""
@functools.wraps(build_targets)
def wrapped_build_targets(tx_pair, *args, **kwargs):
tx_args = list(args)
for index in value_argnums:
tx_args[index] = tx_pair.apply_inv(tx_args[index])
targets = build_targets(*tx_args, **kwargs)
return tx_pair.apply(targets)
return wrapped_build_targets
transformed_lambda_returns = transform_values(multistep.lambda_returns, 2)
transformed_general_off_policy_returns_from_action_values = transform_values(
multistep.general_off_policy_returns_from_action_values, 0)
transformed_n_step_returns = transform_values(
multistep.n_step_bootstrapped_returns, 2)
def transformed_q_lambda(
q_tm1: Array,
a_tm1: Array,
r_t: Array,
discount_t: Array,
q_t: Array,
lambda_: Array,
stop_target_gradients: bool = True,
tx_pair: TxPair = IDENTITY_PAIR,
) -> Array:
"""Calculates Peng's or Watkins' Q(lambda) temporal difference error.
See "General non-linear Bellman equations" by van Hasselt et al.
(https://arxiv.org/abs/1907.03687).
Args:
q_tm1: sequence of Q-values at time t-1.
a_tm1: sequence of action indices at time t-1.
r_t: sequence of rewards at time t.
discount_t: sequence of discounts at time t.
q_t: sequence of Q-values at time t.
lambda_: mixing parameter lambda, either a scalar (e.g. Peng's Q(lambda)) or
a sequence (e.g. Watkin's Q(lambda)).
stop_target_gradients: bool indicating whether or not to apply stop gradient
to targets.
tx_pair: TxPair of value function transformation and its inverse.
Returns:
Q(lambda) temporal difference error.
"""
chex.assert_rank([q_tm1, a_tm1, r_t, discount_t, q_t, lambda_],
[2, 1, 1, 1, 2, {0, 1}])
chex.assert_type([q_tm1, a_tm1, r_t, discount_t, q_t, lambda_],
[float, int, float, float, float, float])
qa_tm1 = base.batched_index(q_tm1, a_tm1)
v_t = jnp.max(q_t, axis=-1)
target_tm1 = transformed_lambda_returns(
tx_pair, r_t, discount_t, v_t, lambda_, stop_target_gradients)
return target_tm1 - qa_tm1
def transformed_retrace(
q_tm1: Array,
q_t: Array,
a_tm1: Array,
a_t: Array,
r_t: Array,
discount_t: Array,
pi_t: Array,
mu_t: Array,
lambda_: float,
eps: float = 1e-8,
stop_target_gradients: bool = True,
tx_pair: TxPair = IDENTITY_PAIR,
) -> Array:
"""Calculates transformed Retrace errors.
See "Recurrent Experience Replay in Distributed Reinforcement Learning" by
Kapturowski et al. (https://openreview.net/pdf?id=r1lyTjAqYX).
Args:
q_tm1: Q-values at time t-1.
q_t: Q-values at time t.
a_tm1: action index at time t-1.
a_t: action index at time t.
r_t: reward at time t.
discount_t: discount at time t.
pi_t: target policy probs at time t.
mu_t: behavior policy probs at time t.
lambda_: scalar mixing parameter lambda.
eps: small value to add to mu_t for numerical stability.
stop_target_gradients: bool indicating whether or not to apply stop gradient
to targets.
tx_pair: TxPair of value function transformation and its inverse.
Returns:
Transformed Retrace error.
"""
chex.assert_rank([q_tm1, q_t, a_tm1, a_t, r_t, discount_t, pi_t, mu_t],
[2, 2, 1, 1, 1, 1, 2, 1])
chex.assert_type([q_tm1, q_t, a_tm1, a_t, r_t, discount_t, pi_t, mu_t],
[float, float, int, int, float, float, float, float])
pi_a_t = base.batched_index(pi_t, a_t)
c_t = jnp.minimum(1.0, pi_a_t / (mu_t + eps)) * lambda_
target_tm1 = transformed_general_off_policy_returns_from_action_values(
tx_pair, q_t, a_t, r_t, discount_t, c_t, pi_t, stop_target_gradients)
q_a_tm1 = base.batched_index(q_tm1, a_tm1)
return target_tm1 - q_a_tm1
def transformed_n_step_q_learning(
q_tm1: Array,
a_tm1: Array,
target_q_t: Array,
a_t: Array,
r_t: Array,
discount_t: Array,
n: int,
stop_target_gradients: bool = True,
tx_pair: TxPair = IDENTITY_PAIR,
) -> Array:
"""Calculates transformed n-step TD errors.
See "Recurrent Experience Replay in Distributed Reinforcement Learning" by
Kapturowski et al. (https://openreview.net/pdf?id=r1lyTjAqYX).
Args:
q_tm1: Q-values at times [0, ..., T - 1].
a_tm1: action index at times [0, ..., T - 1].
target_q_t: target Q-values at time [1, ..., T].
a_t: action index at times [[1, ... , T]] used to select target q-values to
bootstrap from; max(target_q_t) for normal Q-learning, max(q_t) for double
Q-learning.
r_t: reward at times [1, ..., T].
discount_t: discount at times [1, ..., T].
n: number of steps over which to accumulate reward before bootstrapping.
stop_target_gradients: bool indicating whether or not to apply stop gradient
to targets.
tx_pair: TxPair of value function transformation and its inverse.
Returns:
Transformed N-step TD error.
"""
chex.assert_rank([q_tm1, target_q_t, a_tm1, a_t, r_t, discount_t],
[2, 2, 1, 1, 1, 1])
chex.assert_type([q_tm1, target_q_t, a_tm1, a_t, r_t, discount_t],
[float, float, int, int, float, float])
v_t = base.batched_index(target_q_t, a_t)
target_tm1 = transformed_n_step_returns(
tx_pair, r_t, discount_t, v_t, n,
stop_target_gradients=stop_target_gradients)
q_a_tm1 = base.batched_index(q_tm1, a_tm1)
return target_tm1 - q_a_tm1
|
Pikecillo/genna
|
external/PyXML-0.8.4/demo/quotes/qtfmt.py
|
Python
|
gpl-2.0
| 13,896
| 0.008923
|
#!/usr/bin/env python
#
# qtfmt.py v1.10
# v1.10 : Updated to use Python 2.0 Unicode type.
#
# Read a document in the quotation DTD, converting it to a list of Quotation
# objects. The list can then be output in several formats.
__doc__ = """Usage: qtfmt.py [options] file1.xml file2.xml ...
If no filenames are provided, standard input will be read.
Available options:
-f or --fortune Produce output for the fortune(1) program
-h or --html Produce HTML output
-t or --text Produce plain text output
-m N or --max N Suppress quotations longer than N lines;
defaults to 0, which suppresses no quotations at all.
"""
import string, re, cgi, types, sys
import codecs
from xml.sax import saxlib, saxexts
def simplify(t, indent="", width=79):
"""Strip out redundant spaces, and insert newlines to
wrap the text at the given width."""
t = string.strip(t)
t = re.sub('\s+', " ", t)
if t=="": return t
t = indent + t
t2 = ""
while len(t) > width:
index = string.rfind(t, ' ', 0, width)
if index == -1: t2 = t2 + t[:width] ; t = t[width:]
else: t2 = t2 + t[:index] ; t = t[index+1:]
t2 = t2 + '\n'
return t2 + t
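# Added example (illustrative): simplify() collapses runs of whitespace and
# word-wraps at `width` columns, e.g.
#   simplify("  The  quick\n brown  fox  ") == "The quick brown fox"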
class Quotation:
"""Encapsulates a single quotation.
Attributes:
stack -- used during construction and then deleted
text -- A list of Text() instances, or subclasses of Text(),
containing the text of the quotation.
source -- A list of Text() instances, or subclasses of Text(),
containing the source of the quotation. (Optional)
author -- A list of Text() instances, or subclasses of Text(),
containing the author of the quotation. (Optional)
Methods:
as_fortune() -- return the quotation formatted for fortune
as_html() -- return an HTML version of the quotation
as_text() -- return a plain text version of the quotation
"""
def __init__(self):
self.stack = [ Text() ]
self.text = []
def as_text(self):
"Convert instance into a pure text form"
output = ""
def flatten(textobj):
"Flatten a list of subclasses of Text into a list of paragraphs"
if type(textobj) != types.ListType: textlist=[textobj]
else: textlist = textobj
paragraph = "" ; paralist = []
for t in textlist:
if (isinstance(t, PreformattedText) or
isinstance(t, CodeFormattedText) ):
paralist.append(paragraph)
paragraph = ""
paralist.append(t)
elif isinstance(t, Break):
paragraph = paragraph + t.as_text()
paralist.append(paragraph)
paragraph = ""
else:
paragraph = paragraph + t.as_text()
paralist.append(paragraph)
return paralist
# Flatten the list of instances into a list of paragraphs
paralist = flatten(self.text)
if len(paralist) > 1:
indent = 2*" "
else:
indent = ""
for para in paralist:
if isinstance(para, PreformattedText) or isinstance(para, CodeFormattedText):
output = output + para.as_text()
else:
output = output + simplify(para, indent) + '\n'
attr = ""
for i in ['author', 'source']:
if hasattr(self, i):
paralist = flatten(getattr(self, i))
text = string.join(paralist)
if attr:
attr = attr + ', '
text = string.lower(text[:1]) + text[1:]
attr = attr + text
attr=simplify(attr, width = 79 - 4 - 3)
if attr: output = output + ' -- '+re.sub('\n', '\n ', attr)
return output + '\n'
def as_fortune(self):
return self.as_text() + '%'
def as_html(self):
output = "<P>"
def flatten(textobj):
if type(textobj) != types.ListType: textlist = [textobj]
else: textlist = textobj
paragraph = "" ; paralist = []
for t in textlist:
paragraph = paragraph + t.as_html()
if isinstance(t, Break):
paralist.append(paragraph)
paragraph = ""
paralist.append(paragraph)
return paralist
paralist = flatten(self.text)
for para in paralist: output = output + string.strip(para) + '\n'
attr = ""
for i in ['author', 'source']:
if hasattr(self, i):
paralist = flatten(getattr(self, i))
text = string.join(paralist)
attr=attr + ('<P CLASS=%s>' % i) + string.strip(text)
return output + attr
# Text and its subclasses are used to hold chunks of text; instances
# know how to display themselves as plain text or as HTML.
class Text:
"Plain text"
def __init__(self, text=""):
self.text = text
# We need to allow adding a string to Text instances.
def __add__(self, val):
newtext = self.text + str(val)
# __class__ must be used so subclasses create instances of themselves.
return self.__class__(newtext)
def __str__(self): return self.text
def __repr__(self):
s = string.strip(self.text)
if len(s) > 15: s = s[0:15] + '...'
return '<%s: "%s">' % (self.__class__.__name__, s)
def as_text(self): return self.text
def as_html(self): return cgi.escape(self.text)
class PreformattedText(Text):
"Text inside <pre>...</pre>"
def as_text(self):
return str(self.text)
def as_html(self):
return '<pre>' + cgi.escape(str(self.text)) + '</pre>'
class CodeFormattedText(Text):
"Text inside <code>...</code>"
def as_text(self):
return str(self.text)
def as_html(self):
return '<code>' + cgi.escape(str(self.text)) + '</code>'
class CitedText(Text):
"Text inside <cite>...</cite>"
def as_text(self):
return '_' + simplify(str(self.text)) + '_'
def as_html(self):
return '<cite>' + string.strip(cgi.escape(str(self.text))) + '</cite>'
class ForeignText(Text):
"Foreign words, from Latin or French or whatever."
def as_text(self):
return '_' + simplify(str(self.text)) + '_'
def as_html(self):
return '<i>' + string.strip(cgi.escape(str(self.text))) + '</i>'
class EmphasizedText(Text):
"Text inside <em>...</em>"
def as_text(self):
return '*' + simplify(str(self.text)) + '*'
def as_html(self):
return '<em>' + string.strip(cgi.escape(str(self.text))) + '</em>'
class Break(Text):
def as_text(self): return ""
def as_html(self): return "<P>"
# The QuotationDocHandler class is a SAX handler class that will
# convert a marked-up document using the quotations DTD into a list of
# quotation objects.
class QuotationDocHandler(saxlib.HandlerBase):
def __init__(self, process_func):
self.process_func = process_func
self.newqt = None
# Errors should be signaled, so we'll output a message and raise
# the exception to stop processing
def fatalError(self, exception):
sys.stderr.write('ERROR: '+ str(exception)+'\n')
sys.exit(1)
error = fatalError
warning = fatalError
def characters(self, ch, start, length):
if self.newqt != None:
s = ch[start:start+length]
# Undo the UTF-8 encoding, converting to ISO Latin1, which
# is the default character set used for HTML.
latin1_encode = codecs.lookup('iso-8859-1') [0]
unicode_str = s
s, consumed = latin1_encode( unicode_str )
assert consumed == len( unicode_str )
self.newqt.stack[-1] = self.newqt.stack[-1] + s
def startDocument(self):
self.quote_list = []
def startElement(self, name, attrs):
methname = 'start_'+str(name)
if hasattr(self, methname):
method = getattr(self, methname)
method(attrs)
else:
|
HellerCommaA/flask-angular
|
lib/python2.7/site-packages/flask_restless/search.py
|
Python
|
mit
| 19,670
| 0.000153
|
"""
flask.ext.restless.search
~~~~~~~~~~~~~~~~~~~~~~~~~
Provides querying, searching, and function evaluation on SQLAlchemy models.
The most important functions in this module are the :func:`create_query`
and :func:`search` functions, which create a SQLAlchemy query object and
execute that query on a given model, respectively.
:copyright: 2011 by Lincoln de Sousa <lincoln@comum.org>
:copyright: 2012 Jeffrey Finkelstein <jeffrey.finkelstein@gmail.com>
:license: GNU AGPLv3+ or BSD
"""
import inspect
from sqlalchemy import and_ as AND
from sqlalchemy import or_ as OR
from sqlalchemy.ext.associationproxy import AssociationProxy
from sqlalchemy.orm.attributes import InstrumentedAttribute
from .helpers import unicode_keys_to_strings
from .helpers import session_query
from .helpers import get_related_association_proxy_model
def _sub_operator(model, argument, fieldname):
"""Recursively calls :func:`QueryBuilder._create_operation` when argument
is a dictionary of the form specified in :ref:`search`.
This function is for use with the ``has`` and ``any`` search operations.
"""
if isinstance(model, InstrumentedAttribute):
submodel = model.property.mapper.class_
elif isinstance(model, AssociationProxy):
submodel = get_related_association_proxy_model(model)
else: # TODO what to do here?
pass
if isinstance(argument, dict):
fieldname = argument['name']
operator = argument['op']
argument = argument['val']
relation = None
if '__' in fieldname:
fieldname, relation = fieldname.split('__')
return QueryBuilder._create_operation(submodel, fieldname, operator,
argument, relation)
# Support legacy has/any with implicit eq operator
return getattr(submodel, fieldname) == argument
#: The mapping from operator name (as accepted by the search method) to a
#: function which returns the SQLAlchemy expression corresponding to that
#: operator.
#:
#: Each of these functions accepts either one, two, or three arguments. The
#: first argument is the field object on which to apply the operator. The
#: second argument, where it exists, is either the second argument to the
#: operator or a dictionary as described below. The third argument, where it
#: exists, is the name of the field.
#:
#: For functions that accept three arguments, the second argument may be a
#: dictionary containing ``'name'``, ``'op'``, and ``'val'`` mappings so that
#: :func:`QueryBuilder._create_operation` may be applied recursively. For more
#: information and examples, see :ref:`search`.
#:
#: Some operations have multiple names. For example, the equality operation can
#: be described by the strings ``'=='``, ``'eq'``, ``'equals'``, etc.
OPERATORS = {
# Operators which accept a single argument.
'is_null': lambda f: f == None,
'is_not_null': lambda f: f != None,
# TODO what are these?
'desc': lambda f: f.desc,
'asc': lambda f: f.asc,
# Operators which accept two arguments.
'==': lambda f, a: f == a,
'eq': lambda f, a: f == a,
'equals': lambda f, a: f == a,
'equal_to': lambda f, a: f == a,
'!=': lambda f, a: f != a,
'ne': lambda f, a: f != a,
'neq': lambda f, a: f != a,
'not_equal_to': lambda f, a: f != a,
'does_not_equal': lambda f, a: f != a,
'>': lambda f, a: f > a,
'gt': lambda f, a: f > a,
'<': lambda f, a: f < a,
'lt': lambda f, a: f < a,
'>=': lambda f, a: f >= a,
'ge': lambda f, a: f >= a,
'gte': lambda f, a: f >= a,
'geq': lambda f, a: f >= a,
'<=': lambda f, a: f <= a,
'le': lambda f, a: f <= a,
'lte': lambda f, a: f <= a,
'leq': lambda f, a: f <= a,
'ilike': lambda f, a: f.ilike(a),
'like': lambda f, a: f.like(a),
'in': lambda f, a: f.in_(a),
'not_in': lambda f, a: ~f.in_(a),
# Operators which accept three arguments.
'has': lambda f, a, fn: f.has(_sub_operator(f, a, fn)),
'any': lambda f, a, fn: f.any(_sub_operator(f, a, fn)),
}
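# Added illustrative note (not in the original module): a filter dictionary
# such as {'name': 'age', 'op': 'lt', 'val': 20} is evaluated roughly as
#   OPERATORS['lt'](getattr(Model, 'age'), 20)
# which builds the SQLAlchemy expression `Model.age < 20`.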
class OrderBy(object):
"""Represents an "order by" in a SQL query expression."""
def __init__(self, field, direction='asc'):
"""Instantiates this object with the specified attributes.
`field` is the name of the field by which to order the result set.
`direction` is either ``'asc'`` or ``'desc'``, for "ascending" and
"descending", respectively.
"""
self.field = field
self.direction = direction
def __repr__(self):
"""Returns a string representation of this object."""
return '<OrderBy {0}, {1}>'.format(self.field, self.direction)
class Filter(object):
"""Represents a filter to apply to a SQL query.
A filter can be, for example, a comparison operator applied to a field of a
model and a value or a comparison applied to two fields of the same
model. For more information on possible filters, see :ref:`search`.
"""
def __init__(self, fieldname, operator, argument=None, otherfield=None):
"""Instantiates this object with the specified attributes.
`fieldname` is the name of the field of a model which will be on the
left side of the operator.
`operator` is the string representation of an operator to apply. The
full list of recognized operators can be found at :ref:`search`.
If `argument` is specified, it is the value to place on the right side
of the operator. If `otherfield` is specified, that field on the model
will be placed on the right side of the operator.
.. admonition:: About `argument` and `otherfield`
Some operators don't need either argument and some need exactly one.
However, this constructor will not raise any errors or otherwise
inform you of which situation you are in; it is basically just a
named tuple. Calling code must handle errors caused by missing
required arguments.
"""
self.fieldname = fieldname
self.operator = operator
self.argument = argument
self.otherfield = otherfield
def __repr__(self):
"""Returns a string representation of this object."""
return '<Filter {0} {1} {2}>'.format(self.fieldname, self.operator,
self.argument or self.otherfield)
@staticmethod
def from_dictionary(dictionary):
"""Returns a new :class:`Filter` object with arguments parsed from
`dictionary`.
`dictionary` is a dictionary of the form::
{'name': 'age', 'op': 'lt', 'val': 20}
or::
{'name': 'age', 'op': 'lt', 'field': 'height'}
where ``dictionary['name']`` is the name of the field of the model on
which to apply the operator, ``dictionary['op']`` is the name of the
operator to apply, ``dictionary['val']`` is the value on the right to
which the operator will be applied, and ``dictionary['field']`` is the
name of the other field of the model to which the operator will be
applied.
"""
fieldname = dictionary.get('name')
operator = dictionary.get('op')
argument = dictionary.get('val')
otherfield = dictionary.get('field')
return Filter(fieldname, operator, argument, otherfield)
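# Added usage sketch (illustrative):
#   Filter.from_dictionary({'name': 'age', 'op': 'lt', 'val': 20})
# returns Filter('age', 'lt', 20, None), i.e. "age < 20" with no other
# field involved.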
class SearchParameters(object):
"""Aggregates the parameters for a search, including filters, search type,
limit, offset, and order by directives.
"""
def __init__(self, filters=None, limit=None, offset=None, order_by=None,
junction=None):
"""Instantiates this object with the specified attributes.
`filters` is a list of :class:`Filter` objects, representing filters to
be applied during the search.
`limit`, if not ``None``, specifies the maximum number of results to
return in the search.
`offset`, if not ``None``, specifies the number of initial results to
skip in the result set.
`order_by` is a list of :class:`OrderBy` objects, representi
|
witjoh/enigma2_dreambox_scanner
|
Scanners/Movies/Enigma2 Debug Scanner.py
|
Python
|
gpl-2.0
| 7,163
| 0.006003
|
import re, os, os.path
import Media, VideoFiles, Stack, Utils
import time
#
# this is a start to get to learn how to write scanners
#
debugfile = '/tmp/enigma2_movie_debug.log'
debug = True
def Scan(path, files, mediaList, subdirs, language=None, root=None, **kwargs):
def strip_name_from_ts_file(tsfile):
#Retrieves the programme name from a Dreambox Enigma2 file.
#This has the form of YYYYMMDD HHMM - Channel - Programmename.ts
#Code is borrowed from Enigma2 Movies.py by Quinten:
#https://forums.plex.tv/index.php/topic/68991-scanner-for-enigma2-ts-file
#Also transforms '_ ' to ': '
base_name = os.path.splitext(os.path.basename(tsfile))[0]
tmp_name = base_name.split(' - ', 2)[2].strip()
return re.sub(r'_ ', ': ', tmp_name)
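# Added example (illustrative filename): a recording named
#   '20081201 2015 - RTL - Die Hard_ With a Vengeance.ts'
# yields the programme name 'Die Hard: With a Vengeance'.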
if debug:
logfile = open(debugfile, 'a')
logfile.write("=======================================================\n")
logfile.write(time.strftime("%c"))
logfile.write(" --- Entering DREAMBOX DEBUG SCANNER\n")
logfile.write("recvieved following parameters :\n")
logfile.write("path parameter : ")
logfile.write(str(path))
logfile.write('\n')
logfile.write("files parameter : ")
logfile.write(str(files))
logfile.write('\n')
logfile.write("mediaList parameter : ")
logfile.write(str(mediaList))
logfile.write('\n')
logfile.write("subdirs parameter : ")
logfile.write(str(subdirs))
logfile.write('\n')
logfile.write("language parameter : ")
logfile.write(str(language))
logfile.write('\n')
logfile.write("root parameter : ")
logfile.write(str(root))
logfile.write('\n')
logfile.write("kwargs parameter : ")
logfile.write(str(kwargs))
logfile.write('\n')
logfile.write("========================================================\n")
logfile.write(" START PRFOCESSING FILES SECTION \n ")
year = ''
genre = ''
short_info = ''  # initialised alongside the others; it is logged below even when no ts.meta file is found
name = ''
for scan_file in files:
# Only process files having a ts extension (these are the movie files)
if scan_file.endswith(".ts"):
# check then if we have the ts.meta meta file
if os.path.isfile(scan_file + ".meta"):
if debug:
logfile.write(str("found ts.meta file : " + str(scan_file + ".meta") + "\n"))
# lookup title and year from the ts.meta file
meta = open(scan_file + ".meta", 'r')
lines = meta.readlines()
if debug:
logfile.write("Content of the ")
logfile.write(str(scan_file + ".meta" + "\n"))
logfile.write(str(lines))
logfile.write("\n")
name = lines[1].strip()
if name:
if debug:
logfile.write(str("substracted the programname " + (name) + "\n"))
else:
# programme name is empty in the ts.meta file, so we take it from the filename
name = strip_name_from_ts_file(tsfile=scan_file)
if debug:
logfile.write(str("no title in ts.meta file found, abstracted form filename : " + (name) + "\n"))
# line 3 in the meta file contains multiple formats. In this directory we
# assume only movies are to be processed.
# Most common formats we have encountered yet:
# <genre>.<year>.<short description> where all fields are not always there
# <year>.<genre>.<short description>
# <descripton><(year)><(duration)>
# empty
# <free text>
if lines[2].strip():
# we do have content
if re.match(r'\d{4}\.', lines[2]):
if debug:
logfile.write("first RegExp matches, first field is the year\n")
line_array = lines[2].split('.', 2)
elements = len(line_array)
year = line_array[0]
if elements >= 2:
genre = line_array[1]
else:
genre = ''
if elements >= 3:
short_info = line_array[2]
else:
short_info = ''
elif re.search(r'\.\d{4}\.', lines[2]):
if debug:
logfile.write("Second RegExp matches, second field is the year\n")
line_array = lines[2].split('.', 2)
elements = len(line_array)
genre = line_array[0]
year = line_array[1]
if elements >= 3:
short_info = line_array[2]
else:
short_info = ''
elif re.search(r'\(\d{4}\)', lines[2]):
if debug:
logfile.write("Third RegExp matches, year after info\n")
pat = re.compile(r'\(\d{4}\)')
res = pat.search(lines[2])
short_info = lines[2][0:res.start()-1]
year = lines[2][res.start()+1:res.end()-1]
genre = ''
else:
if debug:
logfile.write("line not empty, but no match found\n")
# we handle this as short info
genre = ''
year = ''
short_info = lines[2]
else:
# empty line
if debug:
logfile.write("empty line\n")
year = ''
genre = ''
short_info = ''
else:
name = strip_name_from_ts_file(tsfile=scan_file)
year = ''
if debug:
logfile.write(str("the year of the movie is " + str(year) + "\n"))
logfile.write(str("the genre of the movie is " + str(genre) + "\n"))
logfile.write(str("the info of the movie is " + str(short_info) + "\n"))
movie = Media.Movie(name, year)
movie.source = VideoFiles.RetrieveSource(scan_file)
movie.parts.append(scan_file)
if debug:
logfile.write("========================================\n")
logfile.write(str(movie))
logfile.write("\n")
logfile.write("========================================\n")
mediaList.append(movie)
if debug:
logfile.write(" ========================================================================\n")
logfile.close()
|
J22Melody/VPS-Monitor
|
manage.py
|
Python
|
gpl-2.0
| 254
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "vps_monitor.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
zejacobi/ProjectEuler
|
Solutions/0031.py
|
Python
|
unlicense
| 2,719
| 0.004433
|
"""
# PROBLEM 31
In England the currency is made up of pound, £, and pence, p, and there are eight coins in general circulation:
1p, 2p, 5p, 10p, 20p, 50p, £1 (100p) and £2 (200p).
It is possible to make £2 in the following way:
1×£1 + 1×50p + 2×20p + 1×5p + 1×2p + 3×1p
How many different ways can £2 be made using any number of coins?
"""
# So I checked and in Python, [1, 2, 3] == [1, 2, 3], [1, 2, 3] in [[1, 2, 3]] == True, etc.
# It's also true that sorted([1, 2, 3]) == sorted([3, 2, 1]), but I figure I'll save sorting
# overhead if I instead use an 8 element array (e.g. [1, 0, 0, 0, 0, 0, 0, 0] is one pence)
goal = 200
values_to_index = {
1: 0,
2: 1,
5: 2,
10: 3,
20: 4,
50: 5,
100: 6,
200: 7
}
coin_values = sorted(list(values_to_index.keys())) # the fact that keys is its own thing probably
# was fine, but I don't want any nasty surprises!
index_to_value = {values_to_index[value]: value for value in coin_values}
# Okay so the first thing we'll realize is that there is a certain allowable set of values for each
# coin; there are twenty one for one pence pieces and one for two pound pieces.
# (therefore I'm ignoring two pound pieces entirely and adding one the final result and I'll deal
# with single pence right at the start, allowing me to ignore them)
coin_values = coin_values[1:-1] # okay, so making this a list did come in useful
# (I promise I wrote the above comment 1st)
starting_arrays = [[[pence, 0, 0, 0, 0, 0, 0, 0], pence, 0] for pence in range(0, goal, 1)]
# packing these together lets me unpack them later
# the two easy cases, namely 1 200p coin and 200 1p coins, should be handled separately
final_arrays = [[0, 0, 0, 0, 0, 0, 0, 1], [goal, 0, 0, 0, 0, 0, 0, 0]]
def build_arrays(current, value, position):
if position > len(coin_values):
return
for current_position, pence in enumerate(coin_values[position:]):
max_pieces = int((goal - value) / pence) + 1
for n in range(1, max_pieces):
if (value + pence * n) < goal:
new_array = current[:]
new_array[values_to_index[pence]] += n
build_arrays(new_array, value + pence * n, position + 1 + current_position)
# the +current_position turned out to be really key in making this run quickly
# seriously, it cuts down the run time 100-fold
elif (value + pence * n) == goal:
final = current[:]
final[values_to_index[pence]] += n
if final not in final_arrays:
final_arrays.append(final)
for array in starting_arrays[:]:
build_arrays(*array)
print(len(final_arrays))
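# Added note (illustrative): each entry of final_arrays is a coin-count
# vector indexed via values_to_index; e.g. [0, 0, 0, 0, 0, 2, 1, 0] encodes
# 2 x 50p + 1 x 100p = 200p, one valid way of making two pounds.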
|
Maethorin/py-inspector
|
tests/unitarios/test_validacao_pep8.py
|
Python
|
mit
| 1,941
| 0.002061
|
# -*- coding: utf-8 -*-
import unittest
from mock import MagicMock, patch
from py_inspector import verificadores
class CustomReport(unittest.TestCase):
def test_deve_retornar_quantidade_erros(self):
options = MagicMock()
report = verificadores.CustomReport(options)
report._deferred_print = []
report.file_errors = 3
report.get_file_results().should.be.equal(3)
def test_deve_montar_erros(self):
options = MagicMock()
report = verificadores.CustomReport(options)
report.filename = 'filename'
report.line_offset = 2
report.file_errors = 3
report._deferred_print = [(23, 2, 'code', 'text', '_'), (22, 3, 'code zas', 'text zas', '_ zas')]
report.get_file_results()
report.results.should.be.equal([{'path': 'filename', 'code': 'code zas', 'text': 'text zas', 'col': 4, 'row': 24}, {'path': 'filename', 'code': 'code', 'text': 'text', 'col': 3, 'row': 25}])
class ValidandoPep8(unittest.TestCase):
@patch('py_inspector.verificadores.assert_true')
def test_deve_passar_se_nao_tiver_erro_de_pep8(self, assert_mock):
verificadores.CustomReport.results = []
validador = verificadores.TestValidacaoPython()
from tests.unitarios import arquivo_sem_erro_pep8
validador.validacao_pep8([arquivo_sem_erro_pep8.__file__.replace('pyc', 'py')])
assert_mock.called.should.be.falsy
@patch('py_inspector.verificadores.assert_true')
def test_deve_dar_erro_se_tiver_erro_de_pep8(self, assert_mock):
verificadores.CustomReport.results = []
validador = verificadores.TestValidacaoPython()
from tests.unitarios import arquivo_com_erro_pep8
arquivo = arquivo_com_erro_pep8.__file__.replace('pyc', 'py')
validador.validacao_pep8([arquivo])
assert_mock.assert_called_with(False, 'PEP8 em {}:16:5 - E303: too many blank lines (2)'.format(arquivo))
|
qusp/orange3
|
Orange/preprocess/preprocess.py
|
Python
|
bsd-2-clause
| 6,516
| 0.00046
|
"""
Preprocess
----------
"""
import numpy as np
import sklearn.preprocessing as skl_preprocessing
import bottlechest
import Orange.data
from . import impute, discretize
from ..misc.enum import Enum
__all__ = ["Continuize", "Discretize", "Impute", "SklImpute"]
def is_continuous(var):
return isinstance(var, Orange.data.ContinuousVariable)
def is_discrete(var):
return isinstance(var, Orange.data.DiscreteVariable)
class Preprocess(object):
"""
A generic preprocessor class. All preprocessors need to inherit this
class. Preprocessors can be instantiated without a data set to return a
data preprocessor, or can be given a data set to return the preprocessed
data.
Parameters
----------
data : a data table (default=None)
An optional data set to be preprocessed.
"""
def __new__(cls, data=None, *args, **kwargs):
self = super().__new__(cls)
if isinstance(data, Orange.data.Storage):
self.__init__(*args, **kwargs)
return self(data)
else:
return self
def __call__(self, data):
raise NotImplementedError("Subclasses need to implement __call__")
class Continuize(Preprocess):
MultinomialTreatment = Enum(
"Indicators", "FirstAsBase", "FrequentAsBase",
"Remove", "RemoveMultinomial", "ReportError", "AsOrdinal",
"AsNormalizedOrdinal", "Leave", "NormalizeBySpan",
"NormalizeBySD"
)
(Indicators, FirstAsBase, FrequentAsBase, Remove, RemoveMultinomial,
ReportError, AsOrdinal, AsNormalizedOrdinal, Leave,
NormalizeBySpan, NormalizeBySD) = MultinomialTreatment
def __init__(self, zero_based=True, multinomial_treatment=Indicators,
normalize_continuous=NormalizeBySD):
self.zero_based = zero_based
self.multinomial_treatment = multinomial_treatment
self.normalize_continuous = normalize_continuous
def __call__(self, data):
from . import continuize
continuizer = continuize.DomainContinuizer(
zero_based=self.zero_based,
multinomial_treatment=self.multinomial_treatment,
normalize_continuous=self.normalize_continuous)
domain = continuizer(data)
return data.from_table(domain, data)
class Discretize(Preprocess):
"""
Construct a discretizer, a preprocessor for discretization of
continuous features.
Parameters
----------
method : discretization method (default: Orange.preprocess.discretize.Discretization)
"""
def __init__(self, method=None):
self.method = method
def __call__(self, data):
"""
Compute and apply discretization of the given data. Returns a new
data table.
Parameters
----------
data : Orange.data.Table
A data table to be discretized.
"""
def transform(var):
if is_continuous(var):
new_var = method(data, var)
if new_var is not None and len(new_var.values) >= 2:
return new_var
else:
return None
else:
return var
method = self.method or discretize.EqualFreq()
attributes = [transform(var) for var in data.domain.attributes]
attributes = [var for var in attributes if var is not None]
domain = Orange.data.Domain(
attributes, data.domain.class_vars, data.domain.metas)
return data.from_table(domain, data)
class Impute(Preprocess):
"""
Construct a imputer, a preprocessor for imputation of missing values in
the data table.
Parameters
----------
method : imputation method (default: Orange.preprocess.impute.Average())
"""
def __init__(self, method=Orange.preprocess.impute.Average()):
self.method = method
def __call__(self, data):
"""
Apply an imputation method to the given data set. Returns a new
data table with missing values replaced by their imputations.
Parameters
----------
data : Orange.data.Table
An input data table.
"""
method = self.method or impute.Average()
newattrs = [method(data, var) for var in data.domain.attributes]
domain = Orange.data.Domain(
newattrs, data.domain.class_vars, data.domain.metas)
return data.from_table(domain, data)
class SklImpute(Preprocess):
__wraps__ = skl_preprocessing.Imputer
def __init__(self, strategy='mean', force=True):
self.strategy = strategy
self.force = force
def __call__(self, data):
if not self.force and not np.isnan(data.X).any():
return data
self.imputer = skl_preprocessing.Imputer(strategy=self.strategy)
X = self.imputer.fit_transform(data.X)
features = [impute.Average()(data, var, value) for var, value in
zip(data.domain.attributes, self.imputer.statistics_)]
domain = Orange.data.Domain(features, data.domain.class_vars,
data.domain.metas)
return Orange.data.Table(domain, X, data.Y, data.metas)
class RemoveConstant(Preprocess):
"""
Construct a preprocessor that removes features with constant values
from the data set.
"""
def __call__(self, data):
"""
Remove columns with constant values from the data set and return
the resulting data table.
Parameters
----------
data : an input data set
"""
oks = bottlechest.nanmin(data.X, axis=0) != \
bottlechest.nanmax(data.X, axis=0)
atts = [data.domain.attributes[i] for i, ok in enumerate(oks) if ok]
domain = Orange.data.Domain(atts, data.domain.class_vars,
data.domain.metas)
return Orange.data.Table(domain, data)
class PreprocessorList(object):
"""
Store a list of preprocessors and on call apply them to the data set.
Parameters
----------
preprocessors : list
A list of preprocessors.
"""
def __init__(self, preprocessors):
self.preprocessors = list(preprocessors)
def __call__(self, data):
"""
Applies a list of preprocessors to the data set.
Parameters
----------
data : an input data table
"""
for pp in self.preprocessors:
data = pp(data)
return data
|
googleads/google-ads-python
|
google/ads/googleads/v9/services/services/campaign_simulation_service/transports/base.py
|
Python
|
apache-2.0
| 3,952
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import typing
import pkg_resources
import google.auth # type: ignore
from google.api_core import gapic_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.ads.googleads.v9.resources.types import campaign_simulation
from google.ads.googleads.v9.services.types import campaign_simulation_service
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-ads",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class CampaignSimulationServiceTransport(metaclass=abc.ABCMeta):
"""Abstract transport class for CampaignSimulationService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/adwords",)
def __init__(
self,
*,
host: str = "googleads.googleapis.com",
credentials: ga_credentials.Credentials = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)
# Save the credentials.
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
self._prep_wrapped_messages(client_info)
def _prep_wrapped_messages(self, client_info):
# Precomputed wrapped methods
self._wrapped_methods = {
self.get_campaign_simulation: gapic_v1.method.wrap_method(
self.get_campaign_simulation,
default_timeout=None,
client_info=client_info,
),
}
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def get_campaign_simulation(
self,
) -> typing.Callable[
[campaign_simulation_service.GetCampaignSimulationRequest],
campaign_simulation.CampaignSimulation,
]:
raise NotImplementedError
__all__ = ("CampaignSimulationServiceTransport",)
|
palichis/elmolino
|
elmolino/wsgi.py
|
Python
|
gpl-2.0
| 391
| 0.002558
|
"""
WSGI config for elmolino project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "elmolino.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
mozilla/verbatim
|
vendor/lib/python/translate/storage/tiki.py
|
Python
|
gpl-2.0
| 6,875
| 0.002327
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008 Mozilla Corporation, Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Class that manages TikiWiki files for translation. Tiki files are <strike>ugly and
inconsistent</strike> formatted as a single large PHP array with several special
sections identified by comments. Example current as of 2008-12-01::
<?php
// Many comments at the top
$lang=Array(
// ### Start of unused words
"aaa" => "zzz",
// ### end of unused words
// ### start of untranslated words
// "bbb" => "yyy",
// ### end of untranslated words
// ### start of possibly untranslated words
"ccc" => "xxx",
// ### end of possibly untranslated words
"ddd" => "www",
"###end###"=>"###end###");
?>
In addition there are several auto-generated //-style comments scattered through the
page and array, some of which matter when being parsed.
This has all been gleaned from the
`TikiWiki source <http://tikiwiki.svn.sourceforge.net/viewvc/tikiwiki/trunk/get_strings.php?view=markup>`_.
As far as I know no detailed documentation exists for the tiki language.php files.
"""
import datetime
import re
from translate.misc import wStringIO
from translate.storage import base
class TikiUnit(base.TranslationUnit):
"""A tiki unit entry."""
def __init__(self, source=None, encoding="UTF-8"):
self.location = []
super(TikiUnit, self).__init__(source)
def __unicode__(self):
"""Returns a string formatted to be inserted into a tiki language.php file."""
ret = u'"%s" => "%s",' % (self.source, self.target)
if self.location == ["untranslated"]:
ret = u'// ' + ret
return ret + "\n"
def addlocation(self, location):
"""Location is defined by the comments in the file. This function will only
set valid locations.
:param location: Where the string is located in the file. Must be a valid location.
"""
if location in ['unused', 'untranslated', 'possiblyuntranslated', 'translated']:
self.location.append(location)
def getlocations(self):
"""Returns the a list of the location(s) of the string."""
return self.location
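# Added example (illustrative): a unit whose location is "untranslated"
# renders commented out, e.g. unicode(unit) == u'// "Save" => "",\n'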
class TikiStore(base.TranslationStore):
"""Represents a tiki language.php file."""
def __init__(self, inputfile=None):
"""If an inputfile is specified it will be parsed.
:param inputfile: Either a string or a filehandle of the source file
"""
base.TranslationStore.__init__(self, TikiUnit)
self.units = []
self.filename = getattr(inputfile, 'name', '')
if inputfile is not None:
self.parse(inputfile)
def __str__(self):
"""Will return a formatted tiki-style language.php file."""
_unused = []
_untranslated = []
_possiblyuntranslated = []
_translated = []
output = self._tiki_header()
# Reorder all the units into their groups
for unit in self.units:
if unit.getlocations() == ["unused"]:
_unused.append(unit)
elif unit.getlocations() == ["untranslated"]:
_untranslated.append(unit)
elif unit.getlocations() == ["possiblyuntranslated"]:
_possiblyuntranslated.append(unit)
else:
_translated.append(unit)
output += "// ### Start of unused words\n"
for unit in _unused:
output += unicode(unit)
output += "// ### end of unused words\n\n"
output += "// ### start of untranslated words\n"
for unit in _untranslated:
output += unicode(unit)
output += "// ### end of untranslated words\n\n"
output += "// ### start of possibly untranslated words\n"
for unit in _possiblyuntranslated:
output += unicode(unit)
output += "// ### end of possibly untranslated words\n\n"
for unit in _translated:
output += unicode(unit)
output += self._tiki_footer()
return output.encode('UTF-8')
def _tiki_header(self):
"""Returns a tiki-file header string."""
return u"<?php // -*- coding:utf-8 -*-\n// Generated from po2tiki on %s\n\n$lang=Array(\n" % datetime.datetime.now()
def _tiki_footer(self):
"""Returns a tiki-file footer string."""
return u'"###end###"=>"###end###");\n?>'
def parse(self, input):
"""Parse the given input into source units.
:param input: the source, either a string or filehandle
"""
if hasattr(input, "name"):
self.filename = input.name
if isinstance(input, str):
input = wStringIO.StringIO(input)
_split_regex = re.compile(r"^(?:// )?\"(.*)\" => \"(.*)\",$", re.UNICODE)
try:
_location = "translated"
for line in input:
# The tiki file fails to identify each section so we have to look for start and end
# points and if we're outside of them we assume the string is translated
if line.count("### Start of unused words"):
_location = "unused"
elif line.count("### start of untranslated words"):
_location = "untranslated"
elif line.count("### start of possibly untranslated words"):
_location = "possiblyuntranslated"
elif line.count("### end of unused words"):
_location = "translated"
elif line.count("### end of untranslated words"):
_location = "translated"
elif line.count("### end of possibly untranslated words"):
_location = "translated"
match = _split_regex.match(line)
if match:
unit = self.addsourceunit("".join(match.group(1)))
# Untranslated words get an empty msgstr
if not _location == "untranslated":
unit.settarget(match.group(2))
unit.addlocation(_location)
finally:
input.close()
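# Added example (illustrative): _split_regex captures source and target,
# so '"ccc" => "xxx",' yields groups ('ccc', 'xxx'); a commented entry
# '// "bbb" => "yyy",' matches as well (the untranslated section).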
|
bryongloden/cppcheck
|
tools/listErrorsWithoutCWE.py
|
Python
|
gpl-3.0
| 645
| 0.006202
|
#!/usr/bin/python
import argparse
import xml.etree.ElementTree as ET
def main():
parser = argparse.ArgumentParser(description="List all errors without a CWE assigned in CSV format")
parser.add_argument("-F", metavar="filename", required=True, help="XML file containing output from
|
: ./cppcheck --errorlist --xml-version=2")
parsed = parser.parse_args()
tree = ET.parse(vars(parsed)["F"])
root = tree.getroot()
for child in root.iter("error"):
if "cwe" not in child.attrib:
print child.attrib["id"], ",", child.attrib["severity"], ",
|
", child.attrib["verbose"]
if __name__ == "__main__":
main()
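# Added usage note (illustrative):
#   cppcheck --errorlist --xml-version=2 > errorlist.xml
#   python listErrorsWithoutCWE.py -F errorlist.xml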
|
nordicpower/GameListPatch
|
interphase/util.py
|
Python
|
apache-2.0
| 17,408
| 0.003389
|
#Interphase - Copyright (C) 2009 James Garnon <http://gatc.ca/>
#Released under the MIT License <http://opensource.org/licenses/MIT>
from __future__ import division
import os
from env import engine
__docformat__ = 'restructuredtext'
class Text(object):
"""
Receives text to display on surface.
Arguments include the target surface for text rendering, font_type is a list of alternate font names, and font_size is the font size.
"""
_font = {}
_cache = {}
def __init__(self, surface, font_type=None, font_size=None):
self.screen = surface
x, y = self.screen.get_size()
self.dimension = {'x':x, 'y':y}
self.message = None
self.messages = []
if font_size:
self.font_size = int(font_size)
else:
self.font_size = 10
if isinstance(font_type, str):
font_type = [font_type]
if not Text._font:
engine.font.init()
font = None
if font_type:
font_type = ','.join(font_type)
if font_type.startswith('file:'):
font = font_type[5:].strip()
if not os.path.exists(font):
print('Font not found: %s' % font)
font = None
else:
font = engine.font.match_font(font_type)
if not font:
font_type = 'verdana, tahoma, bitstreamverasans, freesans, arial'
font = engine.font.match_font(font_type)
if not font:
font = engine.font.get_default_font()
font_type = font
Text._font['default'] = font
Text._font['defaults'] = font_type
Text._font[font] = { self.font_size:engine.font.Font(font,self.font_size) }
font_type = None
if font_type:
font_type = ','.join(font_type)
if font_type != Text._font['defaults']:
if font_type.startswith('file:'):
font_type = font_type[5:].strip()
if not os.path.exists(font_type):
print('Font not found: %s' % font_type)
font_type = None
else:
font_type = engine.font.match_font(font_type)
if font_type:
if font_type not in Text._font:
Text._font[font_type] = { self.font_size:engine.font.Font(font_type,self.font_size) }
else:
font_type = Text._font['default']
else:
font_type = Text._font['default']
else:
font_type = Text._font['default']
if self.font_size not in Text._font[font_type]:
Text._font[font_type][self.font_size] = engine.font.Font(font_type,self.font_size)
self.font_type = font_type
self.font = Text._font[self.font_type]
self.x = 0
self.y = 0
self.center = False
self.font_color = (255,0,0)
self.font_bgcolor = (0,0,0)
self.split_text = False
self.linesize = self.font[self.font_size].get_linesize()
self.margin = {'t':0, 'r':0, 'b':0, 'l':0}
self.multiline = False
self.cache = None
self.cache_key = None
def __call__(self, surface='default'):
"""Writes text to surface."""
if surface == 'default':
self.surface = self.screen
else:
self.surface = surface
self.update()
return self.surface
def render(self, surface='default'):
"""Writes text to surface."""
if surface == 'default':
self.surface = self.screen
else:
self.surface = surface
self.update()
return self.surface
def add(self,*message_append):
"""Add to text."""
for item in message_append:
self.message = str(item)
self.messages.append(self.message)
def set_position(self, position, center=False):
"""Set position to write text."""
x, y = position
if x < self.dimension['x'] and y < self.dimension['y']:
self.x = x
self.y = y
if center:
self.center = True
return True
else:
return False
def set_text_alignment(self, setting):
"""Set text alignment. Setting is 'center' or 'left'."""
if setting == 'center':
self.center = True
elif setting == 'left':
self.center = False
def set_margin(self, margin):
"""Set text margin."""
try:
self.margin['t'], self.margin['r'], self.margin['b'], self.margin['l'] = margin
except TypeError:
self.margin['t'] = self.margin['r'] = self.margin['b'] = self.margin['l'] = margin
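# Added example (illustrative): set_margin accepts a 4-tuple or a single
# number, e.g. set_margin((10, 5, 10, 5)) sets top/right/bottom/left
# individually, while set_margin(8) sets all four margins to 8.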
def set_multiline(self, multiline=True):
"""Set multiline text."""
self.multiline = multiline
def set_font(self, font_type, default=False):
"""Set font of text."""
if isinstance(font_type, str):
font_type = [font_type]
font_type = ','.join(font_type)
if font_type == 'default':
font_type = Text._font['default']
self.font = Text._font[font_type]
self.font_type = font_type
elif font_type != Text._font['defaults']:
if font_type.startswith('file:'):
font = font_type[5:].strip()
if not os.path.exists(font):
print('Font not found: %s' % font)
font = None
else:
font = engine.font.match_font(font_type)
if font:
if font not in Text._font:
Text._font[font] = { self.font_size:engine.font.Font(font,self.font_size) }
self.font = Text._font[font]
self.font_type = font
if default:
Text._font['default'] = font
Text._font['defaults'] = font_type
self.linesize = self.font[self.font_size].get_linesize()
self.cache = None
def get_font(self, font_info='font'):
"""Get current font."""
if font_info == 'font':
return self.font_type
elif font_info == 'default':
return Text._font['default']
elif font_info == 'system':
return engine.font.get_fonts()
def get_font_size(self):
"""Get current font size."""
return self.font_size
def set_font_size(self, size):
"""Set font size of text."""
self.font_size = size
if size not in Text._font[self.font_type]:
Text._font[self.font_type][self.font_size] = engine.font.Font(self.font_type,self.font_size)
self.font = Text._font[self.font_type]
self.linesize = self.font[self.font_size].get_linesize()
self.cache = None
def set_font_color(self, color):
"""Set font color of text."""
self.font_color = color
self.cache = None
def set_font_bgcolor(self, color=None):
"""Set font background color."""
self.font_bgcolor = color
self.cache = None
def set_split_text(self, split_text=True):
"""Set whether text split to new line at space."""
self.split_text = split_text
def check_size(self, text):
"""Get size required for given text."""
width, height = self.font[self.font_size].size(text)
return width, height
def check_sizes(self, texts):
"""Get size required for a list of texts."""
text_size = {}
for text in texts:
text_size[text] = self.check_size(text)
return text_size
def surface_size(self, *dim):
"""Surface size needed to fit text. Return estimated width for col and height for row, adjusted for margins."""
try:
col, row = dim[0], dim[1]
except IndexError:
col, row = dim[0]
sizes = [self.check_size(char)[0] for char in 'abcdefghijklmnopqrstuvwxyz ']
charsize = sum(siz
|
ifcharming/original2.0
|
tests/scripts/xml2/xmlparser.py
|
Python
|
gpl-3.0
| 6,723
| 0.00119
|
# Copyright (C) 2009 by Ning Shi and Andy Pavlo
# Brown University
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import sys
import os
from urlparse import urlparse
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
from xml.sax.handler import feature_namespaces
from xml.sax.xmlreader import InputSource
try:
from cStringIO import StringIO as _StringIO
except:
from StringIO import StringIO as _StringIO
# Detects the directory which contains this file so that we can import httplib2.
cwd = os.getcwd()
realpath = os.path.realpath(__file__)
basedir = os.path.dirname(realpath)
basename = os.path.basename(realpath)
if not os.path.exists(realpath):
if os.path.exists(os.path.join(cwd, basename)):
basedir = cwd
sys.path.append(os.path.dirname(basedir))
from exceptions import *
class ContentParser(ContentHandler):
"""XML handler class.
This class is used by the SAX XML parser.
"""
__parallels = ("Statements",)
__terminals = ("SQL", "Status", "Info", "Result", "Se
|
ed")
def __init__(self, l):
"""Constructor.
'l': An empty dictionary to be filled with request pairs.
"""
ContentHandler.__init__(self)
self.rp_list = l
self.__current_key = []
self.__current = [self.rp_list]
def startElement(self, name, attrs):
"""Marks the start of an element.
This is the callback used by the SAX XML parser to notify us that a new
element begins.
'name': The name of the element.
'attrs': The attributes of the element.
"""
name = name.encode("utf-8")
if name in self.__terminals:
self.__current_key.append(name)
return
elif name in self.__parallels:
self.__current[-1][name] = []
self.__current.append(self.__current[-1][name])
else:
if type(self.__current[-1]) is list:
self.__current[-1].append({})
self.__current.append(self.__current[-1][-1])
else:
self.__current[-1][name] = {}
self.__current.append(self.__current[-1][name])
self.__current_key.append(name)
for n in attrs.getNames():
self.__current[-1][n.encode("utf-8")] = attrs.getValue(n)
def endElement(self, name):
"""Marks the end of an element.
This is the callback used by the SAX XML parser to notify us that an
opened element ends.
'name': The name of the element.
"""
name = name.encode("utf-8")
if self.__current_key[-1] != name:
raise InvalidXML("Start tag does not match end tag.")
if name not in self.__terminals:
self.__current.pop()
self.__current_key.pop()
def characters(self, content):
"""Marks the inner content of an element.
This is the callback used by the SAX XML parser to notify us about the
inner content of an element.
'content': The inner content.
"""
content = content.strip()
if content:
if len(self.__current_key) == 0:
raise InvalidXML("No tag opened.")
if type(self.__current[-1]) is list:
self.__current[-1].append(content)
else:
if self.__current_key[-1] in self.__current[-1]:
content = self.__current[-1][self.__current_key[-1]] + \
content
else:
content = content
self.__current[-1][self.__current_key[-1]] = content
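# Added example (illustrative): feeding the parser
#   <SQLGenerator><Statements><SQL>SELECT 1</SQL></Statements></SQLGenerator>
# leaves rp_list == {'SQLGenerator': {'Statements': ['SELECT 1']}}, since
# "Statements" is a parallel element (a list) and "SQL" is a terminal.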
class XMLParser:
"""The XML parser.
TODO: We should validate the XML before parsing.
"""
def __init__(self, url_file_string):
"""Constructor.
'url_file_string': A URL, a file handle, a filename which points to the
actual XML document or the actual XML data.
"""
self.url_file_string = url_file_string
self.rp_list = {}
self.__data = None
self.__parser = make_parser()
self.__parser.setFeature(feature_namespaces, 0)
self.__parser.setContentHandler(ContentParser(self.rp_list))
def __open_resource__(self):
"""Opens the resource depends on the type of information given.
If it is a file handle, nothing needs to be done; if it is the XML data,
make it readable like a file; if it is a filename, open it and return
the file handle.
Return: A handle to read from by calling the method 'read()' of the
handle.
"""
if hasattr(self.url_file_string, 'read'):
return self.url_file_string
if self.url_file_string == "-":
return sys.stdin
if self.url_file_string[0] == "<":
return _StringIO(self.url_file_string.encode("utf-8"))
try:
return open(self.url_file_string)
except:
pass
def __read_data__(self):
"""Reads the XML document.
"""
if self.__data:
return
fd = self.__open_resource__()
if fd:
data = fd.read()
fd.close()
self.__data = InputSource()
self.__data.setByteStream(_StringIO(data))
def __parse__(self):
"""Parses the XML document.
"""
if not self.__data:
self.__read_data__()
if self.__data:
self.__parser.parse(self.__data)
def get_data(self):
"""Gets the request pairs dictionary.
Return: The request pairs dictionary.
"""
if not self.__data:
self.__parse__()
return self.rp_list["SQLGenerator"]
|
izevg/CryptoLabs
|
first_lab.py
|
Python
|
gpl-2.0
| 8,202
| 0.003447
|
# -*- encoding: utf-8 -*-
from __future__ import division
import itertools
import operator
import collections
debug = True
to_bin = lambda integer: zero_filler(bin(integer)[2:])
def zero_filler(filling_string):
result = ""
if len(filling_string) != 4:
result = filling_string
else:
return filling_string
while len(result) != 4:
result = "0" + result
return result
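# Added example (illustrative): to_bin(5) == "0101" -- bin() output is
# stripped of its "0b" prefix and left-padded with zeros to four bits.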
def prettify_table(table, table_type, report_file=None):
out_table = {
"values_table": [],
"pretty_table": []
}
if table_type == "S1" or table_type == "S2":
for i in range(0, 2):
for j in range(0, 8):
value = table[i][j]
binary = to_bin(value)
out_table["pretty_table"].append(binary)
out_table["values_table"].append(value)
elif table_type == "S3":
arr1 = out_table; arr2 = out_table; arr = out_table
for j in range(0, 4):
for k in range(0, 2):
value = table[k][j]
binary = to_bin(value)
arr1["pretty_table"].append(binary)
arr1["values_table"].append(value)
for k in range(2, 4):
value = table[k][j]
binary = to_bin(value)
arr2["pretty_table"].append(binary)
arr2["values_table"].append(value)
arr.update(arr1); arr.update(arr2)
out_table.update(arr)
elif table_type == "delta_C_table":
iteration = 0
report_file.write("|\tInp 1\t|\tInp 2\t|\tOut 1\t|\tOut 2\t|\tDeltaC\t|\n")
report_file.write("=============================================================\n")
for i in table:
if iteration == 16:
report_file.write("=============================================================\n")
iteration = 0
report_file.write("|\t%s\t|\t%s\t|\t%s\t|\t%s\t|\t%s\t|\n" % (
to_bin(i["input_1"]),
to_bin(i["input_2"]),
to_bin(i["output_1"]),
to_bin(i["output_2"]),
to_bin(i["delta_C"])
))
iteration += 1
report_file.write("=============================================================\n")
return out_table
def gen_delta_A_tables():
values = {
"outputs": [],
"inputs": []
}
for i in range(0, 16):
values["outputs"].append(i)
for j in range(0, 16):
data = {
"first": j,
"second": i ^ j
}
values["inputs"].append(data)
return values
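# Added note (illustrative): for each output difference i, the table pairs
# every input j with j XOR i; e.g. for i == 3 one entry is
# {'first': 5, 'second': 6}, since 5 ^ 6 == 3.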
def gen_delta_C_tables(S_block_table, delta_A_table):
values = []
for i in delta_A_table["inputs"]:
input_data = {
"input_1": i["first"],
"input_2": i["second"],
}
output_data = {
"output_1": S_block_table[input_data["input_1"]],
"output_2": S_block_table[input_data["input_2"]],
}
delta_C_data = {
"delta_C": output_data["output_1"] ^ output_data["output_2"]
}
final_dict = dict()
final_dict.update(input_data)
final_dict.update(output_data)
final_dict.update(delta_C_data)
values.append(final_dict)
return values
def block_analysis_table(delta_C_table, t_type):
global report
    # Declare the variables used in this function
    values = {
        'table': {}, # Table of value counts
        'probability': {}, # Table of probabilities
        'max': [], # Maximum probability
        'bytes': [None for x in range(16)] # Array of the indices at
                                           # which the maximum
                                           # probability occurs
    }
index = 0
j_divider = 0
    # Set the boundary value used when filling the probability
    # and value-count tables
if t_type == "S1" or t_type == "S2":
j_divider = 8
elif t_type == "S3":
j_divider = 4
    # Generate the value-count and probability tables
    for i in range(0, 16):
        # For each of the 16 dA elements
        arr1 = []; arr2 = []
        for j in range(0, 16):
            # For each of the 16 dC elements
            value = delta_C_table[index]["delta_C"]
            # Fill row by row until the counter reaches the
            # boundary value
            if j < j_divider:
arr1.append(value)
arr2.append(value / 16)
values['table'].update({i : arr1})
values['probability'].update({i : arr2})
index += 1
m = max(arr2)
values['max'].append(m)
values['max'] = max(values['max'])
if debug:
print("Maximum is %.4f" % values['max'])
for i in values['probability'].values():
probability.write("%s\n" % i)
maximum = values['max']
index = 0
for i in values['probability'].values():
try:
values['bytes'][index] = i.index(maximum)
except ValueError:
pass
index += 1
report.write("\n=====================\n")
index = 0
arr = []
for i in values['bytes']:
        if i is not None:
report.write("|\t%s\t|\t%s\t|\n" % (to_bin(index), to_bin(i)))
arr.append(to_bin(index))
index += 1
report.write("=====================\n\n")
values['bytes'] = arr
return values
def input_diff_summ(delta_A_summary):
result = []
for i in delta_A_summary[0]:
for j in delta_A_summary[1]:
for k in delta_A_summary[2]:
result.append(i + j + k)
# print(result)
return result
def wrapper(S_value, delta_A, report, delta_A_summary, table_count):
table = "S%d" % table_count
delta_C = gen_delta_C_tables(S_value, delta_A)
report.write("\n\ndC table for %s:\n" % table_count)
prettify_table(delta_C, "delta_C_table", report)
result = block_analysis_table(delta_C, table)
delta_A_summary.append(result['bytes'])
return result
S1_table = [[6, 3, 1, 7, 1, 4, 7, 3], [3, 2, 5, 4, 6, 7, 2, 5]]
S2_table = [[6, 2, 3, 2, 6, 1, 3, 4], [7, 5, 4, 5, 2, 1, 7, 5]]
S3_table = [[1, 1, 1, 2], [1, 2, 2, 1], [3, 2, 2, 3], [3, 3, 3, 1]]
P_table = [8, 7, 3, 2, 5, 4, 1, 6]
EP_table = [2, 5, 7, 3, 8, 6, 1, 4, 2, 6, 3, 5]
# print(gen_delta_A_tables())
report = open("report.txt", "w")
probability = open("probability.txt", "w")
pretty_S1 = prettify_table(S1_table, "S1")["pretty_table"]
S1_values = prettify_table(S1_table, "S1")["values_table"]
pretty_S2 = prettify_table(S2_table, "S2")["pretty_table"]
S2_values = prettify_table(S2_table, "S2")["values_table"]
pretty_S3 = prettify_table(S3_table, "S3")["pretty_table"]
S3_values = prettify_table(S3_table, "S3")["values_table"]
delta_A_summary = []
report.write("S1 table:\n")
for i in range(0, len(pretty_S1)):
report.write("|\t%s\t|\t%s\t|\n" % (to_bin(i), pretty_S1[i]))
report.write("S2 table:\n")
for i in range(0, len(pretty_S2)):
report.write("|\t%s\t|\t%s\t|\n" % (to_bin(i), pretty_S2[i]))
report.write("S3 table:\n")
for i in range(0, len(pretty_S3)):
report.write("|\t%s\t|\t%s\t|\n" % (to_bin(i), pretty_S3[i]))
delta_A = gen_delta_A_tables()
wrapper(S1_values, delta_A, report, delta_A_summary, 1)
wrapper(S2_values, delta_A, report, delta_A_summary, 2)
result = wrapper(S3_values, delta_A, report, delta_A_summary, 3)
for i in result["probability"].values():
probability.write("%s\n" % i)
diff = input_diff_summ(delta_A_summary)
print(len(diff))
arr = []
for i in diff:
needed = i[1] + i[5] + i[2] + i[4]
having = i[:4]
if having == needed:
arr.append(i)
probability.write("%s " % i)
# print(arr)
probability.close()
report.close()
|
coreyoconnor/nixops
|
nixops/resources/azure_dns_zone.py
|
Python
|
lgpl-3.0
| 4,631
| 0.003239
|
# -*- coding: utf-8 -*-
# Automatic provisioning of Azure DNS zones.
import os
import azure
import json
from requests import Request
try:
from urllib import quote
except ImportError:
from urllib.parse import quote
from nixops.util import attr_property
from nixops.azure_common import ResourceDefinition, ResourceState, ResId
from azure.common import AzureHttpError
from azure.mgmt.network import *
class AzureDNSZoneDefinition(ResourceDefinition):
"""Definition of an Azure DNS Zone"""
@classmethod
def get_type(cls):
return "azure-dns-zone"
@classmethod
def get_resource_type(cls):
return "azureDNSZones"
def __init__(self, xml):
ResourceDefinition.__init__(self, xml)
self.dns_zone_name = self.get_option_value(xml, 'name', str)
self.copy_option(xml, 'resourceGroup', 'resource')
self.copy_tags(xml)
def show_type(self):
return self.get_type()
class AzureDNSZoneState(ResourceState):
"""State of an Azure DNS Zone"""
dns_zone_name = attr_property("azure.name", None)
resource_group = attr_property("azure.resourceGroup", None)
tags = attr_property("azure.tags", {}, 'json')
@classmethod
def get_type(cls):
return "azure-dns-zone"
@property
def resource_id(self):
return self.dns_zone_name
@property
def full_name(self):
return "Azure DNS zone '{0}'".format(self.resource_id)
def is_settled(self, resource):
return True
def get_resource_url(self):
return ("https://management.azure.com/subscriptions/{0}"
"/resourceGroups/{1}/providers
|
/Microsoft.Network"
"/dnsZones/{2}?api-version=2015-05-04-preview"
.format(quote(self.subscription_id),
quote(self.resource_group),
quote(self.dns_zone_name)))
def mk_request(self, method):
http_request = Request()
http_request.url = self.get_resource_url()
http_request.method = method
http_request.headers['Content-Type'] = 'application/json'
return http_request
    def get_resource(self):
response = self.nrpc().send_request(self.mk_request('GET'))
if response.status_code == 200:
return json.loads(response.content.decode())
else:
return None
def destroy_resource(self):
response = self.nrpc().send_request(self.mk_request('DELETE'))
if response.status_code != 200:
raise AzureHttpError(response.content, response.status_code)
defn_properties = [ 'tags' ]
def _create_or_update(self, defn):
info = {
"location": "global",
"tags": defn.tags,
"properties": { }
}
http_request = self.mk_request('PUT')
http_request.data = json.dumps(info)
http_request.headers['Content-Length'] = len(http_request.data)
response = self.nrpc().send_request(http_request)
if response.status_code not in [200, 201]:
raise AzureHttpError(response.content, response.status_code)
self.state = self.UP
self.copy_properties(defn)
def create(self, defn, check, allow_reboot, allow_recreate):
self.no_subscription_id_change(defn)
self.no_property_change(defn, 'resource_group')
self.copy_mgmt_credentials(defn)
self.dns_zone_name = defn.dns_zone_name
self.resource_group = defn.resource_group
if check:
zone = self.get_settled_resource()
if not zone:
self.warn_missing_resource()
elif self.state == self.UP:
self.handle_changed_property('tags', zone['tags'])
else:
self.warn_not_supposed_to_exist()
self.confirm_destroy()
if self.state != self.UP:
if self.get_settled_resource():
raise Exception("tried creating a DNS zone that already exists; "
"please run 'deploy --check' to fix this")
self.log("creating {0}...".format(self.full_name))
self._create_or_update(defn)
if self.properties_changed(defn):
self.log("updating properties of {0}...".format(self.full_name))
self.get_settled_resource_assert_exists()
self._create_or_update(defn)
def create_after(self, resources, defn):
from nixops.resources.azure_resource_group import AzureResourceGroupState
return {r for r in resources
if isinstance(r, AzureResourceGroupState) }
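# For reference (an editorial note; the subscription, group and zone values
# are hypothetical), the management URL built by get_resource_url() above has
# the shape:
#   https://management.azure.com/subscriptions/<sub>/resourceGroups/<group>
#     /providers/Microsoft.Network/dnsZones/<zone>?api-version=2015-05-04-preview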
|
VitalPet/c2c-rd-addons
|
product_price_property/__openerp__.py
|
Python
|
agpl-3.0
| 1,905
| 0.009449
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 ChriCar Beteiligungs- und Beratungs- GmbH (<http://www.camptocamp.at>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{ 'sequence': 500,
'name': 'Product Price Property',
'version': '1.0',
'category': 'Accounting & Finance',
'description': """
Creates a poperty for list and standard price on product (not template).
this allows different prices for variants and companies
ATT - 6.1 has server bug - ir property can not defined on "_inherits" table
Warning :
- This methode will not work if prices are used through SQL queries in OpenERP. Like
in report.analytic.line.to_invoice or in stock valuation report
- May
|
cause incompatibility in custom module because the data model change
""",
'author': 'ChriCar Beteiligungs- und Beratungs- GmbH',
'depends': [ 'product'],
'data': [
],
#'data': ['product_view.xml'],
'demo_xml': [],
'installable': False,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
danielvdao/facebookMacBot
|
venv/lib/python2.7/site-packages/twilio/rest/resources/sip/domains.py
|
Python
|
mit
| 5,827
| 0
|
from .. import InstanceResource, ListResource
class IpAccessControlListMapping(InstanceResource):
def delete(self):
"""
Remove this mapping (disassociate the ACL from the Domain).
"""
return self.parent.delete_instance(self.name)
class IpAccessControlListMappings(ListResource):
name = "IpAccessControlListMappings"
key = "ip_access_control_list_mappings"
instance = IpAccessControlListMapping
def create(self, ip_access_control_list_sid, **kwargs):
"""Add a :class:`CredentialListMapping` to this domain.
:param sid: String identifier for an existing
:class:`CredentialList`.
"""
kwargs.update(ip_access_control_list_sid=ip_access_control_list_sid)
return self.create_instance(kwargs)
def delete(self, sid):
"""Remove a :class:`CredentialListMapping` from this domain.
:param sid: String identifier for a CredentialList resource
"""
return self.delete_instance(sid)
class CredentialListMapping(InstanceResource):
def delete(self):
"""
Remove this mapping (disassociate the CredentialList from the Domain).
"""
return self.parent.delete_instance(self.name)
class CredentialListMappings(ListResource):
name = "CredentialListMappings"
key = "credential_list_mappings"
instance = CredentialListMapping
def create(self, credential_list_sid, **kwargs):
"""Add a :class:`CredentialListMapping` to this domain.
:param sid: String identifier for an existing
:class:`CredentialList`.
"""
kwargs.update(credential_list_sid=credential_list_sid)
return self.create_instance(kwargs)
def delete(self, sid):
"""Remove a :class:`CredentialListMapping` from this domain.
:param sid: String identifier for a CredentialList resource
"""
return self.delete_instance(sid)
class Domain(InstanceResource):
"""An inbound SIP Domain.
.. attribute:: sid
A 34 character string that uniquely identifies this resource.
.. attribute:: account_sid
The unique id of the Account responsible for this domain.
.. attribute:: domain_name
A unique domain name for this inbound SIP endpoint. Must end in
.sip.twilio.com.
.. attribute:: friendly_name
A human-readable name for this SIP domain. (restrictions?)
.. attribute:: auth_type
???
.. attribute:: voice_url
The URL Twilio will request when this domain receives a call.
.. attribute:: voice_method
The HTTP method Twilio will use when requesting the above voice_url.
Either GET or POST.
.. attribute:: voice_fallback_url
The URL that Twilio will request if an error occurs retrieving or
executing the TwiML requested by voice_url.
.. attribute:: voice_fallback_method
The HTTP method Twilio will use when requesting the voice_fallback_url.
Either GET or POST.
.. attribute:: voice_status_callback_url
The URL that Twilio will request to pass status parameters (such as
call ended) to your application.
.. attribute:: voice_status_callback_method
The HTTP method Twilio will use to make requests to the status_callback
URL. Either GET or POST.
.. attribute:: date_created
The date that this resource was created.
.. attribute:: date_updated
The date that this resource was last updated.
"""
subresources = [IpAccessControlListMappings, CredentialListMappings]
def update(self, **kwargs):
"""
Update this :class:`Domain`
Available attributes to update are described above as instance
attributes.
"""
return self.parent.update_instance(self.name, kwargs)
def delete(self):
"""
Delete this domain.
"""
return self.parent.delete_instance(self.name)
class Domains(ListResource):
name = "Domains"
key = "domains"
instance = Domain
def create(self, domain_name, **kwargs):
""" Create a :class:`Domain`.
:param str domain_name: A unique domain name ending in
'.sip.twilio.com'
:param str friendly_name: A human-readable name for this domain.
:param str voice_url: The URL Twilio will request when this domain
receives a call.
:param voice_method: The HTTP method Twilio should use to request
voice_url.
:type voice_method: None (defaults to 'POST'), 'GET', or 'POST'
:param str voice_fallback_url: A URL that Twilio will request if an
error occurs requesting or executing the TwiML at voice_url
:param str voice_fallback_method: The HTTP method that Twilio should
use to request the fallback_url
:type voice_fallback_method: None (defaults to 'POST'),
'GET', or 'POST'
:param str voice_status_callback: A URL that Twilio will request when
the call ends to notify your app.
        :param str voice_status_method: The HTTP method Twilio should use when
requesting the above URL.
"""
kwargs['domain_name'] = domain_name
return self.create_instance(kwargs)
def update(self, sid, **kwargs):
"""
Update a :class:`Domain`
Available attributes to update are described above in :meth:`create`.
        :param sid: String identifier for a Domain resource
"""
return self.update_instance(sid, kwargs)
def delete(self, sid):
"""
Delete a :class:`Domain`.
:param sid: String identifier for a Domain resource
"""
return self.delete_instance(sid)
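# Hypothetical usage sketch (the `domains` instance below is an assumption;
# in practice it would come from a configured REST client): a SIP domain is
# created, updated and removed through the resources defined above.
#   domain = domains.create('example.sip.twilio.com',
#                           friendly_name='Example',
#                           voice_url='https://example.com/voice')
#   domain.update(voice_method='GET')
#   domain.delete()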
|
neuropoly/spinalcordtoolbox
|
spinalcordtoolbox/resampling.py
|
Python
|
mit
| 7,336
| 0.003544
|
#########################################################################################
#
# Resample data using nibabel.
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2014 Polytechnique Montreal <www.neuro.polymtl.ca>
# Authors: Julien Cohen-Adad, Sara Dupont
#
# About the license: see the file LICENSE.TXT
#########################################################################################
# TODO: remove resample_file (not needed)
import logging
import numpy as np
import nibabel as nib
from nibabel.processing import resample_from_to
from spinalcordtoolbox.image import Image, add_suffix
from spinalcordtoolbox.utils import display_viewer_syntax
logger = logging.getLogger(__name__)
def resample_nib(image, new_size=None, new_size_type=None, image_dest=None, interpolation='linear', mode='nearest'):
"""
Resample a nibabel or Image object based on a specified resampling factor.
Can deal with 2d, 3d or 4d image objects.
:param image: nibabel or Image image.
:param new_size: list of float: Resampling factor, final dimension or resolution, depending on new_size_type.
:param new_size_type: {'vox', 'factor', 'mm'}: Feature used for resampling. Examples:
new_size=[128, 128, 90], new_size_type='vox' --> Resampling to a dimension of 128x128x90 voxels
new_size=[2, 2, 2], new_size_type='factor' --> 2x isotropic upsampling
        new_size=[1, 1, 5], new_size_type='mm' --> Resampling to a resolution of 1x1x5 mm
:param image_dest: Destination image to resample the input image to. In this case, new_size and new_size_type
are ignored
:param interpolation: {'nn', 'linear', 'spline'}. The interpolation type
:param mode: Outside values are filled with 0 ('constant') or nearest value ('nearest').
    :return: The resampled nibabel or Image image (depending on the input object type).
"""
# set interpolation method
dict_interp = {'nn': 0, 'linear': 1, 'spline': 2}
# If input is an Image object, create nibabel object from it
if type(image) == nib.nifti1.Nifti1Image:
img = image
elif type(image) == Image:
img = nib.nifti1.Nifti1Image(image.data, image.hdr.get_best_affine())
else:
        raise TypeError("Expected a nibabel Nifti1Image or an Image object")
if image_dest is None:
# Get dimensions of data
p = img.header.get_zooms()
shape = img.header.get_data_shape()
if img.ndim == 4:
new_size += ['1'] # needed because the code below is general, i.e., does not assume 3d input and uses img.shape
# compute new shape based on specific resampling method
if new_size_type == 'vox':
shape_r = tuple([int(new_size[i]) for i in range(img.ndim)])
elif new_size_type == 'factor':
if len(new_size) == 1:
# isotropic resampling
new_size = tuple([new_size[0] for i in range(img.ndim)])
# compute new shape as: shape_r = shape * f
shape_r = tuple([int(np.round(shape[i] * float(new_size[i]))) for i in range(img.ndim)])
elif new_size_type == 'mm':
if len(new_size) == 1:
# isotropic resampling
new_size = tuple([new_size[0] for i in range(img.ndim)])
# compute new shape as: shape_r = shape * (p_r / p)
shape_r = tuple([int(np.round(shape[i] * float(p[i]) / float(new_size[i]))) for i in range(img.ndim)])
else:
raise ValueError("'new_size_type' is not recognized.")
# Generate 3d affine transformation: R
affine = img.affine[:4, :4]
affine[3, :] = np.array([0, 0, 0, 1]) # satisfy to nifti convention. Otherwise it grabs the temporal
logger.debug('Affine matrix: \n' + str(affine))
R = np.eye(4)
for i in range(3):
try:
R[i, i] = img.shape[i] / float(shape_r[i])
except ZeroDivisionError:
raise ZeroDivisionError("Destination size is zero for dimension {}. You are trying to resample to an "
"unrealistic dimension. Check your NIFTI pixdim values to make sure they are "
"not corrupted.".format(i))
affine_r = np.dot(affine, R)
reference = (shape_r, affine_r)
# If reference is provided
else:
if type(image_dest) == nib.nifti1.Nifti1Image:
reference = image_dest
elif type(image_dest) == Image:
reference = nib.nifti1.Nifti1Image(image_dest.data, image_dest.hdr.get_best_affine())
else:
            raise TypeError("Expected a nibabel Nifti1Image or an Image object")
if img.ndim == 3:
# we use mode 'nearest' to overcome issue #2453
img_r = resample_from_to(
img, to_vox_map=reference, order=dict_interp[interpolation], mode=mode, cval=0.0, out_class=None)
elif img.ndim == 4:
# TODO: Cover img_dest with 4D volumes
# Import here instead of top of the file because this is an isolated case and nibabel takes time to import
data4d = np.zeros(shape_r)
# Loop across 4th dimension and resample each 3d volume
for it in range(img.shape[3]):
# Create dummy 3d nibabel image
nii_tmp = nib.nifti1.Nifti1Image(img.get_data()[..., it], affine)
img3d_r = resample_from_to(
nii_tmp, to_vox_map=(shape_r[:-1], affine_r), order=dict_interp[interpolation], mode=mode,
cval=0.0, out_class=None)
data4d[..., it] = img3d_r.get_data()
# Create 4d nibabel Image
img_r = nib.nifti1.Nifti1Image(data4d, affine_r)
# Copy over the TR parameter from original 4D image (otherwise it will be incorrectly set to 1)
img_r.header.set_zooms(list(img_r.header.get_zooms()[0:3]) + [img.header.get_zooms()[3]])
# Convert back to proper type
if type(image) == nib.nifti1.Nifti1Image:
return img_r
elif type(image) == Image:
return Image(img_r.get_data(), hdr=img_r.header, orientation=image.orientation, dim=img_r.header.get_data_shape())
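# Minimal usage sketch (editorial; `nii` is assumed to be an already-loaded
# nibabel image): the three `new_size_type` modes documented above map onto
# calls such as:
#   img_mm = resample_nib(nii, new_size=[1, 1, 1], new_size_type='mm',
#                         interpolation='linear')
#   img_x2 = resample_nib(nii, new_size=[2], new_size_type='factor',
#                         interpolation='nn')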
def resample_file(fname_data, fname_out, new_size, new_size_type, interpolation, verbose, fname_ref=None):
"""This function will resample the specified input
image file to the target size.
Can deal with 2d, 3d or 4d image objects.
:param fname_data: The input image filename.
:param fname_out: The output image filename.
:param new_size: The target size, i.e. 0.25x0.25
:param new_size_type: Unit of resample (mm, vox, factor)
:param interpolation: The interpolation type
:param verbose: verbosity level
:param fname_ref: Reference image to resample input image to
"""
# Load data
logger.info('load data...')
nii = nib.load(fname_data)
if fname_ref is not None:
nii_ref = nib.load(fname_ref)
else:
nii_ref = None
nii_r = resample_nib(nii, new_size.split('x'), new_size_type, image_dest=nii_ref, interpolation=interpolation)
# build output file name
if fname_out == '':
fname_out = add_suffix(fname_data, '_r')
else:
fname_out = fname_out
# save data
nib.save(nii_r, fname_out)
# to view results
display_viewer_syntax([fname_out], verbose=verbose)
return nii_r
|
JoseBlanca/franklin
|
test/gmod/read_source_test.py
|
Python
|
agpl-3.0
| 3,791
| 0.006858
|
'''
Created on 2009 eka 22
@author: peio
'''
# Copyright 2009 Jose Blanca, Peio Ziarsolo, COMAV-Univ. Politecnica Valencia
# This file is part of franklin.
# franklin is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# franklin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with franklin. If not, see <http://www.gnu.org/licenses/>.
import unittest
from franklin.gmod.read_source import (ReadSourceRegex, ReadSourceFile,
get_read_strain, ReadSources)
from StringIO import StringIO
REGEX_LIST = [('\w\wLE\w*', 'comav_library1'),
('\w\wUH\w*', 'comav_library1')
]
CLONE_READ_LIBRARY = '''read,clone,library
ESIE1234121,121313132clone, a
ESEE1234122,121313133clone,b
'''
class LibrarySourceRegexTest(unittest.TestCase):
    'It uses this test to check LibrarySourceRegex'
    def test_basic_use(self):
        '''It tests the basic use of the class ReadSourceRegex'''
read_name = 'ESLE1234121'
library_regex = ReadSourceRegex(REGEX_LIST)
library_name = library_regex.get_library(read_name)
assert library_name == 'comav_library1'
        # get_clone is not supported by ReadSourceRegex, so it must raise.
        # (The original try/except also swallowed self.fail()'s
        # AssertionError, so the test could never actually fail.)
        self.assertRaises(Exception, library_regex.get_clone, read_name)
class LibrarySourceFileTest(unittest.TestCase):
'It uses this test to check LibrarySourceFile'
@staticmethod
def test_basic_use():
        '''It tests the basic use of the class ReadSourceFile'''
read = 'ESIE1234121'
        fhand_clone_read = StringIO(CLONE_READ_LIBRARY)
read_source = ReadSourceFile(fhand_clone_read)
library = read_source.get_library(read)
clone = read_source.get_clone(read)
assert library == 'a'
assert clone == '121313132clone'
class ReadSourcesTest(unittest.TestCase):
    'It tests the ReadSources class'
    @staticmethod
    def test_basic_use():
        '''It tests the basic use of the class ReadSources'''
read = 'ESLE1234121'
read1 = 'ESEE1234122'
fhand_clone_read = StringIO(CLONE_READ_LIBRARY)
read_source = ReadSourceFile(fhand_clone_read)
library_regex = ReadSourceRegex(REGEX_LIST)
read_sources = ReadSources([read_source, library_regex])
library = read_sources.get_library(read)
assert library == 'comav_library1'
library = read_sources.get_library(read1)
assert library == 'b'
LIBRARY_FILE = '''format-version:1
library_definition
name: a
type: library type:genomic
organism:Cucumis melo
cvterms: SO:0001, SO:0002
properties: property type:strain:Oregon-R, property type:stage:adult male
library_definition
name:b
type: library type:genomic
organism: Cucumis melo
cvterms:SO:0003, SO:0004
properties: property type:strain:a_fly, property type:stage:pupa
'''
class GetStrainTest(unittest.TestCase):
    'It tests the get_strain function'
    @staticmethod
    def test_basic_use():
        'It tests the get_strain function'
library_name = 'a'
fhand_library = StringIO(LIBRARY_FILE)
strain = get_read_strain(library_name, [fhand_library])
assert strain == 'Oregon-R'
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Majsvaffla/strecklista
|
strecklista/wsgi.py
|
Python
|
gpl-3.0
| 183
| 0.005464
|
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE
|
", "strecklista.settings.production")
application = get_wsgi_a
|
pplication()
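# Deployment sketch (editorial; the WSGI server choice is an assumption, not
# part of this file):
#   gunicorn strecklista.wsgi:application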
|
yoeo/guesslang
|
guesslang/model.py
|
Python
|
mit
| 6,714
| 0
|
"""Machine learning model"""
from copy import deepcopy
import logging
from operator import itemgetter
from pathlib import Path
import shutil
from tempfile import TemporaryDirectory
from typing import List, Tuple, Dict, Any, Callable
import tensorflow as tf
from tensorflow.estimator import ModeKeys, Estimator
from tensorflow.python.training.tracking.tracking import AutoTrackable
LOGGER = logging.getLogger(__name__)
DATASET = {
ModeKeys.TRAIN: 'train',
ModeKeys.EVAL: 'valid',
ModeKeys.PREDICT: 'test',
}
class HyperParameter:
"""Model hyper parameters"""
BATCH_SIZE = 100
NB_TOKENS = 10000
VOCABULARY_SIZE = 5000
EMBEDDING_SIZE = max(10, int(VOCABULARY_SIZE**0.5))
DNN_HIDDEN_UNITS = [512, 32]
DNN_DROPOUT = 0.5
N_GRAM = 2
class Training:
"""Model training parameters"""
SHUFFLE_BUFFER = HyperParameter.BATCH_SIZE * 10
CHECKPOINT_STEPS = 1000
LONG_TRAINING_STEPS = 10 * CHECKPOINT_STEPS
SHORT_DELAY = 60
LONG_DELAY = 5 * SHORT_DELAY
def load(saved_model_dir: str) -> AutoTrackable:
"""Load a Tensorflow saved model"""
return tf.saved_model.load(saved_model_dir)
def build(model_dir: str, labels: List[str]) -> Estimator:
"""Build a Tensorflow text classifier """
config = tf.estimator.RunConfig(
model_dir=model_dir,
save_checkpoints_steps=Training.CHECKPOINT_STEPS,
)
categorical_column = tf.feature_column.categorical_column_with_hash_bucket(
key='content',
hash_bucket_size=HyperParameter.VOCABULARY_SIZE,
)
dense_column = tf.feature_column.embedding_column(
categorical_column=categorical_column,
dimension=HyperParameter.EMBEDDING_SIZE,
)
return tf.estimator.DNNLinearCombinedClassifier(
linear_feature_columns=[categorical_column],
dnn_feature_columns=[dense_column],
dnn_hidden_units=HyperParameter.DNN_HIDDEN_UNITS,
dnn_dropout=HyperParameter.DNN_DROPOUT,
label_vocabulary=labels,
n_classes=len(labels),
config=config,
)
def train(estimator: Estimator, data_root_dir: str, max_steps: int) -> Any:
"""Train a Tensorflow estimator"""
    train_spec = tf.estimator.TrainSpec(
        input_fn=_build_input_fn(data_root_dir, ModeKeys.TRAIN),
max_steps=max_steps,
)
if max_steps > Training.LONG_TRAINING_STEPS:
throttle_secs = Training.LONG_DELAY
else:
throttle_secs = Training.SHORT_DELAY
eval_spec = tf.estimator.EvalSpec(
input_fn=_build_input_fn(data_root_dir, ModeKeys.EVAL),
start_delay_secs=Training.SHORT_DELAY,
throttle_secs=throttle_secs,
)
LOGGER.debug('Train the model')
results = tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
training_metrics = results[0]
return training_metrics
def save(estimator: Estimator, saved_model_dir: str) -> None:
"""Save a Tensorflow estimator"""
with TemporaryDirectory() as temporary_model_base_dir:
export_dir = estimator.export_saved_model(
temporary_model_base_dir, _serving_input_receiver_fn
)
Path(saved_model_dir).mkdir(exist_ok=True)
export_path = Path(export_dir.decode()).absolute()
for path in export_path.glob('*'):
shutil.move(str(path), saved_model_dir)
def test(
saved_model: AutoTrackable,
data_root_dir: str,
mapping: Dict[str, str],
) -> Dict[str, Dict[str, int]]:
"""Test a Tensorflow saved model"""
values = {language: 0 for language in mapping.values()}
matches = {language: deepcopy(values) for language in values}
LOGGER.debug('Test the model')
input_function = _build_input_fn(data_root_dir, ModeKeys.PREDICT)
for test_item in input_function():
content = test_item[0]
label = test_item[1].numpy()[0].decode()
result = saved_model.signatures['predict'](content)
predicted = result['classes'].numpy()[0][0].decode()
label_language = mapping[label]
predicted_language = mapping[predicted]
matches[label_language][predicted_language] += 1
return matches
def predict(
saved_model: AutoTrackable,
mapping: Dict[str, str],
text: str
) -> List[Tuple[str, float]]:
"""Infer a Tensorflow saved model"""
content_tensor = tf.constant([text])
predicted = saved_model.signatures['serving_default'](content_tensor)
numpy_floats = predicted['scores'][0].numpy()
extensions = predicted['classes'][0].numpy()
probability_values = (float(value) for value in numpy_floats)
languages = (mapping[ext.decode()] for ext in extensions)
unsorted_scores = zip(languages, probability_values)
scores = sorted(unsorted_scores, key=itemgetter(1), reverse=True)
return scores
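# Illustrative inference sketch (editorial; the path and the mapping below
# are assumptions): once a saved model and its label-to-language mapping
# exist, prediction returns (language, probability) pairs sorted by score.
#   saved = load('path/to/saved_model')
#   scores = predict(saved, {'py': 'Python', 'c': 'C'}, 'def main(): pass')
#   best_language, best_probability = scores[0]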
def _build_input_fn(
data_root_dir: str,
mode: ModeKeys,
) -> Callable[[], tf.data.Dataset]:
"""Generate an input fonction for a Tensorflow model"""
pattern = str(Path(data_root_dir).joinpath(DATASET[mode], '*'))
def input_function() -> tf.data.Dataset:
dataset = tf.data.Dataset
dataset = dataset.list_files(pattern, shuffle=True).map(_read_file)
if mode == ModeKeys.PREDICT:
return dataset.batch(1)
if mode == ModeKeys.TRAIN:
dataset = dataset.shuffle(Training.SHUFFLE_BUFFER).repeat()
return dataset.map(_preprocess).batch(HyperParameter.BATCH_SIZE)
return input_function
def _serving_input_receiver_fn() -> tf.estimator.export.ServingInputReceiver:
"""Function to serve model for predictions."""
content = tf.compat.v1.placeholder(tf.string, [None])
receiver_tensors = {'content': content}
features = {'content': tf.map_fn(_preprocess_text, content)}
return tf.estimator.export.ServingInputReceiver(
receiver_tensors=receiver_tensors,
features=features,
)
def _read_file(filename: str) -> Tuple[tf.Tensor, tf.Tensor]:
"""Read a source file, return the content and the extension"""
data = tf.io.read_file(filename)
label = tf.strings.split([filename], '.').values[-1]
return data, label
def _preprocess(
data: tf.Tensor,
label: tf.Tensor,
) -> Tuple[Dict[str, tf.Tensor], tf.Tensor]:
"""Process input data as part of a workflow"""
data = _preprocess_text(data)
return {'content': data}, label
def _preprocess_text(data: tf.Tensor) -> tf.Tensor:
"""Feature engineering"""
padding = tf.constant(['']*HyperParameter.NB_TOKENS)
data = tf.strings.bytes_split(data)
data = tf.strings.ngrams(data, HyperParameter.N_GRAM)
data = tf.concat((data, padding), axis=0)
data = data[:HyperParameter.NB_TOKENS]
return data
|
agry/NGECore2
|
scripts/mobiles/tatooine/bantha_matriarch.py
|
Python
|
lgpl-3.0
| 1,676
| 0.02685
|
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('matriarch_bantha')
mobileTemplate.setLevel(15)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setMeatType("Herbivore Meat")
mobileTemplate.setMeatAmount(485)
	mobileTemplate.setHideType("Wooly Hide")
mobileTemplate.setBoneAmount(365)
mobileTemplate.setBoneType("Animal Bones")
mobileTemplate.setHideAmount(320)
mobileTemplate.setSocialGroup("bantha")
mobileTemplate.setAssistRange(2)
mobileTemplate.setStalker(False)
mobileTemplate.setOptionsBitmask(Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_matriarch_bantha.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
attacks.add('bm_bite_1')
attacks.add('bm_charge_1')
attacks.add('bm_dampen_pain_1')
attacks.add('bm_stomp_1')
mobileTemplate.setDefaultAttack('creatureMeleeAttack')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('bantha_matriarch', mobileTemplate)
return
|
theonion/django-bulbs
|
bulbs/infographics/data_serializers.py
|
Python
|
mit
| 1,619
| 0.000618
|
from rest_framework import serializers
from bulbs.utils.fields import RichTextField
from bulbs.utils.data_serializers import CopySerializer, EntrySerializer, BaseEntrySerializer
from .fields import ColorField
class XYEntrySerializer(BaseEntrySerializer):
title = RichTextField(required=False, field_size="short")
copy_x = RichTextField(field_size="long")
copy_y = RichTextField(field_size="long")
class ComparisonKeySerializer(serializers.Serializer):
title = RichTextField(required=False, field_size="short")
color = ColorField(required=False)
initial = serializers.CharField()
class ComparisonSerializer(serializers.Serializer):
key_x = ComparisonKeySerializer(required=False)
key_y = ComparisonKeySerializer(required=False)
entries = XYEntrySerializer(required=False, many=True, child_label="entry")
class ListInfographicDataSerializer(serializers.Serializer):
is_numbered = serializers.BooleanField(default=False)
entries = EntrySerializer(many=True, required=False, child_label="entry")
class ProConSerializer(serializers.Serializer):
body = RichTextField(required=False, field_size="long")
pro = CopySerializer(required=False, many=True)
con = CopySerializer(required=False, many=True)
class StrongSideWeakSideSerializer(serializers.Serializer):
    body = RichTextField(required=False, field_size="long")
strong = CopySerializer(required=False, many=True)
weak = CopySerializer(required=False, many=True)
class TimelineSerializer(serializers.Serializer):
entries = EntrySerializer(many=True, required=False, child_label="entry")
|
hzlf/openbroadcast.org
|
website/apps/alibrary/apiv2/serializers.py
|
Python
|
gpl-3.0
| 9,901
| 0.000707
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.urlresolvers import reverse_lazy
from django.conf import settings
from rest_framework import serializers
from rest_flex_fields import FlexFieldsModelSerializer
from rest_flex_fields.serializers import FlexFieldsSerializerMixin
from easy_thumbnails.templatetags.thumbnail import thumbnail_url
from profiles.apiv2.serializers import ProfileSerializer
from ..models import (
Artist,
Label,
Release,
Media,
Playlist,
PlaylistItem,
PlaylistItemPlaylist,
)
SITE_URL = getattr(settings, "SITE_URL")
class ImageSerializer(serializers.ImageField):
def to_representation(self, instance):
if not instance:
return
return "{}{}".format(SITE_URL, thumbnail_url(instance, "thumbnail_240"))
class ArtistSerializer(
FlexFieldsModelSerializer, serializers.HyperlinkedModelSerializer
):
url = serializers.HyperlinkedIdentityField(
view_name="api:artist-detail", lookup_field="uuid"
)
ct = serializers.CharField(source="get_ct")
detail_url = serializers.URLField(source="get_absolute_url")
image = ImageSerializer(source="main_image")
class Meta:
model = Artist
depth = 1
fields = ["url", "ct", "created", "updated", "id", "detail_url", "uuid", "name", "image"]
class LabelSerializer(
FlexFieldsModelSerializer, serializers.HyperlinkedModelSerializer
):
url = serializers.HyperlinkedIdentityField(
view_name="api:label-detail", lookup_field="uuid"
)
ct = serializers.CharField(source="get_ct")
detail_url = serializers.URLField(source="get_absolute_url")
image = ImageSerializer(source="main_image")
class Meta:
model = Label
depth = 1
fields = ["url", "ct", "created", "updated", "id", "detail_url", "uuid", "name", "image"]
class MediaSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name="api:media-detail", lookup_field="uuid"
)
ct = serializers.CharField(source="get_ct")
detail_url = serializers.URLField(source="get_absolute_url")
duration = serializers.FloatField(source="master_duration")
artist = serializers.HyperlinkedRelatedField(
many=False, read_only=True, view_name="api:artist-detail", lookup_field="uuid"
)
release = serializers.HyperlinkedRelatedField(
many=False, read_only=True, view_name="api:release-detail", lookup_field="uuid"
)
artist_display = serializers.CharField(source="get_artist_display")
release_display = serializers.SerializerMethodField()
image = ImageSerializer(source="release.main_image")
def get_release_display(self, obj, **kwargs):
return obj.release.name if obj.release else None
assets = serializers.SerializerMethodField()
def get_assets(self, obj, **kwargs):
# TODO: propperly serialize assets
stream_url = reverse_lazy(
"mediaasset-format",
kwargs={"media_uuid": obj.uuid, "quality": "default", "encoding": "mp3"},
)
waveform_url = reverse_lazy(
"mediaasset-waveform", kwargs={"media_uuid": obj.uuid, "type": "w"}
)
assets = {
"stream": "{}{}".format(SITE_URL, stream_url),
"waveform": "{}{}".format(SITE_URL, waveform_url),
}
# TODO: check if this is a good idea...
# request asset generation for media
# print('request asset generation for {}'.format(obj))
# Format.objects.get_or_create_for_media(media=obj)
# Waveform.objects.get_or_create_for_media(media=obj, type=Waveform.WAVEFORM)
return assets
class Meta:
model = Media
depth = 1
fields = [
"url",
"ct",
"created",
"updated",
"id",
"detail_url",
"uuid",
"image",
"name",
"duration",
"assets",
"isrc",
"artist_display",
"release_display",
"artist",
"release",
]
class ReleaseSerializer(
FlexFieldsSerializerMixin, serializers.HyperlinkedModelSerializer
):
url = serializers.HyperlinkedIdentityField(
view_name="api:release-detail", lookup_field="uuid"
)
ct = serializers.CharField(source="get_ct")
image = ImageSerializer(source="main_image")
detail_url = serializers.URLField(source="get_absolute_url")
releasedate = serializers.CharField(source="releasedate_approx")
media = MediaSerializer(many=True, read_only=True, source="get_media")
artist_display = serializers.CharField(source="get_artist_display")
# label = serializers.HyperlinkedRelatedField(
# many=False,
# read_only=True,
# view_name='api:label-detail', lookup_field="uuid"
# )
label = LabelSerializer(
read_only=True,
)
# TODO: `items` is used for player only. find a way to unify this.
items = serializers.SerializerMethodField()
def get_items(self, obj, **kwargs):
items = []
for media in obj.get_media():
serializer = MediaSerializer(
media, context={"request": self.context["request"]}
)
items.append({"content": serializer.data})
return items
class Meta:
model = Release
depth = 1
fields = [
"url",
"ct",
"uuid",
"created",
"updated",
"id",
"detail_url",
"name",
"image",
"releasedate",
"artist_display",
"media",
"label",
# TODO: `items` is used for player only. find a way to unify this.
"items",
]
# expandable_fields = {
# 'label': (LabelSerializer, {'read_only': True})
# }
class PlaylistItemField(serializers.RelatedField):
"""
    A custom field to use for the `item` generic relationship.
"""
def to_representation(self, value):
"""
Serialize tagged objects to a simple textual representation.
"""
if isinstance(value, Media):
# return 'Media: {}'.format(value.pk)
serializer = MediaSerializer(
value, context={"request": self.context["request"]}
)
        # NOTE: this branch repeats the Media check above; judging by the
        # string below it was presumably meant to match a separate Jingle model.
        elif isinstance(value, Media):
return "Jingle: {}".format(value.pk)
else:
raise Exception("Unexpected type of tagged object")
return serializer.data
class PlaylistItemSerializer(serializers.ModelSerializer):
# http://www.django-rest-framework.org/api-guide/relations/#generic-relationships
content = PlaylistItemField(read_only=True, source="content_object")
class Meta:
model = PlaylistItem
depth = 1
fields = ["content"]
class PlaylistItemPlaylistSerializer(serializers.ModelSerializer):
# item = PlaylistItemSerializer(read_only=True)
content = serializers.SerializerMethodField()
def get_content(self, obj, **kwargs):
# TODO: implement for `Jingle`
if isinstance(obj.item.content_object, Media):
serializer = MediaSerializer(
instance=Media.objects.get(pk=obj.item.content_object.pk),
many=False,
context={"request": self.context["request"]},
)
        # NOTE: duplicates the Media check above; per the TODO this branch is
        # presumably a placeholder for a future `Jingle` model.
        elif isinstance(obj.item.content_object, Media):
serializer = MediaSerializer(
instance=Media.objects.get(pk=obj.item.content_object.pk),
many=False,
context={"request": self.context["request"]},
)
else:
raise Exception("Unexpected type of tagged object")
return serializer.data
class Meta:
model = PlaylistItemPlaylist
depth = 1
fields = [
# 'item',
"content",
"position",
"cue_in",
"cue_out",
"fade_in",
"fade_out",
|
Aiacos/DevPyLib
|
mayaLib/guiLib/__init__.py
|
Python
|
agpl-3.0
| 62
| 0
|
from . import base
from . import mainMenu
from . import utils
| |
seoester/Git-Deployment-Handler
|
gitdh/tests/configgit.py
|
Python
|
mit
| 2,693
| 0.022651
|
import unittest, tempfile, os.path
from gitdh.config import Config
from gitdh.git import Git
from subprocess import check_output
class GitdhConfigGitTestCase(unittest.TestCase):
def setUp(self):
self.cStr = """
[Git]
RepositoryPath = /var/lib/gitolite/repositories/test.git
[Database]
Engine = sqlite
DatabaseFile = /var/lib/gitolite/data.sqlite
Table = commits
[master]
Path=/home/www/production
[development]
Path=/home/www/development
[crunch-command]
Mode = perfile
RegExp = \.php$
Command = eff_php_crunch ${f}
"""
def test_gitRepo(self):
d = tempfile.TemporaryDirectory()
self._createGitRepo(d.name)
c = Config.fromGitRepo(d.name)
self.assertTrue('Database' in c)
self.assertTrue('master' in c)
self.assertEqual(c['Database']['Engine'], 'sqlite')
self.assertEqual(c.repoPath, d.name)
c = Config.fromPath(d.name)
self.assertTrue('Database' in c)
self.assertTrue('master' in c)
self.assertEqual(c['Database']['Engine'], 'sqlite')
self.assertEqual(c.repoPath, d.name)
d.cleanup()
def test_bareGitRepo(self):
d = tempfile.TemporaryDirectory()
self._createBareGitRepo(d.name)
c = Config.fromGitRepo(d.name)
self.assertTrue('Database' in c)
self.assertTrue('master' in c)
self.assertEqual(c['Database']['Engine'], 'sqlite')
self.assertEqual(c.repoPath, d.name)
c = Config.fromPath(d.name)
self.assertTrue('Database' in c)
self.assertTrue('master' in c)
self.assertEqual(c['Database']['Engine'], 'sqlite')
self.assertEqual(c.repoPath, d.name)
d.cleanup()
def _createGitRepo(self, path):
check_output(('git', 'init'), cwd=path)
gC = Git(path)
gC._executeGitCommand('config', 'user.email test@localhost')
gC._executeGitCommand('config', 'user.name Test')
with open(os.path.join(path, 'README'), 'w') as f:
f.write('On master')
gC._executeGitCommand('add', '.')
		gC._executeGitCommand('commit', '-m "Initial Import"')
gC._executeGitCommand('branch', 'development')
gC._executeGitCommand('checkout', 'development', suppressStderr=True)
with open(os.path.join(path, 'README'), 'w') as f:
f.write('On development')
gC._executeGitCommand('add', '.')
gC._executeGitCommand('commit', '-m "Development branch added"')
gC._executeGitCommand('branch', 'gitdh')
gC._executeGitCommand('checkout', 'gitdh', suppressStderr=True)
with open(os.path.join(path, 'gitdh.conf'), 'w') as f:
f.write(self.cStr)
gC._executeGitCommand('add', '.')
gC._executeGitCommand('commit', '-m "Gitdh conf added"')
return gC
def _createBareGitRepo(self, path):
d = tempfile.TemporaryDirectory()
self._createGitRepo(d.name)
check_output(('git', 'clone', '-q', '--bare', d.name, path))
d.cleanup()
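# Reading sketch (editorial, grounded in the fixture above): both repository
# layouts carry the INI-style configuration as `gitdh.conf` on a dedicated
# `gitdh` branch, which Config.fromGitRepo() is expected to locate.
#   c = Config.fromGitRepo('/var/lib/gitolite/repositories/test.git')
#   c['master']['Path']  # -> '/home/www/production'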
|
frishberg/django
|
tests/messages_tests/test_fallback.py
|
Python
|
bsd-3-clause
| 6,536
| 0.000459
|
from django.contrib.messages import constants
from django.contrib.messages.storage.fallback import (
CookieStorage, FallbackStorage,
)
from django.test import SimpleTestCase
from .base import BaseTests
from .test_cookie import set_cookie_data, stored_cookie_messages_count
from .test_session import set_session_data, stored_session_messages_count
class FallbackTest(BaseTests, SimpleTestCase):
storage_class = FallbackStorage
def get_request(self):
self.session = {}
request = super(FallbackTest, self).get_request()
request.session = self.session
return request
def get_cookie_storage(self, storage):
return storage.storages[-2]
def get_session_storage(self, storage):
return storage.storages[-1]
def stored_cookie_messages_count(self, storage, response):
return stored_cookie_messages_count(self.get_cookie_storage(storage), response)
def stored_session_messages_count(self, storage, response):
return stored_session_messages_count(self.get_session_storage(storage))
def stored_messages_count(self, storage, response):
"""
Return the storage totals from both cookie and session backends.
"""
return (
self.stored_cookie_messages_count(storage, response) +
self.stored_session_messages_count(storage, response)
)
def test_get(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
# Set initial cookie data.
example_messages = [str(i) for i in range(5)]
set_cookie_data(cookie_storage, example_messages)
# Overwrite the _get method of the fallback storage to prove it is not
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._get = None
self.assertEqual(list(storage), example_messages)
def test_get_empty(self):
request = self.get_request()
storage = self.storage_class(request)
# Overwrite the _get method of the fallback storage to prove it is not
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._get = None
self.assertEqual(list(storage), [])
def test_get_fallback(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
        example_messages = [str(i) for i in range(5)]
set_cookie_data(cookie_storage, example_messages[:4] + [CookieStorage.not_finished])
set_session_data(session_storage, example_messages[4:])
self.assertEqual(list(storage), example_messages)
def test_get_fallback_only(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
example_messages = [str(i) for i in range(5)]
set_cookie_data(cookie_storage, [CookieStorage.not_finished], encode_empty=True)
set_session_data(session_storage, example_messages)
self.assertEqual(list(storage), example_messages)
def test_flush_used_backends(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
set_cookie_data(cookie_storage, ['cookie', CookieStorage.not_finished])
set_session_data(session_storage, ['session'])
# When updating, previously used but no longer needed backends are
# flushed.
response = self.get_response()
list(storage)
storage.update(response)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 0)
def test_no_fallback(self):
"""
(1) A short number of messages whose data size doesn't exceed what is
allowed in a cookie will all be stored in the CookieBackend.
(2) If the CookieBackend can store all messages, the SessionBackend
won't be written to at all.
"""
storage = self.get_storage()
response = self.get_response()
# Overwrite the _store method of the fallback storage to prove it isn't
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._store = None
for i in range(5):
storage.add(constants.INFO, str(i) * 100)
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 5)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 0)
def test_session_fallback(self):
"""
If the data exceeds what is allowed in a cookie, messages which did
not fit are stored in the SessionBackend.
"""
storage = self.get_storage()
response = self.get_response()
        # see comment in CookieTest.test_cookie_max_length
msg_size = int((CookieStorage.max_cookie_size - 54) / 4.5 - 37)
for i in range(5):
storage.add(constants.INFO, str(i) * msg_size)
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 4)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 1)
def test_session_fallback_only(self):
"""
Large messages, none of which fit in a cookie, are stored in the
SessionBackend (and nothing is stored in the CookieBackend).
"""
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, 'x' * 5000)
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 0)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 1)
|
akretion/logistics-center
|
stef_logistics/data/flow_delivery.py
|
Python
|
agpl-3.0
| 1,924
| 0
|
delivery_head = [
{
"seq": 1,
"len": 5,
"type": "A",
"col": "codenr",
"req": True,
"def": "E",
"allowed": ["E"],
"comment": "Code enregistrement",
},
{
"seq": 2,
"len": 20,
"type": "A",
"col": "cmdcli",
"req": Tru
|
e,
"comment": "N° de référence donneur d'ordre",
},
{
"seq": 3,
"len": 10,
"type": "A",
"col": "nomdos",
"req": True,
"comment": "Nom du dossier",
},
{
"seq": 4,
"le
|
n": 13,
"type": "I",
"col": "codgln",
"req": True,
"comment": "Code GLN (EAN/UCC-13) du site STEF-TFE",
},
{
"seq": 6,
"len": 8,
"type": "D1",
"col": "datliv",
"req": True,
"comment": "Date livr",
},
{
"seq": 7,
"len": 16,
"type": "A",
"col": "trsdst",
"req": True,
"comment": "Code du tiers destinataire à livrer",
},
]
delivery_line = [
{
"seq": 1,
"len": 5,
"type": "A",
"col": "codenr",
"req": True,
"def": "L",
"allowed": ["L"],
"comment": "Code du tiers destinataire à livrer",
},
{
"seq": 2,
"len": 20,
"type": "A",
"col": "cmdcli",
"req": True,
"comment": "N° de référence donneur d'ordre",
},
{
"seq": 3,
"len": 6,
"type": "I",
"col": "numlig",
"req": True,
"comment": "N° de ligne",
},
{
"seq": 4,
"len": 16,
"type": "A",
"col": "codprd",
"req": True,
"comment": "Code produit",
},
{
"seq": 6,
"len": 6,
"type": "I",
"col": "qliuc",
"req": False,
"comment": "Qté en UC",
},
]
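# Hypothetical rendering sketch (editorial; not part of the original flow
# definition): the "seq"/"len"/"col" entries above describe fixed-width
# record fields, so a record line could be assembled roughly like this:
def render_record_sketch(spec, row):
    """Pad or truncate each declared column of `row` to its fixed width."""
    return "".join(
        str(row.get(field["col"], field.get("def", ""))).ljust(field["len"])[:field["len"]]
        for field in spec
    )
# e.g. render_record_sketch(delivery_head, {"codenr": "E", "cmdcli": "CMD001"})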
|
thomazs/geraldo
|
site/newsite/site-geraldo/django/core/cache/backends/memcached.py
|
Python
|
lgpl-3.0
| 1,303
| 0.00307
|
"Memcached cache backend"
from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
from django.utils.encoding import smart_unicode, smart_str
from google.appengine.api import memcache
class CacheClass(BaseCache):
def __init__(self, server, params):
BaseCache.__init__(self, params)
self._cache = memcache.Client(server.split(';'))
def add(self, key, value, timeout=0):
if isinstance(value, unicode):
value = value.encode('utf-8')
return self._cache.add(smart_str(key), value, timeout or self.default_timeout)
def get(self, key, default=None):
        val = self._cache.get(smart_str(key))
if val is None:
return default
else:
if isinstance(val, basestring):
return smart_unicode(val)
else:
return val
def set(self, key, value, timeout=0):
if isinstance(value, unicode):
value = value.encode('utf-8')
        self._cache.set(smart_str(key), value, timeout or self.default_timeout)
def delete(self, key):
self._cache.delete(smart_str(key))
def get_many(self, keys):
return self._cache.get_multi(map(smart_str,keys))
def close(self, **kwargs):
self._cache.disconnect_all()
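# Usage sketch (editorial; the server string and params are assumptions, and
# this backend is normally instantiated by Django's cache framework rather
# than directly):
#   cache = CacheClass('127.0.0.1:11211', {'timeout': 300})
#   cache.set('greeting', u'hello')
#   cache.get('greeting')  # -> u'hello'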
|
CubicERP/odoo
|
addons/stock_landed_costs/stock_landed_costs.py
|
Python
|
agpl-3.0
| 20,934
| 0.004395
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.exceptions import Warning
from openerp.tools import float_compare
from openerp.tools.translate import _
import product
class stock_landed_cost(osv.osv):
_name = 'stock.landed.cost'
_description = 'Stock Landed Cost'
_inherit = 'mail.thread'
_track = {
'state': {
'stock_landed_costs.mt_stock_landed_cost_open': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'done',
},
}
def _total_amount(self, cr, uid, ids, name, args, context=None):
if context is None:
context = {}
currency_obj = self.pool.get('res.currency')
result = {}
local_context = context.copy()
for cost in self.browse(cr, uid, ids, context=context):
total = 0.0
local_context['date'] = cost.date
for line in cost.cost_lines:
price_unit = currency_obj.compute(cr, uid, line.currency_id.id, cost.company_id.currency_id.id, line.price_unit, context=local_context)
total += price_unit
result[cost.id] = total
return result
def _get_cost_line(self, cr, uid, ids, context=None):
cost_to_recompute = []
for line in self.pool.get('stock.landed.cost.lines').browse(cr, uid, ids, context=context):
cost_to_recompute.append(line.cost_id.id)
return cost_to_recompute
def get_valuation_lines(self, cr, uid, ids, picking_ids=None, context=None):
picking_obj = self.pool.get('stock.picking')
lines = []
if not picking_ids:
return lines
for picking in picking_obj.browse(cr, uid, picking_ids):
for move in picking.move_lines:
#it doesn't make sense to make a landed cost for a product that isn't set as being valuated in real time at real cost
if move.product_id.valuation != 'real_time' or move.product_id.cost_method != 'real':
continue
total_cost = 0.0
total_qty = move.product_qty
weight = move.product_id and move.product_id.weight * move.product_qty
volume = move.product_id and move.product_id.volume * move.product_qty
for quant in move.quant_ids:
total_cost += quant.cost
vals = dict(product_id=move.product_id.id, move_id=move.id, quantity=move.product_uom_qty, former_cost=total_cost * total_qty, weight=weight, volume=volume)
lines.append(vals)
if not lines:
raise osv.except_osv(_('Error!'), _('The selected picking does not contain any move that would be impacted by landed costs. Landed costs are only possible for products configured in real time valuation with real price costing method. Please make sure it is the case, or you selected the correct picking'))
return lines
_columns = {
'name': fields.char('Name', track_visibility='always', required=True, readonly=True, states={'draft':[('readonly',False)]}, copy=False),
'date': fields.date('Date', required=True, states={'done': [('readonly', True)]}, track_visibility='onchange', copy=False),
'picking_ids': fields.many2many('stock.picking', string='Pickings', states={'done': [('readonly', True)]}, copy=False),
        'cost_lines': fields.one2many('stock.landed.cost.lines', 'cost_id', 'Cost Lines', states={'done': [('readonly', True)]}, copy=True),
'valuation_adjustment_lines': fields.one2many('stock.valuation.adjustment.lines', 'cost_id', 'Valuation Adjustments', states={'done': [('readonly', True)]}),
'description': fields.text('Item Description', states={'done': [('readonly', True)]}),
'amount_total': fields.function(_total_amount, type='float', string='Total', digits_compute=dp.get_precision('Account'),
store={
                'stock.landed.cost': (lambda self, cr, uid, ids, c={}: ids, ['cost_lines'], 20),
'stock.landed.cost.lines': (_get_cost_line, ['price_unit', 'quantity', 'cost_id'], 20),
}, track_visibility='always'
),
'state': fields.selection([('draft', 'Draft'), ('done', 'Posted'), ('cancel', 'Cancelled')], 'State', readonly=True, track_visibility='onchange', copy=False),
'account_move_id': fields.many2one('account.move', 'Journal Entry', readonly=True, copy=False),
'account_journal_id': fields.many2one('account.journal', 'Account Journal', required=True, states={'done': [('readonly', True)]}),
'company_id': fields.many2one('res.company', "Company", required=True, states={'done': [('readonly', True)]}),
'currency_id': fields.related('company_id','currency_id', type='many2one', relation="res.currency", string="Currency", readonly=True),
}
_defaults = {
'name': "/",
'state': 'draft',
'date': fields.date.context_today,
'company_id': lambda self, cr, uid, c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id,
}
def _create_accounting_entries(self, cr, uid, line, move_id, qty_out, context=None):
product_obj = self.pool.get('product.template')
cost_product = line.cost_line_id and line.cost_line_id.product_id
if not cost_product:
return False
accounts = product_obj.get_product_accounts(cr, uid, line.product_id.product_tmpl_id.id, context=context)
debit_account_id = accounts['property_stock_valuation_account_id']
already_out_account_id = accounts['stock_account_output']
credit_account_id = line.cost_line_id.account_id.id or cost_product.property_account_expense.id or cost_product.categ_id.property_account_expense_categ.id
if not credit_account_id:
raise osv.except_osv(_('Error!'), _('Please configure Stock Expense Account for product: %s.') % (cost_product.name))
return self._create_account_move_line(cr, uid, line, move_id, credit_account_id, debit_account_id, qty_out, already_out_account_id, context=context)
def _create_account_move_line(self, cr, uid, line, move_id, credit_account_id, debit_account_id, qty_out, already_out_account_id, context=None):
"""
Generate the account.move.line values to track the landed cost.
Afterwards, for the goods that are already out of stock, we should create the out moves
"""
aml_obj = self.pool.get('account.move.line')
base_line = {
'name': line.name,
'move_id': move_id,
'product_id': line.product_id.id,
'quantity': line.quantity,
}
debit_line = dict(base_line, account_id=debit_account_id)
credit_line = dict(base_line, account_id=credit_account_id)
diff = line.additional_landed_cost
if diff > 0:
debit_line['debit'] = diff
credit_line['credit'] = diff
else:
# negative cost, reverse the entry
debit_line['credit'] = -diff
credit_line['debit'] = -diff
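        # Illustration (assumed figures): additional_landed_cost = 100.0 debits
        # the stock valuation account and credits the expense account by 100.0
        # each; -100.0 swaps the legs so both debit and credit stay positive,
        # as account.move.line expects.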
aml_obj.create(cr, uid, debit_line, context=context)
        aml_obj.create(cr, uid, credit_line, context=context)
        return True

cstipkovic/spidermonkey-research | testing/mozharness/scripts/b2g_desktop_multilocale.py | Python | mpl-2.0 | 9,106 | 0.002745 |
#!/usr/bin/env python
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
import sys
import os
# load modules from parent dir
sys.path.insert(1, os.path.dirname(sys.path[0]))
# import the guts
from mozharness.base.config import parse_config_file
from mozharness.base.errors import MakefileErrorList
from mozharness.base.log import ERROR, FATAL
from mozharness.base.script import BaseScript
from mozharness.base.vcs.vcsbase import VCSMixin
from mozharness.mozilla.l10n.locales import GaiaLocalesMixin, LocalesMixin
class B2gMultilocale(LocalesMixin, BaseScript, VCSMixin, GaiaLocalesMixin):
""" This is a helper script that requires MercurialBuildFactory
logic to work. We may eventually make this a standalone
script.
We could inherit MercurialScript instead of BaseScript + VCSMixin
"""
config_options = [
[["--locale"], {
"action": "extend",
"dest": "locales",
"type": "string",
"help": "Specify the locale(s) to repack"
}],
[["--gaia-languages-file"], {
"dest": "gaia_languages_file",
"help": "languages file for gaia multilocale profile",
}],
[["--gecko-languages-file"], {
"dest": "locales_file",
"help": "languages file for gecko multilocale",
}],
[["--gecko-l10n-root"], {
"dest": "hg_l10n_base",
"help": "root location for gecko l10n repos",
}],
[["--gecko-l10n-base-dir"], {
"dest": "l10n_dir",
"help": "dir to clone gecko l10n repos into, relative to the work directory",
}],
[["--merge-locales"], {
"dest": "merge_locales",
"help": "Dummy option to keep from burning. We now always merge",
}],
[["--gaia-l10n-root"], {
"dest": "gaia_l10n_root",
"help": "root location for gaia l10n repos",
}],
[["--gaia-l10n-base-dir"], {
"dest": "gaia_l10n_base_dir",
"default": "build-gaia-l10n",
"help": "dir to clone l10n repos into, relative to the work directory",
}],
[["--gaia-l10n-vcs"], {
|
"dest": "gaia_l10n_vcs",
"help": "vcs to use for gaia l10n",
}],
]
def __init__(self, require_config_file=False):
LocalesMixin.__init__(self)
BaseScript.__init__(self,
                            config_options=self.config_options,
                            all_actions=[
'pull',
'build',
'summary',
],
require_config_file=require_config_file,
# Default configuration
config={
'gaia_l10n_vcs': 'hg',
'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
'locales_dir': 'b2g/locales',
'l10n_dir': 'gecko-l10n',
# I forget what this was for. Copied from the Android multilocale stuff
'ignore_locales': ["en-US", "multi"],
# This only has 2 locales in it. We probably need files that mirror gaia's locale lists
                                # We need two sets of locales files because the locale names in gaia differ from gecko's, e.g. 'es' vs 'es-ES'
                                # We'll need to override this for localizer builds
'locales_file': 'build/b2g/locales/all-locales',
'mozilla_dir': 'build',
'objdir': 'obj-firefox',
'merge_locales': True,
'work_dir': '.',
'vcs_output_timeout': 600, # 10 minutes should be enough for anyone!
},
)
def _pre_config_lock(self, rw_config):
super(B2gMultilocale, self)._pre_config_lock(rw_config)
if 'pull' in self.actions:
message = ""
if 'gaia_languages_file' not in self.config:
message += 'Must specify --gaia-languages-file!\n'
if 'gaia_l10n_root' not in self.config:
message += 'Must specify --gaia-l10n-root!\n'
if 'locales_file' not in self.config:
message += 'Must specify --gecko-languages-file!\n'
if 'hg_l10n_base' not in self.config:
message += 'Must specify --gecko-l10n-root!\n'
if message:
self.fatal(message)
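    # Example invocation exercising the checks above (paths and URLs are
    # hypothetical, for illustration only):
    #   python b2g_desktop_multilocale.py pull build summary \
    #     --gaia-languages-file locales/languages_all.json \
    #     --gaia-l10n-root https://hg.mozilla.org/gaia-l10n \
    #     --gecko-languages-file build/b2g/locales/all-locales \
    #     --gecko-l10n-root https://hg.mozilla.org/l10n-central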
def query_abs_dirs(self):
if self.abs_dirs:
return self.abs_dirs
abs_dirs = LocalesMixin.query_abs_dirs(self)
c = self.config
dirs = {
'src': os.path.join(c['work_dir'], 'gecko'),
'work_dir': abs_dirs['abs_work_dir'],
'gaia_l10n_base_dir': os.path.join(abs_dirs['abs_work_dir'], self.config['gaia_l10n_base_dir']),
'abs_compare_locales_dir': os.path.join(abs_dirs['base_work_dir'], 'compare-locales'),
}
abs_dirs.update(dirs)
self.abs_dirs = abs_dirs
return self.abs_dirs
# Actions {{{2
def pull(self):
""" Clone gaia and gecko locale repos
"""
languages_file = self.config['gaia_languages_file']
l10n_base_dir = self.query_abs_dirs()['gaia_l10n_base_dir']
l10n_config = {
'root': self.config['gaia_l10n_root'],
'vcs': self.config['gaia_l10n_vcs'],
}
self.pull_gaia_locale_source(l10n_config, parse_config_file(languages_file).keys(), l10n_base_dir)
self.pull_locale_source()
gecko_locales = self.query_locales()
# populate b2g/overrides, which isn't in gecko atm
dirs = self.query_abs_dirs()
for locale in gecko_locales:
self.mkdir_p(os.path.join(dirs['abs_l10n_dir'], locale, 'b2g', 'chrome', 'overrides'))
self.copytree(os.path.join(dirs['abs_l10n_dir'], locale, 'mobile', 'overrides'),
os.path.join(dirs['abs_l10n_dir'], locale, 'b2g', 'chrome', 'overrides'),
error_level=FATAL)
def build(self):
""" Do the multilocale portion of the build + packaging.
"""
dirs = self.query_abs_dirs()
gecko_locales = self.query_locales()
make = self.query_exe('make', return_type='string')
env = self.query_env(
partial_env={
'LOCALE_MERGEDIR': dirs['abs_merge_dir'].replace(os.sep, '/'),
'MOZ_CHROME_MULTILOCALE': 'en-US ' + ' '.join(gecko_locales),
}
)
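        # With gecko_locales == ['de', 'fr'] (assumed), the partial env above
        # yields MOZ_CHROME_MULTILOCALE='en-US de fr', which the packaging make
        # targets consume to bundle each locale's chrome into the build.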
merge_env = self.query_env(
partial_env={
'PATH': '%(PATH)s' + os.pathsep + os.path.join(dirs['abs_compare_locales_dir'], 'scripts'),
'PYTHONPATH': os.path.join(dirs['abs_compare_locales_dir'],
'lib'),
}
)
for locale in gecko_locales:
command = make + ' merge-%s L10NBASEDIR=%s LOCALE_MERGEDIR=%s' % (
locale, dirs['abs_l10n_dir'], dirs['abs_merge_dir'].replace(os.sep, '/'))
status = self.run_command(command,
cwd=dirs['abs_locales_dir'],
error_list=MakefileErrorList,
env=merge_env)
command = make + ' chrome-%s L10NBASEDIR=%s LOCALE_MERGEDIR=%s' % (
locale, dirs['abs_l10n_dir'], dirs['abs_merge_dir'].replace(os.sep, '/'))
status = self.run_command(command,
                                      cwd=dirs['abs_locales_dir'],
                                      error_list=MakefileErrorList,
                                      env=env)