repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
choderalab/FAHMunge | fahmunge/_version.py | Python | lgpl-2.1 | 18,449 | 0 |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = ""
cfg.parentdir_prefix = "None"
cfg.versionfile_source = "fahmunge/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
roo | tdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
| "dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
|
tfXYZ/tfXYZ | apps/classification.py | Python | gpl-3.0 | 4,593 | 0.014805 | # Import from standard libraries
from __future__ import division, absolute_import, print_function
import tensorflow as tf, importlib
from six import iteritems
# Import from our libraries
import core.losses
from core.common import Channel, get_debug_session
from core.evaluation import basic_stats_numpy, classification_accuracy
from core.models import common_branch
from apps.base import BaseApp
# The flags
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('loss_function', 'core.losses.ce_loss', '')
tf.app.flags.DEFINE_string('numpy_channels', '', '')
class Classification(BaseApp):
"""
Basic application class for a classification task with a single/multiple label(s), ce loss, an endpoint
called 'bottleneck'
"""
def __init__(self, n_classes, labels, name, dataset_dir, **kwargs):
super(Classification, self).__init__(name, dataset_dir, **kwargs)
self.test_samples = 1
self.test_mb = 100
self.raw_name = self.name.split('_')[0]
# Just to allow non-lists in case of only one label
if not isinstance(labels, list):
self.labels = [labels]
self.n_classes = [n_classes]
else:
self.labels = labels
self.n_classes = n_classes
def create_filename_queue(self, is_train, files, lengths):
return tf.train.string_input_producer(files, capacity=500000)
def precache_processing(self, is_train, val_dict):
if is_train:
return val_dict
return {k: tf.expand_dims(v, 0) for k, v in iteritems(val_dict)}
def postcache_processing(self, is_train, val_dict, tfr_structure):
return val_dict
def compute_loss(self, global_endpoints, module_endpoints):
# Compute the different losses
losses = []
for l, n in zip(self.labels, self.n_classes):
logit_name = '{}_{}_logits'.format(self.raw_name, l)
sep_index = FLAGS.loss_function.rindex('.')
containing_module = importlib.import_module(FLAGS.loss_function[:sep_index])
loss_func = getattr(containing_module, FLAGS.loss_function[sep_index+1:])
loss = loss_func(module_endpoints[logit_name],
module_endpoints[l],
n_classes=n)
losses.append(loss)
# Add summaries
loss_name = self.raw_name if len(self.labels) == 1 else '{}_{}'.format(self.raw_name, l)
tf.summary.scalar('{}_loss'.format(loss_name),
loss)
return losses
def monitoring_channels(self, is_train, global_endpoints, module_endpoints):
channels = {}
for l in self.labels:
logit_name = '{}_{}_logits'.format(self.raw_name, l)
logits = module_endpoints[logit_name]
labels = module_endpoints[l]
if is_train:
accuracy_name = 'accuracy' if len(self.labels) == 1 else '{}_{}_accuracy'.format(self.raw_name, l)
channels[accuracy_name] = classification_accuracy(is_train, logits, labels)
else:
channels[logit_name] = Channel(logits, False, True)
channels['{}_{}_labels'.format(self.raw_name, l)] = Channel(labels, False, True)
return channels
def numpy_channels(self, concate_aggregators, step):
ret = {}
if FLAGS.numpy_channels:
sep_index = FLAGS.numpy_channels.rindex('.')
containing_module = importlib.import_module(FLAGS.numpy_channels[:sep_index])
additional_channels = getattr(containing_module, FLAGS.numpy_channels[sep_index+1:])
ret = additional_channels(self, concate_aggregators, step)
for l in self.labels:
logit_name = '{}_{}_logits'.format(self.raw_name, l)
logits = concate_aggregators[logit_name]
labels = concate_aggregators['{}_{}_labels'.format(self.raw_name, l)]
probs, preds, labels, acc, uar, conf_mat, auprc, acc_at_3, acc_at_5 = basic_stats_numpy(logits, labels, self.test_samples)
name_prefix = '' if len(self.labels | ) == 1 else '{}_'.format(l)
ret.update({'{}UAR'.format(name_prefix): uar,
'{}acc'.format(name_prefix): acc,
'{}conf_mat'.format(name_prefix): conf_mat,
'{}auprc'. | format(name_prefix): auprc,
'{}acc@3'.format(name_prefix): acc_at_3,
'{}acc@5'.format(name_prefix): acc_at_5})
return ret
def top_layers(self, is_train, global_endpoints, module_endpoints):
inputs = module_endpoints['bottleneck']
# The top layers
for l, n in zip(self.labels, self.n_classes):
out = common_branch(is_train, inputs, n, '{}_{}_top'.format(self.raw_name, l))
logit_name = '{}_{}_logits'.format(self.raw_name, l)
module_endpoints[logit_name] = out
|
irl/gajim | src/gajim-remote.py | Python | gpl-3.0 | 26,570 | 0.005608 | # -*- coding:utf-8 -*-
## src/gajim-remote.py
##
## Copyright (C) 2005-2006 Dimitur Kirov <dkirov AT gmail.com>
## Nikos Kouremenos <kourem AT gmail.com>
## Copyright (C) 2005-2014 Yann Leboulanger <asterix AT lagaule.org>
## Copyright (C) 2006 Junglecow <junglecow AT gmail.com>
## Travis Shirk <travis AT pobox.com>
## Copyright (C) 2006-2008 Jean-Marie Traissard <jim AT lapin.org>
## Copyright (C) 2007 Julien Pivotto <roidelapluie AT gmail.com>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##
# gajim-remote help will show you the D-BUS API of Gajim
import sys
import locale
import urllib
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL) # ^C exits the application
from common import exceptions
from common import i18n # This installs _() function
from common.i18n import Q_
try:
PREFERRED_ENCODING = locale.getpreferredencoding()
except Exception:
PREFERRED_ENCODING = 'UTF-8'
def send_error(error_message):
'''Writes error message to stderr and exits'''
print(error_message, file=sys.stderr)
sys.exit(1)
try:
import dbus
import dbus.service
import dbus.glib
# test if dbus-x11 is installed
bus = dbus.SessionBus()
except Exception:
print(_('D-Bus is not present on this machine or python module is missing'))
sys.exit(1)
OBJ_PATH = '/org/gajim/dbus/RemoteObject'
INTERFACE = 'org.gajim.dbus.RemoteInterface'
SERVICE = 'org.gajim.dbus'
BASENAME = 'gajim-remote'
class GajimRemote:
def __init__(self):
self.argv_len = len(sys.argv)
# define commands dict. Prototype :
# {
# 'command': [comment, [list of arguments] ]
# }
#
# each argument is defined as a tuple:
# (argument name, help on argument, is mandatory)
#
self.commands = {
'help': [
_('Shows a help on specific command'),
[
#User gets help for the command, specified by this parameter
(_('command'),
_('show help on command'), False)
]
],
'toggle_roster_appearance': [
_('Shows or hides the roster window'),
[]
],
'show_next_pending_event': [
_('Pops up a window with the next pending event'),
[]
],
'list_contacts': [
_('Prints a list of all contacts in the roster. Each contact '
'appears on a separate line'),
[
(Q_('?CLI:account'), _('show only contacts of the given account'),
False)
]
],
'list_accounts': [
_('Prints a list of registered accounts'),
[]
],
'change_status': [
_('Changes the status of account or accounts'),
[
#offline, online, chat, away, xa, dnd, invisible should not be translated
(Q_('?CLI:status'), _('one of: offline, online, chat, away, xa, dnd, invisible. If not set, use account\'s previous status'), False),
(Q_('?CLI:message'), _('status message'), False),
(Q_('?CLI:account'), _('change status of account "account". '
'If not specified, try to change status of all accounts that have '
'"sync with global status" option set'), False)
]
],
'set_priority': [
_('Changes the priority of account or accounts'),
[
(Q_('?CLI:priority') | , _('priority you want to give to the account'),
| True),
(Q_('?CLI:account'), _('change the priority of the given account. '
'If not specified, change status of all accounts that have'
' "sync with global status" option set'), False)
]
],
'open_chat': [
_('Shows the chat dialog so that you can send messages to a contact'),
[
('jid', _('JID of the contact that you want to chat with'),
True),
(Q_('?CLI:account'), _('if specified, contact is taken from the '
'contact list of this account'), False),
(Q_('?CLI:message'),
_('message content. The account must be specified or ""'),
False)
]
],
'send_chat_message': [
_('Sends new chat message to a contact in the roster. Both OpenPGP key '
'and account are optional. If you want to set only \'account\', '
'without \'OpenPGP key\', just set \'OpenPGP key\' to \'\'.'),
[
('jid', _('JID of the contact that will receive the message'), True),
(Q_('?CLI:message'), _('message contents'), True),
(_('pgp key'), _('if specified, the message will be encrypted '
'using this public key'), False),
(Q_('?CLI:account'), _('if specified, the message will be sent '
'using this account'), False),
]
],
'send_single_message': [
_('Sends new single message to a contact in the roster. Both OpenPGP key '
'and account are optional. If you want to set only \'account\', '
'without \'OpenPGP key\', just set \'OpenPGP key\' to \'\'.'),
[
('jid', _('JID of the contact that will receive the message'), True),
(_('subject'), _('message subject'), True),
(Q_('?CLI:message'), _('message contents'), True),
(_('pgp key'), _('if specified, the message will be encrypted '
'using this public key'), False),
(Q_('?CLI:account'), _('if specified, the message will be sent '
'using this account'), False),
]
],
'send_groupc |
apache/cloudstack-ec2stack | tests/utils.py | Python | apache-2.0 | 2,163 | 0 | #!/usr/bin/env python
# encoding: utf-8
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, | Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless | required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
class FlaskTestCaseMixin(object):
@staticmethod
def _html_data(kwargs):
if not kwargs.get('content_type'):
kwargs['content_type'] = 'application/x-www-form-urlencoded'
return kwargs
@staticmethod
def _request(method, *args, **kwargs):
return method(*args, **kwargs)
def post(self, *args, **kwargs):
return (
self._request(self.client.post, *args, **self._html_data(kwargs))
)
def assert_status_code(self, response, status_code):
self.assertEquals(status_code, response.status_code)
return response
def assert_ok(self, response):
return self.assert_status_code(response, 200)
def assert_bad_request(self, response):
return self.assert_status_code(response, 400)
def assert_not_found(self, response):
return self.assert_status_code(response, 404)
@staticmethod
def get_example_data():
data = {
'SignatureVersion': '2',
'AWSAccessKeyId': 'ExampleAPIKey',
'Version': '2013-10-15',
'Timestamp': '2014-02-19T23:34:43.868347',
'SignatureMethod': 'HmacSHA256',
'Signature': 'g7HMf6RY4oCeaBaea0zlObjVX43NEH8yv3pclvu+Ibo=',
'Action': 'CreateKeyPair'
}
return data
|
archivsozialebewegungen/AlexandriaBase | tests/servicestests/test_database_upgrade_service.py | Python | gpl-3.0 | 1,154 | 0.006066 | '''
Created on 28.06.2016
@author: michael
'''
import unittest
from alexandriabase.services import DatabaseUpgradeService
from daotests.test_base import DatabaseBaseTest
from alexandriabase.daos import RegistryDao
class DatabaseUpgradeServiceTest(Da | tabaseBaseTest):
def setUp(self):
super().setUp()
self.upgrade_service = DatabaseUpgradeService(self.engine)
def tearDown(self):
super().tearDown()
def test | Upgrade(self):
self.assertTrue(self.upgrade_service.is_update_necessary())
self.upgrade_service.run_update()
self.assertFalse(self.upgrade_service.is_update_necessary())
def testFailingUpgrade(self):
registry_dao = RegistryDao(self.engine)
registry_dao.set('version', 'not_existing')
self.assertTrue(self.upgrade_service.is_update_necessary())
expected_exception = False
try:
self.upgrade_service.run_update()
except Exception:
expected_exception = True
self.assertTrue(expected_exception)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() |
treeform/pystorm | examples/helloworld.py | Python | mit | 147 | 0.006803 | # helloworld.py
#
# familiar test program, demonst | rating py2js conversion
def helloworld(suffix):
print "hello world"+suffix
helloworld("!")
| |
OpenDroneMap/WebODM | app/tests/test_app.py | Python | agpl-3.0 | 9,412 | 0.001381 | from django.contrib.auth.models import User, Group
from django.test import Client
from rest_framework import status
from app.models import Project, Task
from app.models import Setting
from app.models import Theme
from webodm import settings
from .classes import BootTestCase
from django.core.exceptions import ValidationError
class TestApp(BootTestCase):
fixtures = ['test_processingnodes', ]
def setUp(self):
self.credentials = {
'username': 'testuser',
'password': 'test1234',
'email': 'test@mail.com'}
# Create a test Group
my_group, created = Group.objects.get_or_create(name='test_group')
# Add user to test Group
User.objects.get(pk=1).groups.add(my_group)
def test_user_login(self):
c = Client()
# User points the browser to the landing page
res = c.post('/', follow=True)
# the user is not logged in
self.assertFalse(res.context['user'].is_authenticated)
# and is redirected to the login page
self.assertRedirects(res, '/login/')
# The login page is being rendered by the correct template
self.assertTemplateUsed(res, 'registration/login.html')
# asks the user to login using a set of valid credentials
res = c.post('/login/', data=self.credentials, follow=True)
# The system acknowledges him
self.assertTrue(res.context['user'].is_authenticated)
# and moves him at the dashboard
self.assertTemplateUsed(res, 'app/dashboard.html')
def test_views(self):
c = Client()
# Connecting to dashboard without auth redirects to /
res = c.get('/dashboard/', follow=True)
self.assertFalse(res.context['user'].is_authenticated)
self.assertRedirects(res, '/login/?next=/dashboard/')
res = c.get('/processingnode/1/', follow=True)
self.assertRedirects(res, '/login/?next=/processingnode/1/')
res = c.get('/map/project/1/', follow=True)
self.assertRedirects(res, '/login/?next=/map/project/1/')
res = c.get('/3d/project/1/task/1/', follow=True)
self.assertRedirects(res, '/login/?next=/3d/project/1/task/1/')
# Login
c.post('/login/', data=self.credentials, follow=True)
# We should have a project created from the dashboard
self.assertTrue(Project.objects.count() >= 1)
# Can access API page
res = c.get('/api/')
self.assertTrue(res.status_code == status.HTTP_200_OK)
# We can access a processingnode view that exists
res = c.get('/processingnode/1/')
self.assertTrue(res.status_code == 200)
self.assertTemplateUsed(res, 'app/processing_node.html')
# We can access a processingnode that is offline
# (and there's a warning message when we do that)
res = c.get('/processingnode/2/')
self.assertTrue(res.status_code == 200)
self.assertTemplateUsed(res, 'app/processing_node.html')
message = list(res.context['messages'])[0]
self.assertEqual(message.tags, 'warning')
self.assertTrue("offline" in message.message)
res = c.get('/processingnode/9999/')
self.assertTrue(res.status_code == 404)
res = c.get('/processingnode/abc/')
self.assertTrue(res.status_code == 404)
# /map/ and /3d/ views
user = User.objects.get(username="testuser")
other_user = User.objects.get(username="testuser2")
project = Project.objects.create(owner=user)
task = Task.objects.create(project=project)
other_project = Project.objects.create(owner=other_user)
other_task = Task.objects.create(project=other_project)
# Cannot access a project that we have no access to, or that does not exist
for project_id in [other_project.id, 99999]:
res = c.get('/map/project/{}/'.format(project_id))
self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)
# We can access a project that we have access to
res = c.get('/map/project/{}/'.format(project.id))
self.assertTrue(res.status_code == status.HTTP_200_OK)
# 3D views need project and task parameters
res = c.get('/3d/project/{}/'.format(project.id))
self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)
# Cannot access a 3d view for a task we have no access to
res = c.get('/3d/project/{}/task/{}/'.format(other_project.id, other_task.id))
self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)
# Can access 3d view for task we have access to
res = c.get('/3d/project/{}/task/{}/'.format(project.id, task.id))
self.assertTrue(res.status_code == status.HTTP_200_OK)
# Cannot access public URLs unless a task is shared
def test_public_views(client, expectedStatus):
res = client.get('/public/task/{}/map/'.format(task.id))
self.assertTrue(res.status_code == expectedStatus)
res = client.get('/public/task/{}/3d/'.format(task.id))
self.assertTrue(res.status_code == expectedStatus)
res = client.get('/public/task/{}/iframe/3d/'.format(task.id))
self.assertTrue(res.status_code == expectedStatus)
res = client.get('/public/task/{}/iframe/map/'.format(task.id))
self.assertTrue(res.status_code == expectedStatus)
res = client.get('/public/task/ | {}/json/'.format(task.id))
self.assertTrue(res.status_code == expectedStatus)
test_public_views(c, status.HTTP_404_NOT_FOUND)
# Share task
task.public = True
task.save()
# Can now access URLs even as anonymous user
ac = Client()
test_public_views(ac, s | tatus.HTTP_200_OK)
def test_admin_views(self):
c = Client()
c.login(username='testsuperuser', password='test1234')
settingId = Setting.objects.all()[0].id # During tests, sometimes this is != 1
themeId = Theme.objects.all()[0].id # During tests, sometimes this is != 1
# Can access admin menu items
admin_menu_items = ['/admin/app/setting/{}/change/'.format(settingId),
'/admin/app/theme/{}/change/'.format(themeId),
'/admin/',
'/admin/app/plugin/',
'/admin/auth/user/',
'/admin/auth/group/',
]
for url in admin_menu_items:
res = c.get(url)
self.assertEqual(res.status_code, status.HTTP_200_OK)
# Cannot access dev tools (not in dev mode)
settings.DEV = False
self.assertEqual(c.get('/dev-tools/').status_code, status.HTTP_404_NOT_FOUND)
settings.DEV = True
# Can access in dev mode
self.assertEqual(c.get('/dev-tools/').status_code, status.HTTP_200_OK)
# Cannot access admin views as normal user
c.logout()
c.login(username='testuser', password='test1234')
# Can never access dev tools as user, even in dev mode
self.assertRedirects(c.get('/dev-tools/', follow=True), '/login/?next=/dev-tools/')
settings.DEV = False
for url in admin_menu_items:
res = c.get(url, follow=True)
self.assertRedirects(res, '/admin/login/?next={}'.format(url))
def test_default_group(self):
# It exists
self.assertTrue(Group.objects.filter(name='Default').count() == 1)
# Verify that all new users are assigned to default group
u = User.objects.create_user(username="default_user")
u.refresh_from_db()
self.assertTrue(u.groups.filter(name='Default').count() == 1)
def test_projects(self):
# Get a normal user
user = User.objects.get(username="testuser")
self.assertFalse(user.is_superuser)
# Create a new project
p = Project.objects.create(owner=user, name="test")
# Have the proper permissions been set?
self.assertTrue(user.has_perm("view_project", p))
s |
endlessm/chromium-browser | third_party/catapult/dashboard/dashboard/edit_bug_labels_test.py | Python | bsd-3-clause | 2,381 | 0.00168 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import unittest
import webapp2
import webtest
from google.appengine.api import users
from dashboard import edit_bug_labels
from dashboard.c | ommon import testing_common
from dashboard.common import xsrf
from dashboard.models import bug_label_patterns
class EditBugLabelsTest(testing_common.TestCase):
def setUp(self):
super(EditBugLabelsTest, self).setUp()
app = webapp2.WSGIApplication(
| [('/edit_bug_labels', edit_bug_labels.EditBugLabelsHandler)])
self.testapp = webtest.TestApp(app)
# Set the current user to be an admin.
self.SetCurrentUser('x@google.com', is_admin=True)
def tearDown(self):
super(EditBugLabelsTest, self).tearDown()
self.UnsetCurrentUser()
def testBugLabelPattern_AddAndRemove(self):
self.testapp.post('/edit_bug_labels', {
'action': 'add_buglabel_pattern',
'buglabel_to_add': 'Performance-1',
'pattern': '*/*/Suite1/*',
'xsrf_token': xsrf.GenerateToken(users.get_current_user()),
})
# The list of patterns should now contain the pattern that was added.
self.assertEqual(
['*/*/Suite1/*'],
bug_label_patterns.GetBugLabelPatterns()['Performance-1'])
# Add another pattern for the same bug label.
self.testapp.post('/edit_bug_labels', {
'action': 'add_buglabel_pattern',
'buglabel_to_add': 'Performance-1',
'pattern': '*/*/Suite2/*',
'xsrf_token': xsrf.GenerateToken(users.get_current_user()),
})
# The list of patterns should now contain both patterns.
self.assertEqual(
['*/*/Suite1/*', '*/*/Suite2/*'],
bug_label_patterns.GetBugLabelPatterns()['Performance-1'])
# Remove the BugLabelPattern entity.
self.testapp.post('/edit_bug_labels', {
'action': 'remove_buglabel_pattern',
'buglabel_to_remove': 'Performance-1',
'xsrf_token': xsrf.GenerateToken(users.get_current_user()),
})
# It should now be absent from the datastore.
self.assertNotIn(
'Performance-1', bug_label_patterns.GetBugLabelPatterns())
if __name__ == '__main__':
unittest.main()
|
tobiz/OGN-Flight-Logger_V3 | flogger3.py | Python | gpl-3.0 | 77,519 | 0.010359 | #
# FLOGGER
#
# This program reads records from the OGN network processing only
# those received from a specified site and registration marks, eg aircraft belonging to
# a specific club.
# It writes each record to a database and at the end of each day process
# them to determine the flight times of each flight for each machine.
# Phase 1 will just collect the data.
# Phase 2 will process the data into a new table
# Phase 3 will then format that information with the intention of
# it being used to be checked against the manual log books.
# Phase 4 will remove old flight and track file older than a certain date
# The intention is that it will collect the data between the hours of daylight,
# producing the summary at the end of the day.
# This program could be run on a Raspberry Pi as it is so low powered
#
# Altitude in metres.
# Land speed in km/h.
# Latitude, west is negative decimal degrees.
# Longitude, south is negative decimal degrees.
#
# This program is covered by the GNU GENERAL PUBLIC LICENSE.
# See the file 'LICENSE' for details
#
#
# 20150312: First working version
# Usage: Run flogger.py to collect the daily flight data then
# run process.py which processes the raw data into a table flights in the database flogger.sgl3
# This first version is very experimental, it is proof of concept and processes. The code needs to
# be 'improved'.
# To be done: 1) The program should be run each day between 0900 and sunset. This should be handled by cron
# to start the program at a time specified in settings which then calculates sunrise and suspends
# until then. Once running the program determines sunset and stopping itself at that time. It also needs
# to handle power outages (not sure how at the moment)
# 2) The Flarm code to registration code needs to addressed using OGNs new database.
# 20150505 Second working version
# Only need to run flogger.py, it now handles collection of data during daylight hours and processes
# after sunset (assumes gliders only fly during daylight hours)
# Now reads aircraft registration data from Flarmnet to build own internal table
# 20150515 Third working version
# 1) APRS user and APRS passcode have to be supplied on the command line and not in settings
# 2) Changes to flogger_process_log_old to correct errors - still in testing
#
# 20150520 Fourth working version (V0.1.0)
# 1) On aircraft stop set altitude to initial value else highest value for any flight of the day
# will be the one compared against as the maximum and not the max for a specific flight.
# Bug 20150520-1 Assigned
# 2) Flights table only contains flights for one day and not all previous days flights
# Bug 20150520-2 Assigned
#
# 20150527 Fifth working version (V0.1.1)
# Test version for:
# 1) Bug 20150520-1
# 2) Bug 20150520-2
#
# 20150529 First beta test version (V0.2.0)
# 1) Bug 20150520-1 Solved
# 2) Bug 20150520-2 Solved
# 3) Enhancement - dump days flights table as .csv file
#
# 20150530 Correction to first beta test version (V0.2.1)
# 1) Correction to dump flights to .csv - to make it work!
#
# 20150604 Added enhancements to version V0.2 (V0.2.2)
# 1) Allowance for short duration flight
# 2) Use of geocoding to determine airfield position data - proposed by D.Spreitz
#
# To be done: 1) Tidy up code, remove all redundant testing comments
# 2) A lot more testing - some features might still not work!
# 3) Consider how this may be run as a service with standard start, stop etc options
# 4) Consider adding full logging with levels
# 5) Review the algorithm to determine if aircraft is on the ground. At the moment it determines
# this by the GPS ground speed being zero (ie below a defined value); the ground speed could be zero
# if the wind speed and airspeed are the same but opposite, eg when ridge flying. The algorithm could use
# the altitude as well, eg if ground speed is zero but altitude is greater than home airfield altitude then
# 'we're flying'. Note this still has issues!
# 6) Need to consider sending 'keep alives' when in the sleep state. Solved, not needed
# 7) There's a problem concerning character codes when building the flarm database which needs solving, only show in 1 record
#
# 20160208 1) Add modification to sequence tracks per flight by flarm record timestamp. Using multiple beacons can result in
# track points that are out of sequence when based on order received due to Internet time delays, hence
# use the GPS timestamp recorded in the data taken and sent by flarm (assumes timestamp is from Flarm!).
# 2) Also added graceful exit on Cntrl-C
#
# 20160323 1) Added optional output of track data in IGC format
# 2) Added optional deletion of old flight .csv and track .csv/.igc files
#
# 20160514 1) Use $ pipreqs --force /path/to/project to generate requirements.txt for pip install
#
# 20160518 1) Added attempt to load earlier version Linux libfap if current fails
#
# 20161026 1) Added flogger_find_tug code. This tries to determine which tug, if any, launched a particular glider.
# Note this doesn't always get the right result, but then nor does OGN Flight Log! This could be due to tugs
# sometimes powering down if a launch is not imminent. Gliders are likely to be always powered on and Flarm operating.
# Hence when it becomes time to launch the tug powers up, Flarm is now on but takes some time for the signal to be
# acquired and put onto and processed by the APRS system. It is therefore possible for the launch to take place
# with the take-off times for tug and glider to be too far displaced (from the APRS data) for flogger-find-tug
# to determine the launch has happened. The solution is possibly to increase the time delta used between glider and
# tug take-off but this could result in false positives, some fine tuning maybe needed. Interested to know if
# OGN Flight Log has similar reasoning.
#
# 20161108 1) Rewrote phase 2 flight log processing to be much simpler. Phase 2 puts flights into the flight_group
# table such that all flights by a single aircraft have the same group id. This enables each flight to
# be determined to be a distinct flight from its predecessor or not.
#
# 20170201: 1) Added simple function test_YorN to test for Y|y or N|n
# | 2) Started developing using Eclipse Neon.2 (4.6.2)
#
import socket
#from libfap import *
#import flogger_settings
import string
import date | time
import time
import sqlite3
import pytz
from datetime import timedelta
import sys
from flarm_db import flarmdb
from pysqlite2 import dbapi2 as sqlite
from open_db import opendb
import ephem
#from flogger_process_log_old import process_log
#from flogger_process_log import process_log
import argparse
from flogger_dump_flights import dump_flights
from flogger_dump_tracks import dump_tracks2
from flogger_get_coords import get_coords
from flogger_signals import sig_handler
import signal
import os
import os.path
from flogger_dump_IGC import dump_IGC
from flogger_email_log import email_log2
from flogger_landout import landout_check
from geopy.distance import vincenty
from flogger_email_msg import email_msg
from flogger_find_tug import find_tug
from flogger_test_YorN import test_YorN
from flogger_gui import *
from flogger_settings import *
from threading import Th |
JonathonReinhart/killerbee | killerbee/kbutils.py | Python | bsd-3-clause | 22,117 | 0.011439 | # Import USB support depending on version of pyUSB
try:
import usb.core
import usb.util
#import usb.backend.libusb01
#backend = usb.backend.libusb01.get_backend()
USBVER=1
except ImportError:
import usb
#print("Warning: You are using pyUSB 0.x, future deprecation planned.")
USBVER=0
import serial
import os, glob
import time
import random
import inspect
from struct import pack
from config import * #to get DEV_ENABLE_* variables
# Known devices by USB ID:
RZ_USB_VEND_ID = 0x03EB
RZ_USB_PROD_ID = 0x210A
ZN_USB_VEND_ID = 0x04D8
ZN_USB_PROD_ID = 0x000E
#FTDI_USB_VEND_ID = 0x0403
#FTDI_USB_PROD_ID = 0x6001 #this is also used by FDTI cables used to attach gps
FTDI_X_USB_VEND_ID = 0x0403
FTDI_X_USB_PROD_ID = 0x6015 #api-mote FTDI chip
usbVendorList = [RZ_USB_VEND_ID, ZN_USB_VEND_ID]
usbProductList = [RZ_USB_PROD_ID, ZN_USB_PROD_ID]
# Global variables
gps_devstring = None
class KBCapabilities:
    '''
    Class to store and report on the capabilities of a specific KillerBee device.
    Despite the "Flag" naming, these constants are sequential indices used as
    keys into the internal capability dict, not a bitmask.
    '''
    NONE = 0x00          #: Capabilities Flag: No Capabilities
    SNIFF = 0x01         #: Capabilities Flag: Can Sniff
    SETCHAN = 0x02       #: Capabilities Flag: Can Set the Channel
    INJECT = 0x03        #: Capabilities Flag: Can Inject Frames
    PHYJAM = 0x04        #: Capabilities Flag: Can Jam PHY Layer
    SELFACK = 0x05       #: Capabilities Flag: Can ACK Frames Automatically
    PHYJAM_REFLEX = 0x06 #: Capabilities Flag: Can Jam PHY Layer Reflexively
    SET_SYNC = 0x07      #: Capabilities Flag: Can set the register controlling 802.15.4 sync byte
    FREQ_2400 = 0x08     #: Capabilities Flag: Can preform 2.4 GHz sniffing (ch 11-26)
    FREQ_900 = 0x09      #: Capabilities Flag: Can preform 900 MHz sniffing (ch 1-10)
    def __init__(self):
        # All capabilities start disabled; drivers enable what they support.
        self._capabilities = {
            self.NONE : False,
            self.SNIFF : False,
            self.SETCHAN : False,
            self.INJECT : False,
            self.PHYJAM : False,
            self.SELFACK: False,
            self.PHYJAM_REFLEX: False,
            self.SET_SYNC: False,
            self.FREQ_2400: False,
            self.FREQ_900: False }
    def check(self, capab):
        '''Return True if the given capability is known and enabled.'''
        if capab in self._capabilities:
            return self._capabilities[capab]
        else:
            return False
    def getlist(self):
        '''Return the underlying capability dict.'''
        return self._capabilities
    def setcapab(self, capab, value):
        '''Enable/disable a capability.'''
        self._capabilities[capab] = value
    def require(self, capab):
        '''Raise if the selected hardware lacks the given capability.'''
        if self.check(capab) != True:
            raise Exception('Selected hardware does not support required capability (%d).' % capab)
    def is_valid_channel(self, channel):
        '''
        Based on sniffer capabilities, return if this is an OK channel number.
        @rtype: Boolean
        '''
        # BUGFIX: the original tests used 'or' (e.g. channel >= 11 or
        # channel <= 26), which is true for EVERY integer, so any channel
        # number validated whenever one frequency capability was enabled.
        if (11 <= channel <= 26) and self.check(self.FREQ_2400):
            return True
        elif (1 <= channel <= 10) and self.check(self.FREQ_900):
            return True
        return False
class findFromList(object):
    '''
    Callable USB-device matcher for pyUSB 1.x.
    An instance is handed to usb.core.find() via its custom_match
    parameter; a device matches when both its vendor ID and product ID
    appear in the configured lists.
    '''
    def __init__(self, vendors_, products_):
        '''Store the acceptable vendor ID and product ID collections.'''
        self._vendors = vendors_
        self._products = products_
    def __call__(self, device):
        '''
        Return True when the candidate device's vendor and product IDs
        are both present in the stored lists, False otherwise.
        '''
        return (device.idVendor in self._vendors
                and device.idProduct in self._products)
class findFromListAndBusDevId(findFromList):
    '''
    Custom matching function for pyUSB 1.x.
    Used by usb.core.find's custom_match parameter.
    Extends findFromList with optional bus-number and device-address
    filters; a None filter matches any bus/address.
    '''
    def __init__(self, busNum_, devNum_, vendors_, products_):
        '''Takes an optional bus number and device address, plus the
        vendor ID and product ID lists handled by the base class.'''
        findFromList.__init__(self, vendors_, products_)
        self._busNum = busNum_
        self._devNum = devNum_
    def __call__(self, device):
        '''
        Returns True if the device matches the vendor/product lists and,
        when a bus number or device address was given, also matches those.
        '''
        if findFromList.__call__(self, device) and \
           (self._busNum == None or device.bus == self._busNum) and \
           (self._devNum == None or device.address == self._devNum) :
            return True
        return False
def devlist_usb_v1x(vendor=None, product=None):
    '''
    Private function. Do not call from tools/scripts/etc.
    Enumerate known KillerBee USB devices using the pyUSB 1.x API.
    A None vendor/product falls back to the module-level ID lists.
    Returns a list of [bus:address, product string, serial] triples.
    '''
    devlist = []
    # Normalise single IDs into lists so findFromList can test membership.
    if vendor == None: vendor = usbVendorList
    else: vendor = [vendor]
    if product == None: product = usbProductList
    else: product = [product]
    devs = usb.core.find(find_all=True, custom_match=findFromList(vendor, product)) #backend=backend,
    try:
        for dev in devs:
            # Note, can use "{0:03d}:{1:03d}" to get the old format,
            # but have decided to move to the new, shorter format.
            devlist.append(["{0}:{1}".format(dev.bus, dev.address), \
                            usb.util.get_string(dev, dev.iProduct), \
                            usb.util.get_string(dev, dev.iSerialNumber)])
    except usb.core.USBError as e:
        if e.errno == 13: #usb.core.USBError: [Errno 13] Access denied (insufficient permissions)
            # Translate the permissions error into a friendlier message.
            raise Exception("Unable to open device. " +
                "Ensure the device is free and plugged-in. You may need sudo.")
        else:
            raise e
    return devlist
def devlist_usb_v0x(vendor=None, product=None):
    '''
    Private function. Do not call from tools/scripts/etc.
    Enumerate known KillerBee USB devices using the legacy pyUSB 0.x API.
    A None vendor/product matches any ID from the module-level lists.
    Returns a list of [busdir:devfilename, product string, serial] triples.
    '''
    devlist = []
    busses = usb.busses()
    for bus in busses:
        devices = bus.devices
        for dev in devices:
            # Match either the explicit ID given, or any ID in the known lists.
            if ((vendor==None and dev.idVendor in usbVendorList) or dev.idVendor==vendor) \
               and ((product==None and dev.idProduct in usbProductList) or dev.idProduct==product):
                devlist.append([''.join([bus.dirname + ":" + dev.filename]), \
                                dev.open().getString(dev.iProduct, 50), \
                                dev.open().getString(dev.iSerialNumber, 12)])
    return devlist
def isIpAddr(ip):
    '''Return True if the given string is a valid IPv4 or IPv6 address.'''
    import socket
    def _looks_like_ipv4(text):
        # Prefer the strict inet_pton parser; fall back to inet_aton on
        # platforms without inet_pton, where a dot-count check rejects
        # shorthand forms (e.g. "127.1") that inet_aton would accept.
        try:
            socket.inet_pton(socket.AF_INET, text)
        except AttributeError:  # no inet_pton here, sorry
            try:
                socket.inet_aton(text)
            except socket.error:
                return False
            return text.count('.') == 3
        except socket.error:
            return False
        return True
    def _looks_like_ipv6(text):
        try:
            socket.inet_pton(socket.AF_INET6, text)
        except socket.error:
            return False
        return True
    return _looks_like_ipv6(ip) or _looks_like_ipv4(ip)
def devlist(vendor=None, product=None, gps=None, include=None):
'''
Return device information for all present devices,
filtering if requested by vendor and/or product IDs on USB devices, and
running device fingerprint functions on serial devices.
@type gps: String
@param gps: Optional serial device identifier for an attached GPS
unit. If provided, or if global variable has previously been set,
KillerBee skips that device in device enumeration process.
@type include: List of Strings
@param include: Optional list of device handles to be appended to the
normally found devices. This is useful for providing IP addresses for
remote scanners.
@rtype: List
@return: List of device information present.
For USB devices, get [busdir:devfilename, productString, serialNumber]
For serial devices, get [serialFileName, deviceDescription, ""]
'''
global usbVendorList, usbProductList, gps_devstring
if gps is not None an |
JetStarBlues/Nand-2-Tetris | PlayArea/parallel/_99__parallelHelpers.py | Python | mit | 1,142 | 0.049037 | '''
As shown in this tutorial by Sentdex,
www.youtube.com/watch?v=NwH0HvMI4EA
And
https://pymotw.com/3/queue/
'''
import threading
from queue import Queue
class execInParallel():
    '''Apply a callable to a collection of work items using worker threads.

    Each call to run() builds a fresh Queue, enqueues every item, starts
    the requested number of daemon worker threads, and blocks until the
    queue reports that all jobs are done.

    NOTE(review): workers loop forever on q.get(), so threads created by
    earlier run() calls remain alive (daemonized) for the process lifetime.
    '''
    def run(self, nThreads, fx, args):
        '''Process every element of *args* with *fx* on *nThreads* workers.'''
        self.q = Queue()
        self.action = fx
        self.createJobs(args)
        self.createThreads(nThreads)  # spawn the workers
        self.q.join()                 # block until every job is task_done()
    def performJob(self):
        '''Worker loop: take a job, run it, mark it complete — forever.'''
        while True:
            work_item = self.q.get()
            self.action(work_item)
            self.q.task_done()
    def createThreads(self, nThreads):
        '''Start *nThreads* daemon workers running performJob.'''
        for _ in range(nThreads):
            worker = threading.Thread(target=self.performJob)
            worker.daemon = True  # die when the main thread dies
            worker.start()
    def createJobs(self, jobs):
        '''Enqueue every element of *jobs*.'''
        for work_item in jobs:
            self.q.put(work_item)
# --------------------------------------------------------
# Example...
'''
s = list( "hello_ryden" )
def printChar | ( c ): print( c )
def printChar2( c ): print( c * 3 )
e = execInParallel()
e.run( 2, printChar, s )
e.run( 5, printChar2, s )
''' |
shadowoneau/skylines | skylines/commands/users/merge.py | Python | agpl-3.0 | 1,896 | 0.002637 | from flask_script import Command, Option
import sys
from skylines.database import db
from skylines.model import User, Club, IGCFile, Flight, TrackingFix
class Merge(Command):
    """ Merge two user accounts """

    option_list = (
        Option('new_id', type=int, help='ID of the new user account'),
        Option('old_id', type=int, help='ID of the old user account'),
    )

    def run(self, new_id, old_id):
        """Re-point every record owned by user *old_id* at user *new_id*,
        copy over missing credentials, then delete the old account.
        Exits with status 1 when either account is missing or the two
        accounts belong to different clubs."""
        new = db.session.query(User).get(new_id)
        if not new:
            print >>sys.stderr, "No such user: %d" % new_id
            # BUGFIX: previously execution continued with new == None
            # and crashed further down; abort instead.
            sys.exit(1)

        old = db.session.query(User).get(old_id)
        if not old:
            print >>sys.stderr, "No such user: %d" % old_id
            # BUGFIX: same as above -- abort on a missing account.
            sys.exit(1)

        if old.club != new.club:
            print >>sys.stderr, "Different club;", old.club, new.club
            sys.exit(1)

        # Transfer ownership of all dependent records to the new account.
        db.session.query(Club).filter_by(owner_id=old_id).update({'owner_id': new_id})
        db.session.query(IGCFile).filter_by(owner_id=old_id).update({'owner_id': new_id})
        db.session.query(Flight).filter_by(pilot_id=old_id).update({'pilot_id': new_id})
        db.session.query(Flight).filter_by(co_pilot_id=old_id).update({'co_pilot_id': new_id})
        db.session.query(TrackingFix).filter_by(pilot_id=old_id).update({'pilot_id': new_id})
        db.session.flush()
        db.session.commit()

        # Re-fetch both accounts: the bulk .update() calls above bypass the
        # session, so the cached instances may be stale.
        new = db.session.query(User).get(new_id)
        old = db.session.query(User).get(old_id)
        assert new and old

        db.session.delete(old)
        db.session.flush()

        # Carry over credentials the new account is missing.
        if new.email_address is None and old.email_address is not None:
            new.email_address = old.email_address

        if new._password is None and old._password is not None:
            new._password = old._password

        # TODO: merge display name or not?

        if old.tracking_key is not None:
            new.tracking_key = old.tracking_key

        db.session.commit()
|
Nonse/monkeys | tests/test_models.py | Python | mit | 6,811 | 0 | import random
from monkeygod import models
def test_avatar(session):
    """Monkey.avatar() should build a Gravatar URL from the email hash."""
    monkey = models.Monkey(
        name='monkey1',
        age=10,
        email='monkey1@example.com'
    )
    session.add(monkey)
    session.commit()
    url = monkey.avatar(128)
    # The URL must begin with the Gravatar base plus the MD5 of the email.
    prefix = (
        'http://www.gravatar.com/avatar/90cab8a06b72c3ea49d7a09192b43166'
    )
    assert url.startswith(prefix)
def test_is_friend(session):
    """Monkey.is_friend() reports mutual friendship and is symmetric."""
    m1 = models.Monkey(
        name='monkey1',
        age=10,
        email='monkey1@example.com'
    )
    m2 = models.Monkey(
        name='monkey2',
        age=20,
        email='monkey2@example.com'
    )
    m3 = models.Monkey(
        name='monkey3',
        age=30,
        email='monkey3@example.com'
    )
    session.add_all([m1, m2, m3])
    session.commit()
    # Wire the friendship in both directions by hand (bypassing
    # add_friend) so that only is_friend() itself is exercised.
    m1.friends.append(m2)
    m2.friends.append(m1)
    session.add_all([m1, m2])
    session.commit()
    assert m1.is_friend(m2) is True
    assert m2.is_friend(m1) is True
    # m3 was never linked to anybody.
    assert m2.is_friend(m3) is False
    assert m3.is_friend(m2) is False
def test_friends(session):
    """Database test to ensure a monkey can add/delete friends"""
    m1 = models.Monkey(
        name='monkey1',
        age=10,
        email='monkey1@example.com'
    )
    m2 = models.Monkey(
        name='monkey2',
        age=20,
        email='monkey2@example.com'
    )
    session.add_all([m1, m2])
    session.commit()
    # Preconditions: fresh monkeys have no friends and edge-case calls fail.
    assert m1.is_friend(m2) is False, 'Monkeys are not friends initially'
    assert m2.is_friend(m1) is False, 'Monkeys are not friends initially'
    assert m1.delete_friend(m2) is False, 'Removing non-existing friend fails'
    assert m1.add_friend(m1) is False, 'Cant add self to friends'
    assert m1.add_friend(m2) is True, 'Adding friend succeeds'
    session.add_all([m1, m2])
    session.commit()
    # add_friend must create the relationship on both sides.
    assert m1.friends.count() == 1, 'Monkey has 1 friend'
    assert m2.friends.count() == 1, 'Friendship is bidirectional'
    assert m1.is_friend(m2) is True, 'Friend is the correct one'
    assert m2.is_friend(m1) is True, 'Second monkey has the correct friend too'
    assert m1.add_friend(m2) is False, 'Cant add the existing friend'
    assert m1.delete_friend(m2) is True, 'Deleting friend works correctly'
    session.add_all([m1, m2])
    session.commit()
    # delete_friend must remove the relationship on both sides.
    assert m1.friends.count() == 0, 'Monkey again has no friends'
    assert m2.friends.count() == 0, 'Deleting friends is bidirectional'
    assert m1.is_friend(m2) is False, 'Monkeys are not friends anymore'
    assert m2.is_friend(m1) is False, 'Monkeys are not friends anymore'
def test_many_friends(session):
    """Database test to ensure a monkey can have more than one friend"""
    m1 = models.Monkey(
        name='monkey1',
        age=10,
        email='monkey1@example.com'
    )
    m2 = models.Monkey(
        name='monkey2',
        age=20,
        email='monkey2@example.com'
    )
    m3 = models.Monkey(
        name='monkey3',
        age=30,
        email='monkey3@example.com'
    )
    session.add_all([m1, m2, m3])
    session.commit()
    m1.add_friend(m2)
    assert m1.add_friend(m3) is True, 'Monkey can have more than 1 friend'
    session.add_all([m1, m2, m3])
    session.commit()
    # m1 is the hub: two friends, each of whom has only m1 back.
    assert m1.friends.count() == 2, 'Monkey1 have more than 1 friend'
    assert m2.friends.count() == 1, 'Friends added bidirectionally'
    assert m3.friends.count() == 1, 'Friends added bidirectionally'
    # Friendship must not leak transitively between m2 and m3.
    assert m2.is_friend(m3) is False, 'Two other monkeys are not friends'
def test_best_friends(session):
    """Database test to ensure best friend logic works correctly"""
    m1 = models.Monkey(
        name='monkey1',
        age=10,
        email='monkey1@example.com'
    )
    m2 = models.Monkey(
        name='monkey2',
        age=20,
        email='monkey2@example.com'
    )
    m3 = models.Monkey(
        name='monkey3',
        age=30,
        email='monkey3@example.com'
    )
    session.add_all([m1, m2, m3])
    session.commit()
    assert m1.best_friend is None, 'Monkey has no best friend initially'
    assert m2.best_friend is None, 'Monkey has no best friend initially'
    assert m3.best_friend is None, 'Monkey has no best friend initially'
    assert m1.add_best_friend(m1) is False, 'Cant add self as best friend'
    assert m1.add_best_friend(m3) is True, 'Can add other monkeys as bf'
    assert m2.add_best_friend(m3) is True, (
        'Multiple monkeys can consider one monkey best friend'
    )
    session.add_all([m1, m2, m3])
    session.commit()
    assert m1.best_friend == m3, 'Monkey has correct best friend'
    assert m3.best_friend_of.count() == 2, (
        'Monkey3 is considered best friend of multiple monkeys'
    )
    assert m3.best_friend is None, 'Best friend is not bidirectional'
    assert m1.add_best_friend(m2) is True, 'Can change best friend'
    m2.best_friend = None
    session.add_all([m1, m2, m3])
    session.commit()
    # BUGFIX: the two assertion messages below were corrupted in the
    # source ("success | fully", "fro | m"); restored to readable text.
    assert m1.best_friend == m2, 'Changed best friend successfully'
    assert m2.best_friend is None, 'Removing best friend succeeds'
    assert m1.delete_friend(m2) is True, 'Can delete friend who is also best'
    session.add_all([m1, m2, m3])
    session.commit()
    assert m1.best_friend is None, 'Deleting from friends also clears best'
def test_friends_without_best(session):
    """friends_without_best() lists all friends except the best friend."""
    m1 = models.Monkey(
        name='monkey1',
        age=10,
        email='monkey1@example.com'
    )
    m2 = models.Monkey(
        name='monkey2',
        age=20,
        email='monkey2@example.com'
    )
    m3 = models.Monkey(
        name='monkey3',
        age=30,
        email='monkey3@example.com'
    )
    session.add_all([m1, m2, m3])
    session.commit()
    # m1 has a plain friend (m2) and a best friend (m3).
    m1.add_friend(m2)
    m1.add_best_friend(m3)
    session.add_all([m1, m2, m3])
    session.commit()
    no_bf_friends = m1.friends_without_best()
    for friend in no_bf_friends:
        assert m1.best_friend != friend
    # Exactly one friend (the best one) must be filtered out.
    assert (m1.friends.count() - no_bf_friends.count()) == 1, (
        'All friends but best'
    )
    # A monkey without a best friend filters nothing.
    assert m2.friends.count() == m2.friends_without_best().count(), (
        'Without best friend lists are the same'
    )
def test_non_friends(session):
    """non_friends() lists only monkeys that are not yet friends."""
    m1 = models.Monkey(
        name='monkey1',
        age=10,
        email='monkey1@example.com'
    )
    m2 = models.Monkey(
        name='monkey2',
        age=20,
        email='monkey2@example.com'
    )
    m3 = models.Monkey(
        name='monkey3',
        age=30,
        email='monkey3@example.com'
    )
    session.add_all([m1, m2, m3])
    session.commit()
    m1.add_friend(m2)
    session.add_all([m1, m2])
    session.commit()
    # Of the two other monkeys, only m3 is not a friend of m1.
    others = m1.non_friends()
    assert others.count() == 1, 'Lists one not added friend'
    for monkey in others:
        assert not m1.is_friend(monkey), 'Monkeys are not friends'
|
mattvonrocketstein/smash | smashlib/ipy3x/kernel/channelsabc.py | Python | mit | 2,319 | 0.000431 | """Abstract base classes for kernel client channels"""
# Copyright (c) IPython Development Team. |
# Distributed under the terms of the Modified BSD License.
import abc
from IPython.utils.py3compat import with_metaclass
class ChannelABC(with_metaclass(abc.ABCMeta, object)):
    """A base class for all channel ABCs."""
    @abc.abstractmethod
    def start(self):
        """Start the channel (abstract; see IPython.kernel.channels)."""
        pass
    @abc.abstractmethod
    def stop(self):
        """Stop the channel (abstract)."""
        pass
    @abc.abstractmethod
    def is_alive(self):
        """Return whether the channel is running (abstract)."""
        pass
class ShellChannelABC(ChannelABC):
    """ShellChannel ABC.
    The docstrings for this class can be found in the base implementation:
    `IPython.kernel.channels.ShellChannel`
    """
    @abc.abstractproperty
    def allow_stdin(self):
        """Whether stdin requests are permitted (abstract property)."""
        pass
    @abc.abstractmethod
    def execute(self, code, silent=False, store_history=True,
                user_expressions=None, allow_stdin=None):
        """Submit *code* for execution (abstract)."""
        pass
    @abc.abstractmethod
    def complete(self, text, line, cursor_pos, block=None):
        """Request completion for *text* at *cursor_pos* (abstract)."""
        pass
    @abc.abstractmethod
    def inspect(self, oname, detail_level=0):
        """Request introspection of object *oname* (abstract)."""
        pass
    @abc.abstractmethod
    def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
        """Request execution history (abstract)."""
        pass
    @abc.abstractmethod
    def kernel_info(self):
        """Request kernel information (abstract)."""
        pass
    @abc.abstractmethod
    def shutdown(self, restart=False):
        """Request a shutdown, optionally followed by a restart (abstract)."""
        pass
class IOPubChannelABC(ChannelABC):
    """IOPubChannel ABC.
    The docstrings for this class can be found in the base implementation:
    `IPython.kernel.channels.IOPubChannel`
    """
    @abc.abstractmethod
    def flush(self, timeout=1.0):
        """Flush pending messages, waiting up to *timeout* seconds (abstract)."""
        pass
class StdInChannelABC(ChannelABC):
    """StdInChannel ABC.
    The docstrings for this class can be found in the base implementation:
    `IPython.kernel.channels.StdInChannel`
    """
    @abc.abstractmethod
    def input(self, string):
        """Supply a line of raw input (abstract)."""
        pass
class HBChannelABC(ChannelABC):
    """HBChannel ABC.
    The docstrings for this class can be found in the base implementation:
    `IPython.kernel.channels.HBChannel`
    """
    @abc.abstractproperty
    def time_to_dead(self):
        """Heartbeat timeout value (abstract property)."""
        pass
    @abc.abstractmethod
    def pause(self):
        """Pause heartbeat monitoring (abstract)."""
        pass
    @abc.abstractmethod
    def unpause(self):
        """Resume heartbeat monitoring (abstract)."""
        pass
    @abc.abstractmethod
    def is_beating(self):
        """Return whether the heartbeat is currently responding (abstract)."""
        pass
|
aziflaj/numberoid | src/matrix/pymatrix.py | Python | mit | 5,173 | 0.000773 | import math
import copy
def print_matrix(matrix):
    """
    Pretty-print a matrix, one row (as a Python list) per line.
    :param matrix: The matrix (list of row lists) to prettyprint
    """
    for row in matrix:
        print(row)
def transpose(matrix):
    """
    Return the transpose of a rectangular matrix as a new list of lists.
    :param matrix: The matrix to transpose
    :return: The transposed matrix
    """
    column_count = len(matrix[0])
    return [[row[col] for row in matrix] for col in range(column_count)]
def minor_matrix(matrix, row_index, col_index):
    """
    Return the minor of a square matrix for a given 1-based row and
    column index, i.e. the matrix with that row and column removed.

    The input matrix is left untouched (the original implementation
    destructively popped the row/column out of its argument, forcing
    every caller to deepcopy first).

    :param matrix: The square matrix to take the minor of
    :param row_index: 1-based row index to remove
    :param col_index: 1-based column index to remove
    :return: A new matrix with the given row and column removed
    :raises ValueError: if the matrix is not square or an index is out of range
    """
    num_rows = len(matrix)
    num_cols = len(matrix[0])
    if num_cols != num_rows:
        raise ValueError("You should pass a square matrix")
    if row_index > num_rows or col_index > num_cols or row_index < 1 or col_index < 1:
        raise ValueError("Invalid row or column")
    # Rebuild the matrix, skipping the excluded row and column.
    return [
        [value for j, value in enumerate(row, start=1) if j != col_index]
        for i, row in enumerate(matrix, start=1)
        if i != row_index
    ]
def determinant(matrix):
    """
    Compute the determinant of a square matrix by Laplace expansion
    along the first row.
    :param matrix: The square matrix to find the determinant of
    :return: The determinant of the matrix
    :raises ValueError: if the matrix is not square
    """
    num_rows = len(matrix)
    num_cols = len(matrix[0])
    if num_cols != num_rows:
        raise ValueError("You should pass a square matrix")
    dim = num_cols
    if dim == 1:
        return matrix[0][0]
    if dim == 2:
        return matrix[0][0] * matrix[1][1] - matrix[0][1] * matrix[1][0]
    # Expansion along the first row: entry * signed determinant of its minor.
    total = 0
    for col in range(dim):
        scratch = copy.deepcopy(matrix)  # minor_matrix may mutate its input
        sub_det = determinant(minor_matrix(scratch, 1, col + 1))
        total += matrix[0][col] * sub_det * math.pow(-1, col)
    return total
def inverse(matrix):
    """
    Return the inverse of a square matrix using the adjugate method
    (transposed cofactor matrix divided by the determinant).
    :param matrix: The matrix to invert
    :return: The inverse of the matrix passed as parameter
    :raises ValueError: if the matrix is not square or has determinant 0
    """
    num_rows = len(matrix)
    num_cols = len(matrix[0])
    if num_rows != num_cols:
        raise ValueError("You should pass a square matrix")
    dim = num_rows
    denom = determinant(matrix)
    if denom == 0:
        raise ValueError("The determinant is 0. Can't invert matrix")
    # Build the adjugate (transposed cofactor matrix) entry by entry:
    # entry (i, j) is the signed minor for position (j, i).
    adjugate = []
    for i in range(dim):
        adj_row = []
        for j in range(dim):
            scratch = copy.deepcopy(matrix)  # minor_matrix may mutate its input
            cofactor = determinant(minor_matrix(scratch, j + 1, i + 1)) * math.pow(-1, i + j)
            adj_row.append(cofactor)
        adjugate.append(adj_row)
    # Divide every cofactor by the determinant (in place).
    scalar_multiply(adjugate, 1 / denom)
    return adjugate
def scalar_multiply(matrix, const):
    """
    Multiply every entry of *matrix* by *const*, in place.
    :param matrix: The matrix to multiply (mutated)
    :param const: The scalar multiplier
    :return: The same (now scaled) matrix object, for convenience
    """
    width = len(matrix[0])
    for row in matrix:
        for col in range(width):
            row[col] *= const
    return matrix
def multiply(matrix1, matrix2):
    """
    Multiply two matrices (matrix1 on the left, matrix2 on the right).
    The width of matrix1 must equal the height of matrix2.
    :param matrix1: Left matrix
    :param matrix2: Right matrix
    :return: The product matrix of the multiplication
    :raises ValueError: if the inner dimensions do not match
    """
    inner = len(matrix2)
    if len(matrix1[0]) != inner:
        raise ValueError("Can't multiply these matrices")
    out_cols = len(matrix2[0])
    # Each output entry is the dot product of a row of matrix1 with a
    # column of matrix2.
    return [
        [sum(matrix1[i][a] * matrix2[a][j] for a in range(inner))
         for j in range(out_cols)]
        for i in range(len(matrix1))
    ]
def linear_solver(coef, const):
    """
    Solve a system of linear equations of the standard form Ax = B.
    :param coef: The matrix of coefficients, A
    :param const: The column matrix of constant terms, B
    :return: For a 2x2 system, a flat list [x, y]; otherwise the solution
             as a column matrix (list of one-element rows), matching the
             historical return shapes of this function.
    :raises ValueError: (via inverse) if the system is singular
    """
    if len(coef) == 2:
        a, b = coef[0][0], coef[0][1]
        c, d = coef[1][0], coef[1][1]
        denom = -a * d + c * b
        # Fast path: direct Cramer-style formulas. BUGFIX: the original
        # divided by `denom` and by `c` unconditionally, raising
        # ZeroDivisionError for solvable systems where c == 0.
        if c != 0 and denom != 0:
            y = (const[0][0] * c - a * const[1][0]) / denom
            x = (const[1][0] - d * y) / c
            return [x, y]
        # Fall back to the general method, flattening the column result
        # so 2x2 systems always return the [x, y] shape.
        solution = multiply(inverse(coef), const)
        return [solution[0][0], solution[1][0]]
    return multiply(inverse(coef), const)
|
Tomsod/gemrb | gemrb/GUIScripts/pst/NewLife.py | Python | gpl-2.0 | 10,818 | 0.040858 | # -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# NewLife.py - Character generation screen
###################################################
import GemRB
from GUIDefines import *
from ie_stats import *
import CommonTables
CommonTables.Load()
NewLifeWindow = 0
QuitWindow = 0
TextArea = 0
TotLabel = 0
AcLabel = 0
HpLabel = 0
StatTable = 0
# maintain this order in all lists!
#Stats = [ Str, Int, Wis, Dex, Con, Cha ]
Stats = [ 0, 0, 0, 0, 0, 0 ]
StatLimit = [ 23, 18, 18, 18, 18, 18 ]
StatLabels = [ None ] * 6
StatLowerLimit = [ 9 ] * 6
LevelUp = 0
TotPoints = 0
AcPoints = 0
HpPoints = 0
strings = ("30","60","90","99","00")
extras = (30,60,90,99,100)
def OnLoad():
	# Entry point called by the GemRB core when this script loads:
	# open the character-generation variant (Type 0) of the stats window.
	OpenLUStatsWindow (0)
	return
def OpenLUStatsWindow(Type = 1):
	# Build and show the stat-distribution window.
	# Type == 1: level-up for the existing character (default);
	# Type == 0: initial character generation (loads the base game).
	global NewLifeWindow, QuitWindow, StatTable
	global TotPoints, AcPoints, HpPoints
	global TotLabel, AcLabel, HpLabel
	global TextArea, Stats, StatLabels, StatLowerLimit, StatLimit, LevelUp
	GemRB.SetRepeatClickFlags(GEM_RK_DOUBLESPEED, OP_SET)
	LevelUp = Type
	if LevelUp:
		# Hide the regular in-game windows while levelling up.
		import GUICommonWindows
		import GUIREC
		GUICommonWindows.OptionsWindow.SetVisible (WINDOW_INVISIBLE)
		GUICommonWindows.PortraitWindow.SetVisible (WINDOW_INVISIBLE)
		GUICommonWindows.ActionsWindow.SetVisible (WINDOW_INVISIBLE)
		GUIREC.RecordsWindow.SetVisible (WINDOW_INVISIBLE)
	else:
		GemRB.LoadGame(None) #loading the base game
	StatTable = GemRB.LoadTable("abcomm")
	GemRB.LoadWindowPack("GUICG")
	#setting up confirmation window
	QuitWindow = GemRB.LoadWindow(1)
	QuitWindow.SetVisible(WINDOW_INVISIBLE)
	#setting up CG window
	NewLifeWindow = GemRB.LoadWindow(0)
	if LevelUp:
		# Start from the character's current stats; they may only go up.
		Str = GemRB.GetPlayerStat(1, IE_STR, 1)
		Dex = GemRB.GetPlayerStat(1, IE_DEX, 1)
		Con = GemRB.GetPlayerStat(1, IE_CON, 1)
		Wis = GemRB.GetPlayerStat(1, IE_WIS, 1)
		Int = GemRB.GetPlayerStat(1, IE_INT, 1)
		Cha = GemRB.GetPlayerStat(1, IE_CHR, 1)
		TotPoints = 1 # FIXME: actually LevelDiff
		Stats = [ Str, Int, Wis, Dex, Con, Cha ]
		StatLowerLimit = list(Stats) # so we copy the values or the lower limit would increase with them
		StatLimit = [ 25 ] * 6
	else:
		# Character generation: everything starts at 9 with 21 points to spend.
		Str = Dex = Con = Wis = Int = Cha = 9
		TotPoints = 21
		Stats = [ Str, Int, Wis, Dex, Con, Cha ]
	# stat label controls
	for i in range(len(Stats)):
		StatLabels[i] = NewLifeWindow.GetControl(0x10000018 + i)
	# individual stat buttons (hovering shows the stat description)
	for i in range(len(Stats)):
		Button = NewLifeWindow.GetControl (i+2)
		Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_SET)
		Button.SetEvent (IE_GUI_MOUSE_OVER_BUTTON, StatPress[i])
	# AC "button": a locked, image-less label that reacts to hover only
	Button = NewLifeWindow.GetControl(8)
	Button.SetFlags(IE_GUI_BUTTON_RADIOBUTTON, OP_SET)
	Button.SetState(IE_GUI_BUTTON_LOCKED)
	Button.SetSprites("", 0, 0, 0, 0, 0)
	Button.SetText(5025)
	Button.SetEvent(IE_GUI_MOUSE_OVER_BUTTON, AcPress)
	# HP "button", same treatment
	Button = NewLifeWindow.GetControl(9)
	Button.SetFlags(IE_GUI_BUTTON_RADIOBUTTON, OP_SET)
	Button.SetState(IE_GUI_BUTTON_LOCKED)
	Button.SetSprites("", 0, 0, 0, 0, 0)
	Button.SetText(5026)
	Button.SetEvent(IE_GUI_MOUSE_OVER_BUTTON, HpPress)
	# remaining-points "button", same treatment
	Button = NewLifeWindow.GetControl(10)
	Button.SetFlags(IE_GUI_BUTTON_RADIOBUTTON, OP_SET)
	Button.SetState(IE_GUI_BUTTON_LOCKED)
	Button.SetSprites("", 0, 0, 0, 0, 0)
	Button.SetText(5027)
	Button.SetEvent(IE_GUI_MOUSE_OVER_BUTTON, PointPress)
	# stat +/- buttons (handlers defined elsewhere in this script)
	for i in range(len(StatPress)):
		Button = NewLifeWindow.GetControl (11+2*i)
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, IncreasePress)
		Button.SetEvent (IE_GUI_MOUSE_OVER_BUTTON, StatPress[i])
		Button.SetVarAssoc ("Pressed", i)
		Button = NewLifeWindow.GetControl (12+2*i)
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, DecreasePress)
		Button.SetEvent (IE_GUI_MOUSE_OVER_BUTTON, StatPress[i])
		Button.SetVarAssoc ("Pressed", i)
	NewLifeLabel = NewLifeWindow.GetControl(0x10000023)
	NewLifeLabel.SetText(1899)
	TextArea = NewLifeWindow.GetControl(23)
	TextArea.SetText(18495)
	TotLabel = NewLifeWindow.GetControl(0x10000020)
	AcLabel = NewLifeWindow.GetControl(0x1000001E)
	HpLabel = NewLifeWindow.GetControl(0x1000001F)
	Label = NewLifeWindow.GetControl(0x10000021)
	Label.SetText(254)
	# character portrait
	PhotoButton = NewLifeWindow.GetControl(35)
	PhotoButton.SetState(IE_GUI_BUTTON_LOCKED)
	PhotoButton.SetFlags(IE_GUI_BUTTON_NO_IMAGE | IE_GUI_BUTTON_PICTURE, OP_SET)
	PhotoButton.SetEvent(IE_GUI_MOUSE_OVER_BUTTON, OverPhoto)
	PhotoButton.SetPicture("STPNOC")
	AcceptButton = NewLifeWindow.GetControl(0)
	AcceptButton.SetText(4192)
	AcceptButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, AcceptPress)
	AcceptButton.SetFlags(IE_GUI_BUTTON_DEFAULT,OP_OR)
	CancelButton = NewLifeWindow.GetControl(1)
	CancelButton.SetText(4196)
	CancelButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, CancelPress)
	# populate the numeric labels before showing the window
	UpdateLabels()
	NewLifeWindow.SetVisible(WINDOW_VISIBLE)
	return
def UpdateLabels():
	# Refresh every numeric label (stats, remaining points, derived AC/HP)
	# from the current Stats list and mode (LevelUp vs. chargen).
	global AcPoints, HpPoints
	Str = Stats[0]
	if Str<=18:
		StatLabels[0].SetText(str(Str))
	else:
		# exceptional strength is displayed as 18/xx using the strings table
		StatLabels[0].SetText("18/"+strings[Str-19])
	for i in range(1, len(Stats)):
		StatLabels[i].SetText(str(Stats[i]))
	TotLabel.SetText(str(TotPoints))
	if LevelUp:
		AcPoints = GemRB.GetPlayerStat(1, IE_ARMORCLASS, 1)
	else:
		AcPoints = 10
	Dex = Stats[3]
	if Dex > 14:
		# each point of Dex above 14 improves (lowers) AC by one
		AcPoints = AcPoints - (Dex-14)
	if LevelUp:
		HpPoints = GemRB.GetPlayerStat(1, IE_HITPOINTS, 1)
	else:
		HpPoints = 20
	Con = Stats[4]
	if Con > 14:
		# Con over 14 grants an extra HP bonus on top of the base 2/point
		HpPoints = HpPoints + (Con-9)*2 + (Con-14)
	else:
		HpPoints = HpPoints + (Con-9)*2
	AcLabel.SetText(str(AcPoints))
	HpLabel.SetText(str(HpPoints))
	return
def OkButton():
	# Dismiss the confirmation dialog and return to the stats window.
	QuitWindow.SetVisible(WINDOW_INVISIBLE)
	NewLifeWindow.SetVisible(WINDOW_VISIBLE)
	return
def AcceptPress():
	# "Done" handler: refuse if points remain unspent, otherwise commit
	# the chosen stats to the player and either continue the level-up
	# flow or finish character generation and start the game.
	if TotPoints:
		# Setting up the error window (unspent points remain)
		TextArea = QuitWindow.GetControl(0)
		TextArea.SetText(46782)
		Button = QuitWindow.GetControl(1)
		Button.SetText("")
		Button.SetFlags(IE_GUI_BUTTON_NO_IMAGE,OP_SET)
		Button.SetState(IE_GUI_BUTTON_DISABLED)
		Button = QuitWindow.GetControl(2)
		Button.SetText(46783)
		Button.SetFlags(IE_GUI_BUTTON_DEFAULT,OP_OR)
		Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, OkButton)
		NewLifeWindow.SetVisible(WINDOW_GRAYED) #go dark
		QuitWindow.SetVisible(WINDOW_VISIBLE)
		return
	if NewLifeWindow:
		NewLifeWindow.Unload()
	if QuitWindow:
		QuitWindow.Unload()
	#set my character up
	if not LevelUp:
		MyChar = GemRB.CreatePlayer ("charbase", 1)
	Str = Stats[0]
	if Str<=18:
		GemRB.SetPlayerStat(1, IE_STR, Str)
		GemRB.SetPlayerStat(1, IE_STREXTRA,0)
	else:
		# values above 18 are stored as 18 plus an exceptional-strength extra
		GemRB.SetPlayerStat(1, IE_STR, 18)
		GemRB.SetPlayerStat(1, IE_STREXTRA,extras[Str-19])
	GemRB.SetPlayerStat(1, IE_INT, Stats[1])
	GemRB.SetPlayerStat(1, IE_WIS, Stats[2])
	GemRB.SetPlayerStat(1, IE_DEX, Stats[3])
	GemRB.SetPlayerStat(1, IE_CON, Stats[4])
	GemRB.SetPlayerStat(1, IE_CHR, Stats[5])
	if LevelUp:
		# hp is handled in GUIREC
		import GUIREC
		GUIREC.OpenLevelUpWindow ()
		return
	#don't add con bonus, it will be calculated by the game
	#interestingly enough, the game adds only one level's con bonus
	Con = Stats[4]
	if Con > 14:
		x = 30
	else:
		x = 20 + (Con-9)*2
	print "Setting max hp to: ",x
	GemRB.SetPlayerStat(1, IE_MAXHITPOINTS, x)
	#adding the remaining constitution bonus to the current hp
	#if Con>14:
	#	x = x+(Con-14)*3
	print "Setting current hp to: ",x
	GemRB.SetPlayerStat(1, IE_HITPOINTS, x)
	GemRB.FillPlayerInfo(1) #does all the rest
	#alter this if needed
	GemRB.SetRepeatClickFlags(GEM_RK_DISABLE, OP_SET)
	#LETS PLAY!!
	GemRB.EnterGame()
	return
def CancelPress():
# Setting up the confirmation window
TextArea = QuitWindow.GetContr |
dssg/cincinnati2015-public | evaluation/webapp/views.py | Python | mit | 3,341 | 0.006286 | from io import BytesIO
from itertools import groupby
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from flask import make_response, render_template, abort
from webapp import app
from webapp.evaluation import *
from webapp.ioutils import *
from webapp import config
@app.route('/')
def index():
    """Overview page: experiments bucketed by day (newest day first),
    with each day's runs sorted best score first."""
    run_date = lambda r: r.timestamp.date()
    runs = sorted(get_experiments_list(), key=run_date, reverse=True)
    # groupby needs its input pre-sorted by the same key, which it is.
    grouped = [
        (day, sorted(day_runs, key=lambda r: r.score, reverse=True))
        for day, day_runs in groupby(runs, run_date)
    ]
    return render_template('overview.html', experiments=grouped, score_name=config.score_name)
@app.route('/<timestamp>')
def details(timestamp):
    """Detail page for a single experiment run."""
    # Probe the experiment first -- this will fail with 404 if the
    # timestamp is not known.
    get_labels_predictions(timestamp)
    return render_template('details.html', timestamp=timestamp)
@app.route("/<timestamp>/norm_confusions")
def normalized_confusion_matrix(timestamp):
    """Serve the normalized confusion matrix of one run as a PNG image."""
    labels, predictions = get_labels_predictions(timestamp)
    return serve_matplotlib_fig(
        plot_normalized_confusion_matrix(labels, predictions))
@app.route("/<timestamp>/importances")
def feature_importances(timestamp):
    """Serve a plot of the model's feature importances as a PNG image.

    Args:
        timestamp: identifier of the experiment run (404s if unknown).
    """
    # Fixed: the tuple unpacking on this line was garbled and broke the module.
    features, importances = get_feature_importances(timestamp)
    importance_fig = plot_feature_importances(features, importances)
    return serve_matplotlib_fig(importance_fig)
@app.route("/<timestamp>/precision-recall")
def precision_recall(timestamp):
    """Serve the precision/recall-vs-threshold plot of one run as a PNG."""
    labels, predictions = get_labels_predictions(timestamp)
    figure = plot_precision_recall_n(labels, predictions)
    return serve_matplotlib_fig(figure)
@app.route("/<timestamp>/precision-cutoff")
def precision_cutoff(timestamp):
    """Serve the precision-at-cutoff plot of one run as a PNG image.

    The route string was garbled (split by a stray separator); restored to
    match the sibling endpoints' naming scheme.
    """
    test_labels, test_predictions = get_labels_predictions(timestamp)
    prec_cutoff_fig = plot_precision_cutoff(test_labels, test_predictions)
    return serve_matplotlib_fig(prec_cutoff_fig)
@app.route("/<timestamp>/ROC")
def ROC(timestamp):
    """Serve the ROC curve of one run as a PNG image."""
    labels, predictions = get_labels_predictions(timestamp)
    return serve_matplotlib_fig(plot_ROC(labels, predictions))
@app.route("/growth")
def growth():
    """Serve a plot of the best score achieved per day over time."""
    # groupby only merges adjacent items, so sort by the grouping key first
    # (newest day first, matching the overview page).
    runs = sorted(get_experiments_list(),
                  key=lambda r: r.timestamp.date(), reverse=True)
    best_per_day = []
    for day, day_runs in groupby(runs, key=lambda r: r.timestamp.date()):
        # max() returns the first maximal run, same as sorting descending
        # by score and taking element 0.
        best = max(day_runs, key=lambda r: r.score)
        best_per_day.append((day, best.score))
    growth_fig = plot_growth(best_per_day)
    return serve_matplotlib_fig(growth_fig)
def serve_matplotlib_fig(fig):
    """Render a matplotlib figure to PNG bytes and wrap it in a Flask
    response with the proper Content-Type header."""
    buffer = BytesIO()
    FigureCanvas(fig).print_png(buffer)
    response = make_response(buffer.getvalue())
    response.headers['Content-Type'] = 'image/png'
    return response
|
TinghuiWang/pyActLearn | docs/conf.py | Python | bsd-3-clause | 10,278 | 0.00574 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# pyActLearn documentation build configuration file, created by
# sphinx-quickstart on Mon Nov 14 10:34:33 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon'
]
# Use ivar in napoleon to render attributes
napoleon_use_ivar = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'pyActLearn'
copyright = '2016, Tinghui Wang'
author = 'Tinghui Wang'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
exec_results = {}
exec(open(os.path.join(os.path.dirname(__file__), '../pyActLearn/version.py')).read(), exec_results)
pyActLearnVersion = exec_results['version']
version = pyActLearnVersion
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
# on_rtd is whether we are on readthedocs.org (RTD sets this env variable
# itself and forces its own theme, so only configure the theme locally).
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'pyActLearndoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_do |
thenenadx/forseti-security | google/cloud/security/common/data_access/group_dao.py | Python | apache-2.0 | 3,871 | 0.000517 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides the data access object (DAO) for Groups."""
from Queue import Queue
from google.cloud.security.common.data_access import dao
from google.cloud.security.common.data_access.sql_queries import select_data
from google.cloud.security.common.util import log_util
# TODO: The next editor must remove this disable and correct issues.
# pylint: disable=missing-type-doc,missing-return-type-doc
LOGGER = log_util.get_logger(__name__)
MY_CUSTOMER = 'my_customer'
class GroupDao(dao.Dao):
    """Data access object (DAO) for Groups."""

    def get_all_groups(self, resource_name, timestamp):
        """Get all the groups.

        Args:
            resource_name: String of the resource name.
            timestamp: The timestamp of the snapshot.

        Returns:
            A tuple of the groups as dict.
        """
        sql = select_data.GROUPS.format(timestamp)
        return self.execute_sql_with_fetch(resource_name, sql, None)

    def get_group_id(self, resource_name, group_email, timestamp):
        """Get the group_id for the specified group_email.

        Args:
            resource_name: String of the resource name.
            group_email: String of the group email.
            timestamp: The timestamp of the snapshot.

        Returns:
            String of the group id.

        Raises:
            IndexError: If no group matches group_email in the snapshot.
        """
        sql = select_data.GROUP_ID.format(timestamp)
        result = self.execute_sql_with_fetch(resource_name, sql, (group_email,))
        return result[0].get('group_id')

    def get_group_members(self, resource_name, group_id, timestamp):
        """Get the members of a group.

        Args:
            resource_name: String of the resource name.
            group_id: String of the group id.
            timestamp: The timestamp of the snapshot.

        Returns:
            A tuple of group members in dict format.
            ({'group_id': '00lnxb',
              'member_email': 'foo@mygbiz.com',
              'member_id': '11111',
              'member_role': 'OWNER',
              'member_type': 'USER'}, ...)
        """
        sql = select_data.GROUP_MEMBERS.format(timestamp)
        return self.execute_sql_with_fetch(resource_name, sql, (group_id,))

    def get_recursive_members_of_group(self, group_email, timestamp):
        """Get all the recursive members of a group.

        Performs a breadth-first traversal over nested groups. Each group is
        expanded at most once so that cyclic memberships cannot loop forever
        and shared subgroups are not expanded repeatedly.

        Args:
            group_email: String of the group email.
            timestamp: The timestamp of the snapshot.

        Returns:
            A list of group members in dict format.
            [{'group_id': '00lnxb',
              'member_email': 'foo@mygbiz.com',
              'member_id': '11111',
              'member_role': 'OWNER',
              'member_type': 'USER'}, ...]
        """
        all_members = []
        queue = Queue()
        group_id = self.get_group_id('group', group_email, timestamp)
        queue.put(group_id)
        # Track already-expanded groups to terminate on cyclic membership.
        expanded_group_ids = set([group_id])
        while not queue.empty():
            group_id = queue.get()
            members = self.get_group_members('group_members', group_id,
                                             timestamp)
            for member in members:
                all_members.append(member)
                if member.get('member_type') == 'GROUP':
                    member_id = member.get('member_id')
                    if member_id not in expanded_group_ids:
                        expanded_group_ids.add(member_id)
                        queue.put(member_id)
        return all_members
|
bhanu-mnit/EvoML | evoml/subsampling/test_auto_segmentEG_FEGT.py | Python | gpl-3.0 | 1,004 | 0.022908 | import pandas as pd
from sklearn.datasets import load_boston
from sklearn.linear_model import LinearRegression
from sklearn.tree import DecisionTreeRegressor
from sklearn.cross_validation import train_test_split
from sklearn.metrics import mean_squared_error
from .auto_segment_FEGT import BasicSegmenter_FEGT
def demo(X=None, y=None, test_size=0.1):
    """Train a BasicSegmenter_FEGT (Boston housing data by default) and
    report its score and MSE on a held-out split.

    Args:
        X: feature DataFrame; when None, the Boston housing data is loaded.
        y: target DataFrame; when None, the Boston housing target is loaded.
        test_size: fraction of the data held out for evaluation.

    Returns:
        The fitted BasicSegmenter_FEGT estimator.
    """
    # 'X == None' is wrong for DataFrames (elementwise comparison raises a
    # truth-value error); identity check is the correct test.
    if X is None:
        boston = load_boston()
        X = pd.DataFrame(boston.data)
        y = pd.DataFrame(boston.target)
    base_estimator = DecisionTreeRegressor(max_depth=5)
    X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                        test_size=test_size)
    print(X_train.shape)
    clf = BasicSegmenter_FEGT(ngen=30, init_sample_percentage=1, n_votes=10,
                              n=10, base_estimator=base_estimator)
    clf.fit(X_train, y_train)
    print(clf.score(X_test, y_test))
    # Keep predictions in their own name instead of shadowing the target y.
    y_pred = clf.predict(X_test)
    print(mean_squared_error(y_pred, y_test))
    print(y_pred.shape)
    print(type(y_pred))
    return clf
|
lewislone/mStocks | packets-analysis/lib/XlsxWriter-0.7.3/examples/chart_scatter.py | Python | mit | 5,416 | 0.000923 | #######################################################################
#
# An example of creating Excel Scatter charts with Python and XlsxWriter.
#
# Copyright 2013-2015, John McNamara, jmcnamara@cpan.org
#
import xlsxwriter
workbook = xlsxwriter.Workbook('chart_scatter.xlsx')
worksheet = workbook.add_worksheet()
bold = workbook.add_format({'bold': 1})
# Add the worksheet data that the charts will refer to.
headings = ['Number', 'Batch 1', 'Batch 2']
data = [
[2, 3, 4, 5, 6, 7],
[10, 40, 50, 20, 10, 50],
[30, 60, 70, 50, 40, 30],
]
worksheet.write_row('A1', headings, bold)
worksheet.write_column('A2', data[0])
worksheet.write_column('B2', data[1])
worksheet.write_column('C2', data[2])
#######################################################################
#
# Create a new scatter chart.
#
chart1 = workbook.add_chart({'type': 'scatter'})
# Configure the first series.
chart1.add_series({
'name': '=Sheet1!$B$1',
'categories': '=Sheet1!$A$2:$A$7',
'values': '=Sheet1!$B$2:$B$7',
})
# Configure second series. Note use of alternative syntax to define ranges.
chart1.add_series({
'name': ['Sheet1', 0, 2],
'categories': ['Sheet1', 1, 0, 6, 0],
'values': ['Sheet1', 1, 2, 6, 2],
})
# Add a chart title and some axis labels.
chart1.set_title ({'name': 'Results of sample analysis'})
chart1.set_x_axis({'name': 'Test number'})
chart1.set_y_axis({'name': 'Sample length (mm)'})
# Set an Excel chart style.
chart1.set_style(11)
# Insert the chart into the worksheet (with an offset).
worksheet.insert_chart('D2', chart1, {'x_offset': 25, 'y_offset': 10})
#######################################################################
#
# Create a scatter chart sub-type with straight lines and markers.
#
chart2 = workbook.add_chart({'type': 'scatter',
                             'subtype': 'straight_with_markers'})

# Configure the first series.
chart2.add_series({
    'name':       '=Sheet1!$B$1',
    'categories': '=Sheet1!$A$2:$A$7',
    'values':     '=Sheet1!$B$2:$B$7',
})

# Configure second series.
chart2.add_series({
    'name':       '=Sheet1!$C$1',
    'categories': '=Sheet1!$A$2:$A$7',
    'values':     '=Sheet1!$C$2:$C$7',
})

# Add a chart title and some axis labels.
chart2.set_title ({'name': 'Straight line with markers'})
chart2.set_x_axis({'name': 'Test number'})
chart2.set_y_axis({'name': 'Sample length (mm)'})

# Set an Excel chart style.
chart2.set_style(12)

# Insert the chart into the worksheet (with an offset).
worksheet.insert_chart('D18', chart2, {'x_offset': 25, 'y_offset': 10})
#######################################################################
#
# Create a scatter chart sub-type with straight lines and no markers.
#
chart3 = workbook.add_chart({'type': 'scatter',
'subtype': 'straight'})
# Configure the first series.
chart3.add_series({
'name': '=Sheet1!$B$1',
'categories': '=Sheet1!$A$2:$A$7',
'values': '=Sheet1!$B$2:$B$7',
})
# Configure second series.
chart3.add_series({
'name': '=Sheet1!$C$1',
'categories': '=Sheet1!$A$2:$A$7',
'values': '=Sheet1!$C$2:$C$7',
})
# Add a chart title and some axis labels.
chart3.set_title ({'name': 'Straight line'})
chart3.set_x_axis({'name': 'Test number'})
chart3.set_y_axis({'name': 'Sample length (mm)'})
# Set an Excel chart style.
chart3.set_style(13)
# Insert the chart into the worksheet (with an offset).
worksheet.insert_chart('D34', chart3, {'x_offset': 25, 'y_offset': 10})
#######################################################################
#
# Create a scatter chart sub-type with smooth lines and markers.
#
chart4 = workbook.add_chart({'type': 'scatter',
'subtype': 'smooth_with_markers'})
# Configure the first series.
chart4.add_series({
'name': '=Sheet1!$B$1',
'categories': '=Sheet1!$A$2:$A$7',
'values': '=Sheet1!$B$2:$B$7',
})
# Configure second series.
chart4.add_series({
'name': '=Sheet1!$C$1',
'categories': '=Sheet1!$A$2:$A$7',
'values': '=Sheet1!$C$2:$C$7',
})
# Add a chart title and some axis labels.
chart4.set_title ({'name': 'Smooth line with markers'})
chart4.set_x_axis({'name': 'Test number'})
chart4.set_y_axis({'name': 'Sample length (mm)'})
# Set an Excel chart style.
chart4.set_style(14)
# Insert the chart into the worksheet (with an offset).
worksheet.insert_chart('D51', chart4, {'x_offset': 25, 'y_offset': 10})
#######################################################################
#
# Create a scatter chart sub-type with smooth lines and no markers.
#
chart5 = workbook.add_chart({'type': 'scatter',
'subtype': 'smooth'})
# Configure the first series.
chart5.add_series({
'name': '=Sheet1!$B$1',
'categories': '=Sheet1!$A$2:$A$7',
'values': '=Sheet1!$B$2:$B$7',
})
# Configure second series.
chart5.add_series({
'name': '=Sheet1!$C$1',
'categories': '=Sheet1!$A$2:$A$7',
'values': '=Sheet1!$C$2:$C$7',
})
# Add a chart title and some axis labels.
chart5.set_title ({'name': 'Smooth line'})
chart5.set_x_axis({'name': 'Test number'})
chart5.set_y_axis({'name': 'Sample length (mm)'})
# Set an Excel chart style.
chart5.set_style(15)
# Insert the chart into the worksheet (with an offset).
worksheet.insert_chart('D66', chart5, {'x_offset': 25, 'y_offset': 10})
workbook.close()
|
miti0/mosquito | strategies/ai/scikitbase.py | Python | gpl-3.0 | 1,418 | 0.002116 | from abc import ABC
import configargparse
from sklearn.externals import joblib
from termcolor import colored
class ScikitBase(ABC):
    """Base class for AI strategies backed by a trained scikit-learn
    model/pipeline loaded from a *.pkl file given on the command line.
    """
    arg_parser = configargparse.get_argument_parser()
    arg_parser.add('-p', '--pipeline', help='trained model/pipeline (*.pkl file)', required=True)
    arg_parser.add('-f', '--feature_names', help='List of features list pipeline (*.pkl file)')
    # The unpickled model/pipeline; populated in __init__.
    pipeline = None

    def __init__(self):
        args = self.arg_parser.parse_known_args()[0]
        super(ScikitBase, self).__init__()
        self.pipeline = self.load_pipeline(args.pipeline)
        # Feature-name list is optional; only set when the flag was given.
        if args.feature_names:
            self.feature_names = self.load_pipeline(args.feature_names)

    @staticmethod
    def load_pipeline(pipeline_file):
        """Loads and returns a pickled scikit model/pipeline from disk."""
        print(colored('Loading pipeline: ' + pipeline_file, 'green'))
        return joblib.load(pipeline_file)

    def fetch_pipeline_from_server(self):
        """Method fetches pipeline from server/cloud (not implemented yet)."""
        # TODO
        pass

    def predict(self, df):
        """Returns predictions based on the model/pipeline, or None when the
        input could not be scored (wrong shape/dtype)."""
        try:
            return self.pipeline.predict(df)
        except (ValueError, TypeError):
            print(colored('Got ValueError while using scikit model.. ', 'red'))
            return None
|
c3cashdesk/c6sh | src/postix/core/migrations/0005_auto_20160207_1138.py | Python | agpl-3.0 | 485 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-07 10:38
from __future__ import unicode | _literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds a db index and unique constraint to PreorderPosition.secret.

    dependencies = [
        ('core', '0004_warningconstraint'),
    ]

    operations = [
        migrations.AlterField(
            model_name='preorderposition',
            name='secret',
            field=models.CharField(db_index=True, max_length=254, unique=True),
        ),
    ]
|
wfg2af/cs3240-labdemo | MessagePassing.py | Python | mit | 2,813 | 0.01102 | __author__ = 'wfg2af'
from Crypto.PublicKey import RSA
from Crypto.Hash import SHA256
from Crypto import Random
class userMessages:
    """Per-user messaging endpoint.

    Outgoing messages are encrypted with the recipient's RSA public key and
    signed with this user's private key; incoming messages are verified
    against the claimed sender's public key before decryption.
    """
    # Mapping of user name -> RSA public key of every known peer.
    UserMap = {}
    UserName = ""

    def __init__(self, UserName="", UserMap=None, key=None):
        # Bug fix: the old default 'UserMap={}' was a mutable default
        # argument shared by every instance constructed without an explicit
        # map; create a fresh dict per instance instead.
        self.UserMap = {} if UserMap is None else UserMap
        self.UserName = UserName
        # This user's own RSA key pair (private key needed to sign/decrypt).
        self.key = key

    def send_message(self, message, recipient):
        """Encrypt *message* for *recipient* and sign the ciphertext.

        Returns:
            (ciphertext, signature, sender_name) tuple, or None when the
            recipient's public key is unknown.
        """
        public_key = self.UserMap.get(recipient)
        if public_key is None:
            return None
        encrypted = public_key.encrypt(message.encode(), 32)[0]
        digest = SHA256.new(encrypted).digest()
        signature = self.key.sign(digest, self.UserName)
        # Bug fix: return the exact ciphertext that was signed instead of
        # encrypting a second time, so the signature always matches.
        return encrypted, signature, self.UserName

    def receive_message(self, message_tuple):
        """Verify and decrypt a (ciphertext, signature, sender) tuple.

        Returns:
            The decrypted message text, or an error string when the claimed
            sender is unknown or the signature does not verify.
        """
        message, signature, sender = message_tuple
        digest = SHA256.new(message).digest()
        sender_key = self.UserMap.get(sender)
        if sender_key is not None and sender_key.verify(digest, signature):
            return self.key.decrypt(message).decode()
        return sender + " did not send this message"
if __name__ == "__main__":
random_generator = Random.new().read
k1 = RSA.generate(1024, random_generator)
random_generator = Random.new().read
k2 = RSA.generate(1024, random_generator)
UM = {"User1": k1.publickey(), "User2": k2.publickey()}
user_1 = userMessages(UserName = "User1", UserMap = UM, key = k1)
user_2 = userMessages(UserName = "User2", UserMap = UM, key = k2)
prompt = ""
while(True):
prompt = input("Who is sending a message?: ")
if prompt == "-1":
break
elif prompt == "User1":
prompt = input("What message do you want to send?: ")
sent = user_1.send_message(prompt, "User2")
#message would be stored in data base h | ere and user 2 would be able to receive message at any time.
print("User1 sent: " + user_2.receive_message(sent))
elif prompt == "User2":
prompt = input("What message do you want to s | end?: ")
sent = user_2.send_message(prompt, "User1")
print("User2 sent: " + user_1.receive_message(sent))
else:
print("doing invalid signature test")
k3 = RSA.generate(1024, Random.new().read)
print("The message being sent to user1 is: hi")
message = k1.publickey().encrypt("hi".encode(), 32)[0]
hash = SHA256.new(message).digest()
signature = k3.sign(hash, 'User2')
print(user_1.receive_message((message,signature,"User2")))
|
lechat/CouchPotato | app/lib/provider/movie/sources/theMovieDb.py | Python | gpl-3.0 | 5,746 | 0.006091 | from app.config.cplog import CPLog
from app.lib.provider.movie.base import movieBase
from imdb import IMDb
from urllib import quote_plus
from urllib2 import URLError
import cherrypy
import os
import urllib2
log = CPLog(__name__)
class theMovieDb(movieBase):
    """Api for theMovieDb (movie metadata provider)."""

    apiUrl = 'http://api.themoviedb.org/2.1'
    imageUrl = 'http://hwcdn.themoviedb.org'

    def __init__(self, config):
        log.info('Using TheMovieDb provider.')
        self.config = config

    def conf(self, option):
        ''' Read an option from the [TheMovieDB] section of the config. '''
        return self.config.get('TheMovieDB', option)

    def find(self, q, limit = 8, alternative = True):
        ''' Find movie by name; returns a list of feed items (or False when
        the provider is disabled, [] on network errors). '''
        if self.isDisabled():
            return False

        log.debug('TheMovieDB - Searching for movie: %s' % q)
        url = "%s/%s/en/xml/%s/%s" % (self.apiUrl, 'Movie.search', self.conf('key'), quote_plus(self.toSearchString(q)))

        try:
            log.info('Searching: %s' % url)
            data = urllib2.urlopen(url, timeout = self.timeout)
            return self.parseXML(data, limit, alternative = alternative)
        except:
            return []

    def findById(self, id):
        ''' Find movie by TheMovieDB ID; returns a single feed item or False. '''
        if self.isDisabled():
            return False

        xml = self.getXML(id)
        if xml:
            results = self.parseXML(xml, limit = 8)
            return results.pop(0)
        else:
            return False

    def findByImdbId(self, id):
        ''' Find movie by IMDB ID; returns a single feed item or []. '''
        if self.isDisabled():
            return False

        url = "%s/%s/en/xml/%s/%s" % (self.apiUrl, 'Movie.imdbLookup', self.conf('key'), id)
        try:
            data = urllib2.urlopen(url, timeout = self.timeout)
        except (IOError, URLError):
            log.error('Failed to open %s.' % url)
            return []

        results = self.parseXML(data, limit = 8, alternative = False)
        if results:
            return results.pop(0)
        else:
            return []

    def parseXML(self, data, limit, alternative = True):
        ''' Turn a Movie.* XML response into a list of feed items, filling in
        a missing year from IMDB when possible. '''
        if data:
            log.debug('TheMovieDB - Parsing RSS')
            try:
                xml = self.getItems(data, 'movies/movie')

                results = []
                nr = 0
                for movie in xml:
                    id = int(self.gettextelement(movie, "id"))
                    name = self.gettextelement(movie, "name")
                    imdb = self.gettextelement(movie, "imdb_id")
                    year = str(self.gettextelement(movie, "released"))[:4]

                    # 1900 is the same as None
                    if year == '1900':
                        year = 'None'

                    # do some IMDB searching if needed
                    if year == 'None':
                        i = IMDb('mobile')
                        if imdb:
                            log.info('Found movie, but with no date, getting data from %s.' % imdb)
                            r = i.get_movie(imdb.replace('tt', ''))
                            year = r.get('year', None)
                        else:
                            log.info('Found movie, but with no date, searching IMDB.')
                            r = i.search_movie(name)
                            if len(r) > 0:
                                imdb = 'tt' + r[0].movieID
                                year = r[0].get('year', None)

                    results.append(self.fillFeedItem(id, name, imdb, year))

                    # Also list the alternative title when it genuinely differs.
                    alternativeName = self.gettextelement(movie, "alternative_name")
                    if alternativeName and alternative:
                        if alternativeName.lower() != name.lower() and alternativeName.lower() != 'none' and alternativeName != None:
                            results.append(self.fillFeedItem(id, alternativeName, imdb, year))

                    nr += 1
                    if nr == limit:
                        break

                log.info('TheMovieDB - Found: %s' % results)
                return results
            except SyntaxError:
                log.error('TheMovieDB - Failed to parse XML response from TheMovieDb')
                return False

    def getXML(self, id):
        ''' Fetch the Movie.getInfo XML for a TheMovieDB id; False on failure. '''
        if self.isDisabled():
            return False

        try:
            url = "%s/%s/en/xml/%s/%s" % (self.apiUrl, 'Movie.getInfo', self.conf('key'), id)
            data = urllib2.urlopen(url, timeout = self.timeout)
        except:
            data = False

        return data

    def saveImage(self, url, destination):
        ''' Download a poster/thumb into the image cache (once); returns the
        relative cache path, or False when the download fails. '''
        if url[:7] != 'http://':
            url = self.imageUrl + url

        # Make dir
        imageCache = os.path.join(cherrypy.config.get('cachePath'), 'images')
        if not os.path.isdir(imageCache):
            os.mkdir(imageCache)

        # Return old file if it is already cached
        imageFile = os.path.join(imageCache, destination)
        if not os.path.isfile(imageFile):
            try:
                data = urllib2.urlopen(url, timeout = 10)

                # Write file
                with open(imageFile, 'wb') as f:
                    f.write(data.read())
            except (IOError, URLError):
                log.error('Failed get thumb %s.' % url)
                return False

        return 'cache/images/' + destination

    def fillFeedItem(self, id, name, imdb, year):
        ''' Build a feed item from the raw provider fields. '''
        item = self.feedItem()
        item.id = id
        item.name = self.toSaveString(name)
        item.imdb = imdb
        item.year = year
        return item

    def isDisabled(self):
        ''' True when no TheMovieDB API key is configured. '''
        if self.conf('key') == '':
            log.error('TheMovieDB - No API key provided for TheMovieDB')
            # Bug fix: these were bare 'True'/'False' expressions before, so
            # the method always returned None and the disabled check in the
            # find* methods never fired.
            return True
        else:
            return False

    def findReleaseDate(self, movie):
        pass
|
SlideAtlas/SlideAtlas-Server | slideatlas/__init__.py | Python | apache-2.0 | 74 | 0 | # coding= | utf-8
# Re-export the application factories at package level.
from slideatlas.core import create_app, create_celery_app
|
keishi/chromium | ui/resources/resource_check/resource_scale_factors.py | Python | bsd-3-clause | 3,857 | 0.006222 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for Chromium browser resources.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl/git cl, and see
http://www.chromium.org/developers/web-development-style-guide for the rules
we're checking against here.
"""
import os
import struct
class ResourceScaleFactors(object):
    """Verifier of image dimensions for Chromium resources.

    This class verifies the image dimensions of resources in the various
    resource subdirectories.

    Attributes:
        paths: An array of tuples giving the folders to check and their
            relevant scale factors. For example:
            [(1, 'default_100_percent'), (2, 'default_200_percent')]
    """

    def __init__(self, input_api, output_api, paths):
        """Initializes ResourceScaleFactors with paths."""
        self.input_api = input_api
        self.output_api = output_api
        self.paths = paths

    def RunChecks(self):
        """Verifies the scale factors of resources being added or modified.

        Returns:
            An array of presubmit errors if any images were detected not
            having the correct dimensions.
        """
        def ImageSize(filename):
            # Width and height live in the PNG IHDR chunk as big-endian
            # 32-bit ints at byte offsets 16 and 20.
            with open(filename, 'rb', buffering=0) as f:
                data = f.read(24)
                assert data[:8] == '\x89PNG\r\n\x1A\n' and data[12:16] == 'IHDR'
                return struct.unpack('>ii', data[16:24])

        # TODO(flackr): This should allow some flexibility for non-integer scale
        # factors such as allowing any size between the floor and ceiling of
        # base * scale.
        def ExpectedSize(base_width, base_height, scale):
            return round(base_width * scale), round(base_height * scale)

        repository_path = self.input_api.os_path.relpath(
            self.input_api.PresubmitLocalPath(),
            self.input_api.change.RepositoryRoot())
        results = []
        # Check for affected files in any of the paths specified.
        affected_files = self.input_api.AffectedFiles(include_deletes=False)
        files = []
        for f in affected_files:
            for path_spec in self.paths:
                path_root = self.input_api.os_path.join(
                    repository_path, path_spec[1])
                if (f.LocalPath().endswith('.png') and
                        f.LocalPath().startswith(path_root)):
                    # Only save the relative path from the resource directory.
                    relative_path = self.input_api.os_path.relpath(
                        f.LocalPath(), path_root)
                    if relative_path not in files:
                        files.append(relative_path)
        for f in files:
            base_image = self.input_api.os_path.join(self.paths[0][1], f)
            if not os.path.exists(base_image):
                results.append(self.output_api.PresubmitError(
                    'Base image %s does not exist' % self.input_api.os_path.join(
                        repository_path, base_image)))
                continue
            base_width, base_height = ImageSize(base_image)
            # Find all scaled versions of the base image and verify their sizes.
            for i in range(1, len(self.paths)):
                image_path = self.input_api.os_path.join(self.paths[i][1], f)
                if not os.path.exists(image_path):
                    continue
                # Ensure that each image for a particular scale factor is the
                # correct scale of the base image.
                exp_width, exp_height = ExpectedSize(base_width, base_height,
                                                     self.paths[i][0])
                width, height = ImageSize(image_path)
                if width != exp_width or height != exp_height:
                    results.append(self.output_api.PresubmitError(
                        'Image %s is %dx%d, expected to be %dx%d' % (
                            self.input_api.os_path.join(repository_path, image_path),
                            width, height, exp_width, exp_height)))
        return results
|
seraphln/chat2all | chat2all/sso/weibo/api.py | Python | gpl-2.0 | 3,381 | 0.001775 |
#!/usr/bin/env python
# coding=utf8
#
"""
Python SDK for Weibo
Simple wrapper for weibo oauth2
author: seraphwlq@gmail.com
"""
import time
from utils.http import request
from utils.http import SDataDict
from utils.http import encode_params
from utils.const import WEIBO_DOMAIN
from utils.const import WEIBO_VERSION
from utils.errors import WeiboAPIError
from utils.errors import SSOBaseException
class HttpObject(object):
    """Proxy turning attribute access into Weibo API invocations.

    Accessing ``obj.statuses__show`` yields a callable that issues an HTTP
    request to ``<api_url>statuses/show.json`` with the client's access
    token ('__' in the attribute name maps to '/' in the URL path).
    """
    def __init__(self, client, method):
        self.client = client    # APIClient supplying auth state and base URL
        self.method = method    # HTTP verb: 'GET', 'POST' or 'UPLOAD'
    def __getattr__(self, attr):
        path = attr.replace('__', '/')
        def invoke(**kwargs):
            # Refuse to issue a request once the token has expired.
            if self.client.is_expires():
                raise WeiboAPIError('21327', 'expired_token')
            url = '%s%s.json' % (self.client.api_url, path)
            return request(self.method, url,
                           self.client.access_token,
                           **kwargs)
        return invoke
class APIClient(object):
    """ Weibo API client using synchronized (blocking) invocation. """
    def __init__(self, app_key, app_secret, redirect_uri=None, response_type='code'):
        self.client_id = app_key
        self.client_secret = app_secret
        self.redirect_uri = redirect_uri
        self.response_type = response_type
        self.auth_url = 'http://%s/oauth2/' % WEIBO_DOMAIN
        self.api_url = 'https://%s/%s/' % (WEIBO_DOMAIN, WEIBO_VERSION)
        # NOTE(review): this immediately overwrites the https URL above --
        # looks like a leftover endpoint toggle; confirm which is intended.
        self.api_url = 'http://%s/' % WEIBO_DOMAIN
        self.access_token = None
        self.expires = 0.0  # absolute unix time at which the token expires
        self.get = HttpObject(self, 'GET')
        self.post = HttpObject(self, 'POST')
        self.upload = HttpObject(self, 'UPLOAD')
    def set_access_token(self, access_token, expires_in):
        # expires_in is treated as an absolute unix timestamp (see is_expires).
        self.access_token = str(access_token)
        self.expires = float(expires_in)
    def get_authorize_url(self, redirect_uri=None, display='default'):
        """ Return the OAuth2 authorize URL the user should be redirected to. """
        redirect = redirect_uri if redirect_uri else self.redirect_uri
        if not redirect:
            raise WeiboAPIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        kwargs = dict(client_id=self.client_id,
                      response_type='code',
                      display=display,
                      redirect_uri=redirect)
        encoded_params, _ = encode_params('GET', **kwargs)
        # (leftover debug print of encoded_params removed)
        return '%s%s?%s' % (self.auth_url, 'authorize', encoded_params)
    def request_access_token(self, code, redirect_uri=None):
        """
        Exchange an authorization code for an access token object:
            {"access_token":"your-access-token","expires_in":12345678}
        expires_in is converted below to an absolute unix-epoch time.
        """
        redirect = redirect_uri if redirect_uri else self.redirect_uri
        if not redirect:
            raise WeiboAPIError('21305', 'Parameter absent: redirect_uri')
        r = request('GET', '%s%s' % (self.auth_url, 'access_token'),
                    client_id=self.client_id, client_secret=self.client_secret,
                    redirect_uri=redirect, code=code, grant_type='authorization_code')
        r.expires_in += int(time.time())
        return r
    def is_expires(self):
        # Expired when no token is set or the absolute deadline has passed.
        return not self.access_token or time.time() > self.expires
    def __getattr__(self, attr):
        # Bare attribute access falls back to GET: client.statuses__show(...)
        return getattr(self.get, attr)
|
henare/parlparse | pyscraper/gettwittermps.py | Python | agpl-3.0 | 1,744 | 0.008601 | #!/usr/bin/python
import urllib2
import csv
import xml.sax
# Published Google spreadsheet (CSV export) mapping MPs to twitter usernames.
uri = "http://spreadsheets.google.com/tq?tqx=out:csv&key=0AjWA_TWMI4t_dFI5MWRWZkRWbFJ6MVhHQzVmVndrZnc&hl=en_GB"
f = urllib2.urlopen(uri)
csv_data = f.read()
lines = csv_data.split("\n")
# Parse the downloaded CSV text line by line.
rows = csv.reader(lines.__iter__(), delimiter=',', quotechar='"')
class PeopleParser(xml.sax.handler.ContentHandler):
    """SAX handler building a mapping from office ids to person ids.

    Expects <person id="..."> elements containing <office id="..."/>
    children, as found in members/people.xml.
    """
    def __init__(self):
        self.parser = xml.sax.make_parser()
        self.parser.setContentHandler(self)
    def parse(self, filename):
        # Reset the mapping so the handler can be reused for several files.
        self.office_id_to_person_id = {}
        self.parser.parse(filename)
    def startElement(self, name, attrs):
        if name == 'person':
            self.current_person_id = attrs['id']
        elif name == 'office':
            # Offices are nested inside their owning person element.
            self.office_id_to_person_id[attrs['id']] = self.current_person_id
    def endElement(self, name):
        if name == 'person':
            self.current_person_id = None
# Build the office-id -> person-id mapping from the canonical people file.
people_parser = PeopleParser()
people_parser.parse("../members/people.xml")
person_id_to_twitter_username = {}
output_filename = "../members/twitter-commons.xml"
fp = open(output_filename,"w")
fp.write('''<?xml version="1.0" encoding="ISO-8859-1"?>
<publicwhip>
''')
for r in rows:
    # Skip malformed rows too short to hold the username column.
    if len(r) < 5:
        continue
    member_id = r[2]
    twitter_username = r[4]
    if member_id == "url":
        # That's the header line...
        continue
    if len(twitter_username) == 0:
        continue
    if member_id not in people_parser.office_id_to_person_id:
        # Fix: was a bare string raise, which is a TypeError in modern Python.
        raise Exception("No person ID found for %s in line %s" % (member_id,"#".join(r)))
    person_id = people_parser.office_id_to_person_id[member_id]
    fp.write("<personinfo id=\"%s\" twitter_username=\"%s\"/>\n"%(person_id,twitter_username))
fp.write("</publicwhip>")
|
batermj/algorithm-challenger | code-analysis/programming_anguage/python/source_codes/Python3.5.9/Python-3.5.9/Lib/test/test_email/test__header_value_parser.py | Python | apache-2.0 | 117,222 | 0.00564 | import string
import unittest

from email import _header_value_parser as parser
from email import errors
from email import policy

from test.test_email import TestEmailBase, parameterize
class TestTokens(TestEmailBase):
    # EWWhiteSpaceTerminal
    def test_EWWhiteSpaceTerminal(self):
        # Whitespace inside an encoded word keeps its source text ('encoded')
        # but renders as the empty string.
        x = parser.EWWhiteSpaceTerminal(' \t', 'fws')
        self.assertEqual(x, ' \t')
        self.assertEqual(str(x), '')
        self.assertEqual(x.value, '')
        self.assertEqual(x.encoded, ' \t')
    # UnstructuredTokenList
    def test_undecodable_bytes_error_preserved(self):
        # A surrogate-escaped (undecodable) byte must surface as a defect on
        # the whole list and on the specific token containing it, only.
        badstr = b"le pouf c\xaflebre".decode('ascii', 'surrogateescape')
        unst = parser.get_unstructured(badstr)
        self.assertDefectsEqual(unst.all_defects, [errors.UndecodableBytesDefect])
        parts = list(unst.parts)
        self.assertDefectsEqual(parts[0].all_defects, [])
        self.assertDefectsEqual(parts[1].all_defects, [])
        self.assertDefectsEqual(parts[2].all_defects, [errors.UndecodableBytesDefect])
class TestParserMixin:
    # Shared helpers for asserting on (token_list, unparsed_remainder) results.
    def _assert_results(self, tl, rest, string, value, defects, remainder,
                        comments=None):
        # tl: parsed TokenList; rest: remainder string returned by the parser.
        self.assertEqual(str(tl), string)
        self.assertEqual(tl.value, value)
        self.assertDefectsEqual(tl.all_defects, defects)
        self.assertEqual(rest, remainder)
        if comments is not None:
            self.assertEqual(tl.comments, comments)
    def _test_get_x(self, method, source, string, value, defects,
                    remainder, comments=None):
        tl, rest = method(source)
        # NOTE(review): the 'comments' argument is accepted but not forwarded
        # (comments=None below), so comment checks are always skipped here --
        # confirm whether passing 'comments' through was intended.
        self._assert_results(tl, rest, string, value, defects, remainder,
                             comments=None)
        return tl
    def _test_parse_x(self, method, input, string, value, defects,
                      comments=None):
        tl = method(input)
        self._assert_results(tl, '', string, value, defects, '', comments)
        return tl
class TestParser(TestParserMixin, TestEmailBase):
# _wsp_splitter
rfc_printable_ascii = bytes(range(33, 127)).decode('ascii')
rfc_atext_chars = (string.ascii_letters + string.digits +
"!#$%&\'*+-/=?^_`{}|~")
rfc_dtext_chars = rfc_printable_ascii.translate(str.maketrans('','',r'\[]'))
def test__wsp_splitter_one_word(self):
self.assertEqual(parser._wsp_splitter('foo', 1), ['foo'])
def test__wsp_splitter_two_words(self):
self.assertEqual(parser._wsp_splitter('foo def', 1),
['foo', ' ', 'def'])
def test__wsp_splitter_ws_runs(self):
self.assertEqual(parser._wsp_splitter('foo \t def jik', 1),
['foo', ' \t ', 'def jik'])
# get_fws
def test_get_fws_only(self):
fws = self._test_get_x(parser.get_fws, ' \t ', ' \t ', ' ', [], '')
self.assertEqual(fws.token_type, 'fws')
def test_get_fws_space(self):
self._test_get_x(parser.get_fws, ' foo', ' ', ' ', [], 'foo')
def test_get_fws_ws_run(self):
self._test_get_x(parser.get_fws, ' \t foo ', ' \t ', ' ', [], 'foo ')
# get_encoded_word
def test_get_encoded_word_missing_start_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_encoded_word('abc')
def test_get_encoded_word_missing_end_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_encoded_word('=?abc')
def test_get_encoded_word_missing_middle_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_encoded_word('=?abc?=')
def test_get_encoded_word_valid_ew(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?this_is_a_test?= bird',
'this is a test',
'this is a test',
[],
' bird')
def test_get_encoded_word_internal_spaces(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?this is a test?= bird',
'this is a test',
'this is a test',
[errors.InvalidHeaderDefect],
' bird')
def test_get_encoded_word_gets_first(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?first?= =?utf-8?q?second?=',
'first',
'first',
[],
' =?utf-8?q?second?=')
def test_get_encoded_word_gets_first_even_if_no_space(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?first?==?utf-8?q?second?=',
'first',
'first',
[],
'=?utf-8?q?second?=')
def test_get_encoded_word_sets_extra_attributes(self):
ew = self._test_get_x(parser.get_encoded_word,
'=?us-ascii*jive?q?first_second?=',
'first second',
'first second',
[],
'')
self.assertEqual(ew.encoded, '=?us-ascii*jive?q?first_second?=')
self.assertEqual(ew.charset, 'us-ascii')
self.assertEqual(ew.lang, 'jive')
def test_get_encoded_word_lang_default_is_blank(self):
ew = self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?first_second?=',
'first second',
'first second',
[],
'')
self.assertEqual(ew.encoded, '=?us-ascii?q?first_second?=')
self.assertEqual(ew.charset, 'us-ascii')
self.assertEqual(ew.lang, '')
def test_get_encoded_word_non_printable_defect(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?first\x02second?=',
'first\x02second',
'first\x02second',
[errors.NonPrintableDefect],
'')
def test_get_encoded_word_leading_internal_space(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?=20foo?=',
' foo',
' foo',
[],
'')
def test_get_encoded_word_quopri_utf_escape_follows_cte(self):
# Issue 18044
self._test_get_x(parser.get_encoded_word,
'=?utf-8?q?=C3=89ric?=',
'Éric',
'Éric',
[],
'')
# get_unstructured
def _get_unst(self, value):
token = parser.get_unstructured(value)
return token, ''
def test_get_unstructured_null(self):
self._test_get_x(self._get_unst, '', '', '', [], '')
def test_get_unstructured_one_word(self):
self._test_get_x(self._get_unst, 'foo', 'foo', 'foo', [], '')
def test_get_unstructured_normal_phrase(self):
self._test_get_x(self._get_unst, 'foo bar bird',
'foo bar bird',
'foo bar bird',
[],
'')
def test_get_unstructured_normal_phrase_with_whitespace(self):
self._test_get_x(self._get_unst, 'foo \t bar bird',
'foo \t bar bird',
'foo bar bird',
[],
'')
def test_get_unstructured_leading_whitespace(self):
self._test_get_x(self._get_unst, ' foo bar',
' foo bar',
' foo bar',
[],
'')
def test_get_unstructured_trailing_whi |
rdeits/cryptics | pycryptics/crypticweb/server.py | Python | mit | 2,649 | 0.002643 | import web
from pycryptics.solve_clue import CrypticClueSolver, split_clue_text
import webbrowser
import re
# from fake_solve_clue import FakeCrypticClueSolver as CrypticClueSolver
# from fake_solve_clue import split_clue_text
SERVER = "http://localhost:8080/solve/"
class index:
    def GET(self):
        # Landing page: render the clue-entry form pointed at the solver URL.
        return render.index(SERVER)
class solve:
def GET(self, clue):
clue = clue.strip()
if clue != "":
if not re.match(r"[^\(\)]*\([0-9]+ *[,[0-9 ]*]*\)[ \.a-zA-Z]*", clue):
return render.solver(None, clue, "I don't quite understand the formatting of that clue. Please make sure that the clue is of the form: <br>clue text (length)<br>or<br>clue text (length) pattern<br> as in the examples above.")
try:
phrases, lengths, pattern, answer = split_clue_text(clue)
if sum(lengths) != len(pattern) and pattern != '':
print "length mismatch"
return render.solver(None, clue, "The length of the pattern must exactly match the number of letters in the answer, or you can just leave it blank. Here are some allowable patterns:<br>(5) ....s<br>(3,2) a.e..<br>(9)<br>")
assert len(pattern) == 0 or len(pattern) == sum(lengths), "Answer lengths and length of pattern string must match: sum(%s) != %d" % (lengths, len(pattern))
except Exception as e:
raise e
print e
return render.solver(None, clue, "Something went wrong that I don't know how to handle. Here's python's attempt at an explanation:<br>" + str(e))
if len(phrases) > 7:
return render.solver(None, clue, "Sorry, I can't reliably handle clues longer than 7 phrases yet. Try grouping some words into phrases by putting an underscore instead of a space between them")
solver.setup(clue)
solver.run()
answers = solver.collect_answers()
print "returning:", answers
return render.solver(answers, solver.clue_text, "")
| else: |
return render.solver(None, "", "")
# class halt:
# def POST(self):
# print "trying to halt"
# solver.stop()
# raise web.seeother('/')
if __name__ == '__main__':
    # Wire up templates, URL routes and a single shared solver instance.
    render = web.template.render('pycryptics/crypticweb/templates/')
    urls = ('/', 'index',
            '/solve/(.*)', 'solve')
    solver = CrypticClueSolver()
    app = web.application(urls, globals())
    print "Starting up server. Press Ctrl+c to shut down"
    # webbrowser.open("http://localhost:8080", new=2)
    app.run()
    print "Shutting down...."
|
ActianCorp/dbmv | bin/driverTools.py | Python | apache-2.0 | 12,678 | 0.004417 | #!/usr/bin/env python
# -*- coding: utf-8 -*
# Copyright 2015 Actian Corporation
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import os
import xml.dom.minidom
import logging
from string import maketrans
from string import Template
try:
# Module for Ingres transactional database
import ingresdbi
except ImportError:
pass
try:
# Module for Oracle
import cx_Oracle
except ImportError:
pass
try:
# Module for Sybase ASE
import Sybase
except ImportError:
pass
try:
# Module for MsSql
import pymssql
except ImportError:
pass
try:
# Module for Mysql
import MySQLdb
except ImportError:
pass
try:
# Postgres module
import psycopg2
import psycopg2.extensions
except ImportError:
pass
try:
# Module for ODBC
import pyodbc
pyodbc.pooling = False
except ImportError:
pass
try:
# Module for DB2
import DB2
except ImportError:
pass
# Default databases used when no database has been specified in connect string
defdbs = {"mysql": "mysql", "oracle": "sys", "mssql": "master",
          "teradata": "dbc", "postgres": "postgres", "greenplum": "postgres",
          "db2": "dsndd04", "ase": "master", "progress": "sysprogress",
          "maxdb": "sysinfo", "ingres": "iidbdb", "vector": "iidbdb",
          "asa": "sys", "iq": "sys", "hana": "sys", "zen": "demodata",
          "matrix": "dev","vectorh": "iidbdb","actianx": "iidbdb","avalanche": "db",
          "netezza": "nz"
          }
# Default port used when no port has been specified in connect string.
# Ingres-family entries ("II", "VW", ...) are instance names, not numbers.
# NOTE(review): "netezza" maps to an int while every other value is a string
# -- confirm downstream code tolerates both.
defports = {"mysql": "3306", "oracle": "1521", "mssql": "1433",
            "teradata": "1025", "postgres": "5432", "greenplum": "5432",
            "db2": "446", "ase": "5000", "progress": "8104",
            "maxdb": "7200", "ingres": "II", "vector": "VW",
            "asa": "2638", "iq": "2638", "hana": "00", "zen": "1531",
            "matrix": "1439", "vectorh": "VH", "actianx": "II", "avalanche": "VW",
            "netezza": 5480
            }
# Error table: message templates keyed by symbolic error name (see perror).
errors = {"wrong_db_string": "Wrong format for dbconnect. Given: %s, expected: db='dbtype[-odbc]://hostname[:port][/dbname[?user[&Pass]]]'",
          "unknown_db_type": "This type of database is unknown", "unknown_driver": "Unknown driver"}
# Environment variables
g_lib = os.path.dirname(__file__)
ODBCINI = ("%s/../etc/%s.odbc") % (g_lib, __name__)
XMLINI = ("%s/../etc/%s.xml") % (g_lib, __name__)
II_DATE_FORMAT = 'SWEDEN' # INGRESDATE datatype formated as '2006-12-15 12:30:55'
os.environ['ODBCINI'] = ODBCINI
os.environ['II_DATE_FORMAT'] = II_DATE_FORMAT
def perror(p_error, p_value=None):
    '''
    Raise a NameError whose message comes from the module errors table.
    p_error: key into the errors dict; p_value: optional detail appended.
    '''
    message = errors[p_error]
    if p_value is not None:
        message = "%s : %s" % (message, p_value)
    raise NameError(message)
# Extract string details
# ---------------------------------------------------------
def getDbStringDetails(p_db):
    # Split a connect string of the form
    #   dbtype[-odbc]://hostname[:port][/dbname[?user[&pwd]]]
    # into its seven components, filling omitted parts from the default
    # tables (defdbs/defports) and placeholder credentials.
    db = p_db
    pattern = re.compile(
        r"^(\w+)(-odbc)?://([a-zA-Z0-9_-]+[\.a-zA-Z0-9_-]*):?([a-zA-Z0-9]*)/?([a-zA-Z0-9_]?[\.a-zA-Z0-9_-]*)\??([\\\.a-z#A-Z0-9_-]*)&?([\\!\.a-zA-Z#0-9_-]*)$")
    # Check parameter match : <dbtype>[-odbc] '://' <hostname [.FQDN]> ':' <port> '/' <dbname> '?' <user> '&' <pwd>
    if not re.match(pattern, db):
        perror("wrong_db_string", db)
    (dbtype, driver, hostname, port, dbname, user, pwd) = pattern.search(db).groups()
    if dbname == '':
        # Setup a default dbname if parameter has been omitted
        dbname = defdbs[dbtype]
    if port == '':
        port = defports[dbtype] # Setup default port
    if user == '':
        # presumably a sentinel placeholder credential -- confirm intent
        user = 'P02Zs5vTR'
    if pwd == '':
        # presumably a sentinel placeholder credential -- confirm intent
        pwd = 'XFNsldj12xxxt'
    if driver not in [None, '-odbc']:
        perror("unknown_driver", driver)
    return((dbtype, driver, hostname, port, dbname, user, pwd))
def getXMLdata(p_key1, p_key2=None, p_key3=None):
    '''
    Get Indexed XML data from XML file.
    With only p_key1: return the text of the first <p_key1> element.
    Otherwise: within the first <p_key1>, return the text of the last
    <p_key2> element whose id attribute equals p_key3 ("" if none match).
    '''
    result = ""
    xmldoc = xml.dom.minidom.parse(XMLINI)
    if (p_key2, p_key3) == (None, None):
        node = xmldoc.getElementsByTagName(p_key1)[0]
        result = node.childNodes[0].data
    else:
        for node in xmldoc.getElementsByTagName(p_key1)[0].getElementsByTagName(p_key2):
            if node.getAttribute("id") == p_key3:
                for child in node.childNodes:
                    if child.nodeType == xml.dom.minidom.Node.TEXT_NODE:
                        result = child.data
    return(result)
class dbconnector:
def __init__(self, p_db, connect = True):
'''
Parameter db="dbtype://hostname:port/dbname?
mysql://localhost:3306/HerongDB?user&password
'''
db = p_db
self.db = None
self.cursor = None
self.dbtype = None
self.logger = logging.getLogger(__name__)
try:
(self.dbtype, driver, hostname, port, dbname, user, pwd) = getDbStringDetails(db)
if (self.dbtype in ["teradata", "maxdb"]) or (driver == "-odbc"):
if(self.dbtype == "mssql"):
# Azure DB connection
driverValue = "{ODBC Driver 13 for SQL Server}"
self.db = pyodbc.connect(
host=hostname, port=port, database=dbname, user=user, password=pwd, driver=driverValue)
else:
dsn = self.odbc(hostname, port, dbname)
self.db = pyodbc.connect(
dsn=dsn, user=user, password=pwd, ansi=True, autocommit=True)
self.cursor = self.db.cursor()
elif self.dbtype == "ase":
# hostname defined in interface file
self.db = Sybase.connect(
dsn=hostname, user=user, passwd=pwd, database=dbname, auto_commit=True)
self.cursor = self.db.cursor()
self.cursor.execute("set quoted_identifier on")
elif self.dbtype in ["asa", "iq"]:
import sqlanydb # Module for Sybase ASA or IQ
s = "%s" % (hostname)
self.db = sqlanydb.connect(
eng=s, userid=user, password=pwd, dbn=dbname)
self.cursor = self.db.cursor()
elif self.dbtype == "mssql":
s = "%s:%s" % (hostname, port)
self.db = pymssql.connect(
host=s, user=user, password=pwd, database=dbname, as_dict=False)
self.cursor = self.db.cursor()
elif self.dbtype == "mysql":
self.db = MySQLdb.connect(host=hostname, port=int(
port), user=user, passwd=pwd, db=dbname)
self.cursor = self.db.cursor()
elif self.dbtype == "db2":
self.db = DB2.connect(dsn=dbname, uid=user, pwd=pwd)
self.cursor = self.db.cursor()
elif self.dbtype in ["postgres", "greenplum"]:
s = "host='%s' port='%s' user='%s' password='%s' dbname='%s'" % (
hostname, port, user, pwd, dbname)
self.db = psycopg2.connect(s)
self.db.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
self.cursor = self.db.cursor()
elif self.dbtype == "oracle":
s = "%s/%s@(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=%s)(PORT=%s))(CONNECT_DATA=(SERVICE_NAME=%s)))"
s = s % (user, pwd, hostname, port, dbname)
self.db = cx_Oracle.connect(s)
self.cursor = s |
cryvate/project-euler | project_euler/solutions/problem_41.py | Python | mit | 441 | 0 | from itertools import permutations
from ..library.number_theory.primes import is_prime
from ..library.base import list_to_number
def solve() -> int:
    """Return the largest pandigital prime (Project Euler 41)."""
    for n in range(9, -1, -1):
        if sum(range(n + 1)) % 3 == 0:
            continue  # digit sum divisible by 3 -> every permutation is too
        # Permutations in descending order, so the first prime is the largest.
        for permutation in permutations(range(n, 0, -1)):
            number = list_to_number(permutation)
            if is_prime(number):
                return number
| |
vicnet/weboob | modules/ratp/test.py | Python | lgpl-3.0 | 1,363 | 0.002201 | # -*- coding: utf-8 -*-
# Copyright(C) 2017 Phyks (Lucas Verney)
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.test import BackendTest
class RATPTest(BackendTest):
    MODULE = 'ratp'

    def test_ratp_gauges(self):
        # The backend exposes 26 RATP lines as gauges.
        l = list(self.backend.iter_gauges())
        assert len(l) == 26

    def test_ratp_gauges_filter(self):
        # Filtering by pattern should narrow the list to the single T3A line.
        l = list(self.backend.iter_gauges(pattern="T3A"))
        assert len(l) == 1

    def test_ratp_sensors(self):
        l = list(self.backend.iter_sensors("ligne_metro_4"))
        assert len(l) == 1

    def test_ratp_status(self):
        # Status levels are encoded as non-positive numbers.
        m = self.backend.get_last_measure("ligne_metro_4_sensor")
        assert m.level <= 0.0
|
mp4096/controlboros | controlboros/tests/test_state_space.py | Python | bsd-3-clause | 4,081 | 0 | """Tests for the LTI discrete-time systems."""
from controlboros import StateSpace
import numpy as np
import pytest
def test_dynamics_single_input():
    """Dynamics x(k+1) = A x + B u with a single (scalar) input."""
    a_mat = np.array([[1.0, 2.0], [0.0, 1.0]])
    b_mat = np.array([[1.0], [3.0]])
    c_mat = np.zeros((1, 2))
    sys = StateSpace(a_mat, b_mat, c_mat)
    assert np.all(sys.dynamics([1.0, 1.0], [1.0]) == np.array([4.0, 4.0]))
def test_dynamics_multiple_inputs():
    """Dynamics x(k+1) = A x + B u with a two-channel input."""
    a_mat = np.array([[1.0, 2.0], [0.0, 1.0]])
    b_mat = np.array([[1.0, 2.0], [3.0, 2.0]])
    c_mat = np.zeros((1, 2))
    sys = StateSpace(a_mat, b_mat, c_mat)
    assert np.all(sys.dynamics([1.0, 1.0], [1.0, 0.0]) == np.array([4.0, 4.0]))
def test_output_siso():
    """Output y = C x + D u for a single input and single output."""
    sys = StateSpace(np.zeros((2, 2)), np.zeros((2, 1)),
                     np.array([[1.0, 1.0]]), np.array([[2.0]]))
    assert sys.output([1.0, 1.0], [1.0]) == np.array([4.0])
def test_output_simo():
    """Output y = C x + D u for a single input and multiple outputs."""
    sys = StateSpace(np.zeros((2, 2)), np.zeros((2, 1)),
                     np.eye(2), np.array([[2.0], [3.0]]))
    assert np.all(sys.output([1.0, 1.0], [1.0]) == np.array([3.0, 4.0]))
def test_output_miso():
    """Test output equation with multiple inputs, single output."""
    a = np.zeros((2, 2))
    b = np.zeros((2, 2))
    c = np.array([[1.0, 1.0]])
    d = np.array([[2.0, 3.0]])
    s = StateSpace(a, b, c, d)
    assert np.all(s.output([1.0, 1.0], [1.0, 1.0]) == np.array([7.0]))
def test_output_mimo():
    """Output y = C x + D u for multiple inputs and multiple outputs."""
    sys = StateSpace(np.zeros((2, 2)), np.zeros((2, 2)), np.eye(2), np.eye(2))
    assert np.all(sys.output([1.0, 1.0], [2.0, 3.0]) == np.array([3.0, 4.0]))
def test_invalid_state_matrix_dimensions():
    """Test exception if A is not square."""
    a = np.zeros((2, 1))
    b = np.zeros((2, 1))
    c = np.zeros((1, 2))
    with pytest.raises(ValueError) as excinfo:
        StateSpace(a, b, c)
    assert "Invalid matrix dimensions" in str(excinfo.value)
def test_invalid_input_matrix_dimensions():
    """StateSpace must reject a B whose row count differs from A's."""
    with pytest.raises(ValueError) as err:
        StateSpace(np.zeros((2, 2)), np.zeros((1, 1)), np.zeros((1, 2)))
    assert "Invalid matrix dimensions" in str(err.value)
def test_invalid_output_matrix_dimensions():
    """StateSpace must reject a C whose column count differs from A's."""
    with pytest.raises(ValueError) as err:
        StateSpace(np.zeros((2, 2)), np.zeros((2, 1)), np.zeros((1, 3)))
    assert "Invalid matrix dimensions" in str(err.value)
def test_invalid_feedthrough_matrix_dimensions():
    """StateSpace must reject a D that does not match B's and C's shapes."""
    with pytest.raises(ValueError) as err:
        StateSpace(np.zeros((2, 2)), np.zeros((2, 3)),
                   np.zeros((4, 2)), np.zeros((4, 2)))
    assert "Invalid matrix dimensions" in str(err.value)
def test_human_friendly_form():
    """__str__() must pretty-print all four system matrices."""
    system = StateSpace(np.array([[1.0, 2.0], [0.0, 1.0]]),
                        np.array([[1.0, 2.0], [3.0, 2.0]]),
                        np.zeros((1, 2)))
    expected = \
        "LTI discrete-time system.\n\n" \
        "State matrix A:\n" \
        "[[ 1.  2.]\n" \
        " [ 0.  1.]]\n\n" \
        "Input matrix B:\n" \
        "[[ 1.  2.]\n" \
        " [ 3.  2.]]\n\n" \
        "Output matrix C:\n" \
        "[[ 0.  0.]]\n\n" \
        "Feedthrough matrix D:\n" \
        "[[ 0.  0.]]\n"
    assert system.__str__() == expected
def test_auto_feedthrough_matrix():
    """A zero feedthrough matrix must be created when D is not given."""
    sys = StateSpace(np.zeros((2, 2)), np.zeros((2, 3)), np.zeros((4, 2)))
    assert sys.d.shape == (4, 3)
|
harishkrao/DSE200x | Week-1-Intro-new/customplot.py | Python | mit | 940 | 0.026596 | #
# First, let us create some utility functions for Plotting
#
def pd_centers(featuresUsed, centers):
    """Build a DataFrame of cluster centers with a 'prediction' column.

    featuresUsed: iterable of feature names, one per coordinate of a center.
    centers: iterable of 1-D arrays, one per cluster.
    Returns a DataFrame whose columns are the feature names plus
    'prediction' (the integer cluster index).
    """
    # Fix: removed unused in-function imports (itertools, matplotlib and
    # pandas.tools.plotting -- the latter no longer exists in modern pandas
    # and made this function fail on import even though it was never used).
    import numpy as np
    import pandas as pd

    colNames = list(featuresUsed)
    colNames.append('prediction')
    # Append the cluster index to each center so it survives the round-trip.
    Z = [np.append(A, index) for index, A in enumerate(centers)]
    P = pd.DataFrame(Z, columns=colNames)
    P['prediction'] = P['prediction'].astype(int)
    return P
def parallel_plot(data):
    """Draw a parallel-coordinates plot of `data`, one colour per cluster.

    `data` must contain a 'prediction' column (see pd_centers); the y-axis
    is clamped to [-2.5, 2.5], which suits z-score normalised features.
    """
    from itertools import cycle, islice
    # NOTE(review): pandas.tools.plotting was removed in pandas >= 0.20;
    # modern pandas exposes parallel_coordinates under pandas.plotting.
    from pandas.tools.plotting import parallel_coordinates
    import matplotlib.pyplot as plt
    # Cycle a fixed palette so any number of clusters gets a colour.
    my_colors = list(islice(cycle(['b', 'r', 'g', 'y', 'k']), None, len(data)))
    plt.figure(figsize=(15,8)).gca().axes.set_ylim([-2.5,+2.5])
    parallel_coordinates(data, 'prediction', color = my_colors, marker='o')
cpennington/edx-platform | openedx/core/djangoapps/content/course_overviews/tests/factories.py | Python | agpl-3.0 | 1,152 | 0 |
from datetime import timedelta
import json
from django.utils import timezone
import factory
from factory.django import DjangoModelFactory
from opaque_keys.edx.locator import CourseLocator
from ..models import CourseOverview
class CourseOverviewFactory(DjangoModelFactory):
    """Factory producing CourseOverview rows for tests."""

    class Meta(object):
        model = CourseOverview
        django_get_or_create = ('id', )
        # 'run' only feeds the course id; it is not a model field.
        exclude = ('run', )

    version = CourseOverview.VERSION
    # Shared class-level default is the factory-boy convention here.
    pre_requisite_courses = []
    org = 'edX'
    run = factory.Sequence('2012_Fall_{}'.format)

    @factory.lazy_attribute
    def _pre_requisite_courses_json(self):
        return json.dumps(self.pre_requisite_courses)

    @factory.lazy_attribute
    def _location(self):
        return self.id.make_usage_key('course', 'course')

    @factory.lazy_attribute
    def id(self):
        return CourseLocator(self.org, 'toy', self.run)

    @factory.lazy_attribute
    def display_name(self):
        return "{} Course".format(self.id)

    @factory.lazy_attribute
    def start(self):
        return timezone.now()

    @factory.lazy_attribute
    def end(self):
        # Courses run for 30 days by default.
        return timezone.now() + timedelta(30)
|
KevinGoodsell/sympy | sympy/simplify/simplify.py | Python | bsd-3-clause | 50,745 | 0.00335 | from sympy import SYMPY_DEBUG
from sympy.core import Basic, S, C, Add, Mul, Pow, Rational, Integer, \
Derivative, Wild, Symbol, sympify, expand, expand_mul, expand_func, \
Function, Equality
from sympy.core.numbers import igcd
from sympy.core.relational import Equality
from sympy.utilities import make_list, all, any, flatten
from sympy.functions import gamma, exp, sqrt, log
from sympy.simplify.cse_main import cse
from sympy.polys import Poly, factor, PolynomialError
import sympy.mpmath as mpmath
def fraction(expr, exact=False):
    """Returns a pair with expression's numerator and denominator.
    If the given expression is not a fraction then this function
    will assume that the denominator is equal to one.
    This function will not make any attempt to simplify nested
    fractions or to do any term rewriting at all.
    If only one of the numerator/denominator pair is needed then
    use numer(expr) or denom(expr) functions respectively.
    >>> from sympy import *
    >>> x, y = symbols('x', 'y')
    >>> fraction(x/y)
    (x, y)
    >>> fraction(x)
    (x, 1)
    >>> fraction(1/y**2)
    (1, y**2)
    >>> fraction(x*y/2)
    (x*y, 2)
    >>> fraction(Rational(1, 2))
    (1, 2)
    This function will also work fine with assumptions:
    >>> k = Symbol('k', negative=True)
    >>> fraction(x * y**k)
    (x, y**(-k))
    If we know nothing about sign of some exponent and 'exact'
    flag is unset, then structure this exponent's structure will
    be analyzed and pretty fraction will be returned:
    >>> fraction(2*x**(-y))
    (2, x**y)
    #>>> fraction(exp(-x))
    #(1, exp(x))
    >>> fraction(exp(-x), exact=True)
    (exp(-x), 1)
    """
    expr = sympify(expr)
    # Note: these locals shadow the module-level numer()/denom() helpers.
    numer, denom = [], []
    # Classify each multiplicative factor as numerator or denominator part.
    for term in make_list(expr, Mul):
        if term.is_Pow:
            if term.exp.is_negative:
                if term.exp is S.NegativeOne:
                    denom.append(term.base)
                else:
                    denom.append(Pow(term.base, -term.exp))
            elif not exact and term.exp.is_Mul:
                # Exponent sign unknown: inspect its leading coefficient.
                coeff, tail = term.exp.args[0], Mul(*term.exp.args[1:])#term.exp.getab()
                if coeff.is_Rational and coeff.is_negative:
                    denom.append(Pow(term.base, -term.exp))
                else:
                    numer.append(term)
            else:
                numer.append(term)
        elif term.func is C.exp:
            if term.args[0].is_negative:
                denom.append(C.exp(-term.args[0]))
            elif not exact and term.args[0].is_Mul:
                # NOTE(review): unlike the Pow branch above, coeff is bound to
                # the WHOLE exponent (term.args[0]) rather than its leading
                # factor, so coeff.is_Rational can never hold for a Mul and
                # this branch never moves e.g. exp(-2*x) to the denominator
                # -- confirm whether term.args[0].args[0] was intended.
                coeff, tail = term.args[0], Mul(*term.args[1:])#term.args.getab()
                if coeff.is_Rational and coeff.is_negative:
                    denom.append(C.exp(-term.args[0]))
                else:
                    numer.append(term)
            else:
                numer.append(term)
        elif term.is_Rational:
            if term.is_integer:
                numer.append(term)
            else:
                # Split p/q into numerator p and denominator q.
                numer.append(Rational(term.p))
                denom.append(Rational(term.q))
        else:
            numer.append(term)
    return Mul(*numer), Mul(*denom)
def numer(expr):
    """Return the numerator of expr (expr itself when not a fraction)."""
    numerator, _ = fraction(expr)
    return numerator
def denom(expr):
    """Return the denominator of expr (1 when expr is not a fraction)."""
    _, denominator = fraction(expr)
    return denominator
def fraction_expand(expr):
    """Expand numerator and denominator separately, then rebuild the quotient."""
    numerator, denominator = fraction(expr)
    return numerator.expand() / denominator.expand()
def numer_expand(expr):
    """Expand only the numerator, leaving the denominator untouched."""
    numerator, denominator = fraction(expr)
    return numerator.expand() / denominator
def denom_expand(expr):
    """Expand only the denominator, leaving the numerator untouched."""
    numerator, denominator = fraction(expr)
    return numerator / denominator.expand()
def separate(expr, deep=False):
    """Rewrite or separate a power of product to a product of powers
    but without any expanding, i.e., rewriting products to summations.
    >>> from sympy import *
    >>> x, y, z = symbols('x', 'y', 'z')
    >>> separate((x*y)**2)
    x**2*y**2
    >>> separate((x*(y*z)**3)**2)
    x**2*y**6*z**6
    >>> separate((x*sin(x))**y + (x*cos(x))**y)
    x**y*cos(x)**y + x**y*sin(x)**y
    >>> separate((exp(x)*exp(y))**x)
    exp(x**2)*exp(x*y)
    >>> separate((sin(x)*cos(x))**y)
    cos(x)**y*sin(x)**y
    Notice that summations are left untouched. If this is not the
    requested behavior, apply 'expand' to input expression before:
    >>> separate(((x+y)*z)**2)
    z**2*(x + y)**2
    >>> separate((x*y)**(1+z))
    x**(1 + z)*y**(1 + z)
    """
    expr = sympify(expr)
    if expr.is_Pow:
        # NOTE(review): 'terms' is assigned but never used below.
        terms, expo = [], separate(expr.exp, deep)
        if expr.base.is_Mul:
            # (a*b)**e -> a**e * b**e, recursing into each factor.
            t = [ separate(C.Pow(t,expo), deep) for t in expr.base.args ]
            return C.Mul(*t)
        elif expr.base.func is C.exp:
            # exp(a)**e -> exp(a*e); expr.base[0] is the exp argument
            # (old-sympy indexing syntax).
            if deep == True:
                return C.exp(separate(expr.base[0], deep)*expo)
            else:
                return C.exp(expr.base[0]*expo)
        else:
            return C.Pow(separate(expr.base, deep), expo)
    elif expr.is_Add or expr.is_Mul:
        # Recurse through sums and products without changing their type.
        return type(expr)(*[ separate(t, deep) for t in expr.args ])
    elif expr.is_Function and deep:
        return expr.func(*[ separate(t) for t in expr.args])
    else:
        return expr
def together(expr, deep=False):
"""Combine together and denest rational functions into a single
fraction. By default the resulting expression is simplified
to reduce the total order of both numerator and denominator
and minimize the number of terms.
Denesting is done recursively on fractions level. However this
function will not attempt to rewrite composite objects, like
functions, interior unless 'deep' flag is set.
By definition, 'together' is a complement to 'apart', so
apart(together(expr)) should left expression unchanged.
>>> from sympy import *
>>> x, y, z = symbols('x', 'y', 'z')
You can work with sums of fractions easily. The algorithm
used here will, in an iterative style, collect numerators
and denominator of all expressions involved and perform
needed simplifications:
>>> together(1/x + 1/y)
(x + y)/(x*y)
| >>> together(1/x + | 1/y + 1/z)
(x*y + x*z + y*z)/(x*y*z)
>>> together(1/(x*y) + 1/y**2)
(x + y)/(x*y**2)
Or you can just denest multi-level fractional expressions:
>>> together(1/(1 + 1/x))
x/(1 + x)
It also perfect possible to work with symbolic powers or
exponential functions or combinations of both:
>>> together(1/x**y + 1/x**(y-1))
x**(-y)*(1 + x)
#>>> together(1/x**(2*y) + 1/x**(y-z))
#x**(-2*y)*(1 + x**(y + z))
#>>> together(1/exp(x) + 1/(x*exp(x)))
#(1+x)/(x*exp(x))
#>>> together(1/exp(2*x) + 1/(x*exp(3*x)))
#(1+exp(x)*x)/(x*exp(3*x))
"""
def _together(expr):
from sympy.core.function import Function
if expr.is_Add:
items, coeffs, basis = [], [], {}
for elem in expr.args:
numer, q = fraction(_together(elem))
denom = {}
for term in make_list(q.expand(), Mul):
expo = S.One
coeff = S.One
if term.is_Pow:
if term.exp.is_Rational:
term, expo = term.base, term.exp
elif term.exp.is_Mul:
coeff, tail = term.exp.as_coeff_terms()
if coeff.is_Rational:
tail = C.Mul(*tail)
term, expo = Pow(term.base, tail), coeff
coeff = S.One
elif term.func is C.exp:
if term.args[0].is_Rational:
term, expo = S.Exp1, term.args[0]
elif term.args[0].is_Mul:
coeff, tail = term.args[0].as_coeff_terms()
if coeff.is_Rational:
tail = C.Mul(*tail)
term, expo = C.exp(tail), coeff
coeff |
flavoi/diventi | diventi/accounts/migrations/0331_auto_20200612_0849.py | Python | apache-2.0 | 453 | 0 | # Generated by Django 2.2.13 on 2020-06-12 06:49
import diventi.accounts.models
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: reattach the custom DiventiUserManager
    to the ``diventiuser`` model's ``objects`` manager."""

    dependencies = [
        ('accounts', '0330_auto_20200612_0843'),
    ]

    operations = [
        migrations.AlterModelManagers(
            # reconstructed from the corrupted "name='d | iventiuser'" line
            name='diventiuser',
            managers=[
                ('objects', diventi.accounts.models.DiventiUserManager()),
            ],
        ),
    ]
|
SickGear/SickGear | lib/imdbpie/constants.py | Python | gpl-3.0 | 292 | 0 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals

# Endpoints and client identity used by the imdbpie HTTP client.
HOST = 'api.imdbws.com'
BASE_URI = 'https://{0}'.format(HOST)
SEARCH_BASE_URI = 'https://v2.sg.media-imdb.com'
USER_AGENT = 'IMDb/8.3.1 (iPhone9,4; iOS 11.2.1)'
APP_KEY = '76a6cc20-6073-4290-8a2c-951b4580ae4a'
Daverball/reconos | tools/python/mhstools.py | Python | gpl-2.0 | 7,320 | 0.016396 | #!/usr/bin/env python
# coding: utf8
# ____ _____
# ________ _________ ____ / __ \/ ___/
# / ___/ _ \/ ___/ __ \/ __ \/ / / /\__ \
# / / / __/ /__/ /_/ / / / / /_/ /___/ /
# /_/ \___/\___/\____/_/ /_/\____//____/
#
# ======================================================================
#
# project: ReconOS
# author: Enno Lübbers, University of Paderborn
# description: API for parsing and manipulationg mhs files.
#
# ======================================================================
import string
import sys
# return a binary representation of a number
# x: number
# n: number of binary digits
def ntob(x, n):
    """Return the n-digit binary string representation of x, MSB first.

    Only the lowest n bits of x are considered: wider values are
    truncated and negative values are rendered in two's-complement form.
    """
    s = ""
    for i in range(0, n):
        # (x << i) & (1 << n-1) tests bit (n-1-i) of x, i.e. MSB first
        if (x << i) & (1 << n-1):
            s += "1"
        else:
            s += "0"
    return s
class MHSLine:
    """
    This class represents a single line of a mhs file

    fields: self.type     : the first word on the line (eg. PARAMETER, PORT,...)
            self.content  : list containing key/value pairs
            self.line_num : source line number (0 if unknown)
    """

    def __init__(self, line, line_num=0):
        s = line.split()
        self.type = s[0]
        s = " ".join(s[1:])
        s = s.split(",")
        self.content = []
        self.line_num = line_num
        for x in s:
            # each comma-separated entry must be a single 'key = value' pair
            # (list comprehension instead of map() so len() works on Python 3)
            y = [part.strip() for part in x.split("=")]
            if not len(y) == 2:
                # raising a plain string is a TypeError on modern Python;
                # use a real exception type instead
                raise ValueError("parse error at line %i" % line_num)
            self.content.append((y[0], y[1]))

    def __str__(self):
        s = self.type + " " + self.content[0][0] + " = " + str(self.content[0][1])
        for k in self.content[1:]:
            s += ", " + k[0] + " = " + k[1]
        return s
class MHSPCore:
    """
    This class represents a pcore instance

    fields: self.ip_name
            self.instance_name
            self.content : list of lines (INSTANCE is kept separately)
    """

    def __init__(self, ip_name):
        self.ip_name = ip_name
        self.content = []

    def addLine(self, line):
        # the INSTANCE parameter is stored in self.instance_name, not content
        if line.type == "PARAMETER" and line.content[0][0] == "INSTANCE":
            self.instance_name = line.content[0][1]
            return
        self.content.append(line)

    def getValue(self, key):
        """Return the value for 'key', or None if absent."""
        for line in self.content:
            if line.content[0][0].lower() == key.lower(): # MHS files are case insensitive
                return line.content[0][1]
        return None

    def setValue(self, key, value):
        """Set the value for 'key' on every matching line.

        Matches case-insensitively, consistent with getValue() (the
        original compared exactly, which was inconsistent).
        """
        for line in self.content:
            if line.content[0][0].lower() == key.lower():
                line.content[0] = (line.content[0][0], value)

    def addEntry(self, name, key, value):
        self.addLine(MHSLine(name + " " + key + " = " + str(value)))

    def __str__(self):
        result = "BEGIN " + self.ip_name + "\n"
        result += "\tPARAMETER INSTANCE = " + self.instance_name + "\n"
        for k in self.content:
            result += "\t" + str(k) + "\n"
        result += "END\n"
        return result
class MHS:
    """
    This class represents a mhs file.

    fields: self.pcores   : list of MHSPCore objects
            self.toplevel : list of MHSLine objects
    """

    def __init__(self, filename=None):
        self.pcores = []
        self.toplevel = [MHSLine("PARAMETER VERSION = 2.1.0", 0)]
        if filename:
            self.parse(filename)

    def isComment(self, line_trimmed):
        # callers must pass a non-empty, already-stripped line
        return line_trimmed[0] == '#'

    def addPCore(self, pcore):
        self.pcores.append(pcore)

    def parse(self, filename):
        """Parse 'filename', replacing self.toplevel and self.pcores.

        Raises ValueError if the file ends inside a BEGIN/END block.
        """
        STATE_TOPLEVEL = 0
        STATE_PCORE = 1
        state = STATE_TOPLEVEL
        line_count = 0
        self.pcores = []
        self.toplevel = []
        pcore = None
        # 'with' closes the file even on errors (the original leaked it)
        with open(filename, "r") as fin:
            while True:
                line_count += 1
                line = fin.readline()
                if not line:
                    if state == STATE_PCORE:
                        # raising a plain string is a TypeError on modern
                        # Python; raise a real exception instead
                        raise ValueError("unexpected end of file: '%s' at line %i" % (filename, line_count))
                    break
                line = line.strip()
                if not line:
                    continue
                if self.isComment(line):
                    continue
                s = line.split()
                name = s[0]
                s = " ".join(s[1:])
                if state == STATE_TOPLEVEL:
                    if name == "BEGIN":
                        state = STATE_PCORE
                        pcore = MHSPCore(s)
                    else:
                        self.toplevel.append(MHSLine(line, line_count))
                else:
                    if name == "END":
                        state = STATE_TOPLEVEL
                        self.pcores.append(pcore)
                    else:
                        pcore.addLine(MHSLine(line, line_count))

    def __str__(self):
        result = ""
        for k in self.toplevel:
            result += str(k) + "\n"
        for pcore in self.pcores:
            result += "\n" + str(pcore)
        return result

    def getPCores(self, ip_name):
        """Return all pcores whose IP name equals ip_name."""
        result = []
        for pcore in self.pcores:
            if pcore.ip_name == ip_name:
                result.append(pcore)
        return result

    def getPCore(self, instance_name):
        """Return the pcore with the given instance name, or None."""
        for pcore in self.pcores:
            if pcore.instance_name == instance_name:
                return pcore
        return None

    def delPCore(self, instance_name):
        """Remove the pcore with the given instance name.

        Bug fix: the original called self.getPcore() (wrong
        capitalisation), which always raised AttributeError.
        """
        pcore = self.getPCore(instance_name)
        self.pcores.remove(pcore)
mjmottram/snoing | packages/xrootd.py | Python | mit | 2,509 | 0.003587 | #!/usr/bin/env python
#
# XRootD
#
# XRootD package installer.
#
# Author M Mottram - 15/04/2016 <m.mottram@qmul.ac.uk> : First revision
#######################################################################
import localpackage
import os
import stat
import shutil
class XRootD(localpackage.LocalPackage):
    """ Base XRootD installer."""

    def __init__(self, name, system, version):
        """ Initialise the XRootD package with the version to install."""
        super(XRootD, self).__init__(name, system)
        self._version = version

    def get_tar_name(self):
        """ Return the name of the XRootD source tarball."""
        return "xrootd-%s.tar.gz" % self._version

    # Functions to override
    def get_dependencies(self):
        """ Return the dependency names as a list of names."""
        return ["openssl-dev", "cmake-2.8.12"]

    def _is_downloaded(self):
        """ Check the tarball has been downloaded."""
        return self._system.file_exists(self.get_tar_name())

    def _is_installed(self):
        """ Check the xrootd binary exists and is executable by the user."""
        return self._system.file_exists("xrootd", os.path.join(self.get_install_path(), "bin")) and \
            bool(os.stat(os.path.join(self.get_install_path(), "bin/xrootd")).st_mode & stat.S_IXUSR)

    def _download(self):
        """ Download the XRootD source tarball from xrootd.org."""
        self._system.download_file("http://xrootd.org/download/v%s/%s" % (self._version,
                                                                          self.get_tar_name()))

    def _install(self):
        """ Untar the source, configure with cmake, build and install."""
        source_path = os.path.join(self._system.get_install_path(), "%s-source" % self._name)
        self._system.untar_file(self.get_tar_name(), source_path, 1)
        if not os.path.exists(self.get_install_path()):
            os.makedirs(self.get_install_path())
        cmake_opts = [source_path,
                      "-DCMAKE_INSTALL_PREFIX=%s" % self.get_install_path(),
                      "-DENABLE_PERL=FALSE"]
        # prefer the cmake we installed as a dependency, if it resolved
        cmake_command = "cmake"
        if self._dependency_paths["cmake-2.8.12"] is not None:
            cmake_command = "%s/bin/cmake" % self._dependency_paths["cmake-2.8.12"]
        self._system.configure_command(cmake_command, cmake_opts, self.get_install_path(),
                                       config_type="xrootd")
        self._system.execute_command("make", [], self.get_install_path())
        self._system.execute_command("make", ["install"], self.get_install_path())
        # remove the unpacked source tree once the install has succeeded
        shutil.rmtree(source_path)
|
dovf/kitty | docs/source/conf.py | Python | gpl-2.0 | 9,549 | 0.005864 | # -*- coding: utf-8 -*-
#
# Kitty documentation build configuration file, created by
# sphinx-quickstart on Wed Jan 20 15:56:03 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extension modules enabled for this documentation build.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    # NOTE(review): pngmath was removed in Sphinx 1.8 -- confirm the pinned
    # Sphinx version is < 1.8 or switch to 'sphinx.ext.imgmath'.
    'sphinx.ext.pngmath',
    'sphinx.ext.viewcode',
    'sphinx.ext.graphviz',
]
# Add any paths that contain templates here, relative to this directory.
# NOTE(review): 'ntemplates' (and 'nstatic' further down) look like mangled
# sphinx-quickstart defaults '_templates'/'_static' -- confirm these folders
# actually exist in the docs source tree.
templates_path = ['ntemplates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Kitty'
copyright = u'2016, Cisco SAS team'
author = u'Cisco SAS team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.6.8'
# The full version, including alpha/beta/rc tags.
release = '0.6.8'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#kee | p_warnings = False
# If true, `todo` and `todoL | ist` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['nstatic']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Kittydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Kitty.tex', u'Kitty Documentation',
u'Cisco SAS team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# |
wbyne/QGIS | scripts/generate_test_mask_image.py | Python | gpl-2.0 | 6,625 | 0.004377 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
***************************************************************************
generate_test_mask_image.py
---------------------
Date : February 2015
Copyright : (C) 2015 by Nyall Dawson
Email : nyall dot dawson at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Nyall Dawson'
__date__ = 'February 2015'
__copyright__ = '(C) 2015, Nyall Dawson'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
# Generates (or updates) a unit test image mask, which is used to specify whether
# a pixel in the control image should be checked (black pixel in mask) or not (white
# pixel in mask). For non black or white pixels, the pixels lightness is used to
# specify a maximum delta for each color component
import os
import sys
import argparse
from PyQt5.QtGui import QImage, QColor, qRed, qBlue, qGreen, qAlpha, qRgb
import struct
import urllib.request, urllib.error, urllib.parse
import glob
def error(msg):
    """Report a fatal problem and terminate the script with exit status 1."""
    print(msg)
    raise SystemExit(1)
def colorDiff(c1, c2):
    """Return the largest per-channel (R, G, B, A) difference between two QRgb values."""
    channel_diffs = (
        abs(qRed(c1) - qRed(c2)),
        abs(qGreen(c1) - qGreen(c2)),
        abs(qBlue(c1) - qBlue(c2)),
        abs(qAlpha(c1) - qAlpha(c2)),
    )
    return max(channel_diffs)
def imageFromPath(path):
    """Load a QImage from a local file path or a http(s)/file URL."""
    if path.startswith(('http://', 'file://', 'https://')):
        # fetch remote image into memory and decode it
        image = QImage()
        image.loadFromData(urllib.request.urlopen(path).read())
        return image
    return QImage(path)
def getControlImagePath(path):
    """Resolve *path* to a control image file.

    If *path* is already a file it is returned unchanged; otherwise the
    control_images test-data tree is searched for exactly one matching
    folder holding exactly one non-mask PNG. Ambiguous or missing matches
    abort via error().
    """
    if os.path.isfile(path):
        return path
    #else try and find matching test image
    script_folder = os.path.dirname(os.path.realpath(sys.argv[0]))
    control_images_folder = os.path.join(script_folder, '../tests/testdata/control_images')
    matching_control_images = [x[0] for x in os.walk(control_images_folder) if path in x[0]]
    if len(matching_control_images) > 1:
        error('Found multiple matching control images for {}'.format(path))
    elif len(matching_control_images) == 0:
        error('No matching control images found for {}'.format(path))
    found_control_image_path = matching_control_images[0]
    # check for a single matching expected image, ignoring '*_mask.png'
    # companions (these two lines were corrupted in the original)
    images = glob.glob(os.path.join(found_control_image_path, '*.png'))
    filtered_images = [i for i in images if not i[-9:] == '_mask.png']
    if len(filtered_images) > 1:
        error('Found multiple matching control images for {}'.format(path))
    elif len(filtered_images) == 0:
        error('No matching control images found for {}'.format(path))
    found_image = filtered_images[0]
    print('Found matching control image: {}'.format(found_image))
    return found_image
def updateMask(control_image_path, rendered_image_path, mask_image_path):
    """Grow the mask so the rendered image passes comparison with the control.

    Each mask pixel's red channel holds the allowed per-channel colour delta
    for that pixel; 255 marks a pixel as ignored entirely. Pixels whose
    rendered/control difference exceeds the current tolerance are written
    back into the mask with the observed difference, and the mask file is
    saved only when at least one pixel changed.
    """
    control_image = imageFromPath(control_image_path)
    if not control_image:
        error('Could not read control image {}'.format(control_image_path))
    rendered_image = imageFromPath(rendered_image_path)
    if not rendered_image:
        error('Could not read rendered image {}'.format(rendered_image_path))

    if (rendered_image.width() != control_image.width()
            or rendered_image.height() != control_image.height()):
        print('Size mismatch - control image is {}x{}, rendered image is {}x{}'.format(
            control_image.width(), control_image.height(),
            rendered_image.width(), rendered_image.height()))

    # compare only the overlapping region of the two images
    max_width = min(rendered_image.width(), control_image.width())
    max_height = min(rendered_image.height(), control_image.height())

    # load the current mask, or start from an all-black (zero tolerance) one
    mask_image = imageFromPath(mask_image_path)
    if mask_image.isNull():
        print('Mask image does not exist, creating {}'.format(mask_image_path))
        mask_image = QImage(control_image.width(), control_image.height(), QImage.Format_ARGB32)
        mask_image.fill(QColor(0, 0, 0))

    # walk the rendered image pixel by pixel and compare against the control
    mismatch_count = 0
    linebytes = max_width * 4
    for row in range(max_height):
        control_scanline = control_image.constScanLine(row).asstring(linebytes)
        rendered_scanline = rendered_image.constScanLine(row).asstring(linebytes)
        mask_scanline = mask_image.scanLine(row).asstring(linebytes)
        for col in range(max_width):
            offset = col * 4
            tolerance = qRed(struct.unpack('I', mask_scanline[offset:offset + 4])[0])
            if tolerance == 255:
                continue  # pixel is explicitly ignored
            expected_rgb = struct.unpack('I', control_scanline[offset:offset + 4])[0]
            rendered_rgb = struct.unpack('I', rendered_scanline[offset:offset + 4])[0]
            difference = colorDiff(expected_rgb, rendered_rgb)
            if difference > tolerance:
                # widen the mask at this pixel to the observed difference
                mask_image.setPixel(col, row, qRgb(difference, difference, difference))
                mismatch_count += 1

    if mismatch_count:
        mask_image.save(mask_image_path, "png")
        print('Updated {} pixels in {}'.format(mismatch_count, mask_image_path))
    else:
        print('No mismatches in {}'.format(mask_image_path))
# CLI: positional control and rendered image paths, optional mask path.
parser = argparse.ArgumentParser()  # OptionParser("usage: %prog control_image rendered_image mask_image")
parser.add_argument('control_image')
parser.add_argument('rendered_image')
parser.add_argument('mask_image', nargs='?', default=None)
args = parser.parse_args()
# allow passing a bare control image name; resolve it to an actual file
args.control_image = getControlImagePath(args.control_image)
if not args.mask_image:
    # default mask path: alongside the control image, with a '_mask' suffix
    args.mask_image = args.control_image[:-4] + '_mask.png'
updateMask(args.control_image, args.rendered_image, args.mask_image)
|
mikeckennedy/cookiecutter-course | src/ch8_sharing_your_template/show_off_web_app/show_off_web_app/__init__.py | Python | gpl-2.0 | 7,255 | 0.003584 | import datetime
import pkg_resources
import os
import sys
from pyramid.config import Configurator
# noinspection PyUnresolvedReferences
import show_off_web_app
import show_off_web_app.controllers.home_controller as home
import show_off_web_app.controllers.account_controller as account
import show_off_web_app.controllers.newsletter_controller as news
from show_off_web_app.data.dbsession import DbSessionFactory
from show_off_web_app.email.template_paser import EmailTemplateParser
from show_off_web_app.services.email_service import EmailService
from show_off_web_app.services.log_service import LogService
from show_off_web_app.services.mailinglist_service import MailingListService
dev_mode = False
def main(_, **settings):
    """WSGI application factory: configure and return the Pyramid app.

    The first few init steps are order-sensitive (logging, then mode,
    then includes); the remaining services may run in any order.
    """
    config = Configurator(settings=settings)
    init_logging(config)  # log setup must run first
    init_mode(config)  # mode must go next
    init_includes(config)  # includes must go next
    init_routing(config)  # it's pretty much flexible from here on down
    init_db(config)
    init_mailing_list(config)
    init_smtp_mail(config)
    init_email_templates(config)
    return config.make_wsgi_app()
def init_logging(config):
    """Bootstrap the LogService from config; must run before other init steps."""
    settings = config.get_settings()
    LogService.global_init(settings.get('log_level'), settings.get('log_filename'))
    log_package_versions()
def init_email_templates(_):
    """Prime the global email template parser; the config argument is unused."""
    EmailTemplateParser.global_init()
def init_smtp_mail(config):
    """Initialise the outbound email service from the SMTP settings.

    Falls back to dev mode (email disabled) when the config still holds
    the placeholder username.
    """
    global dev_mode
    placeholder = 'YOUR_VALUE'
    settings = config.get_settings()
    username = settings.get('smtp_username')
    password = settings.get('smtp_password')
    server = settings.get('smtp_server')
    port = settings.get('smtp_port')
    effective_dev_mode = dev_mode
    if username == placeholder:
        log = LogService.get_startup_log()
        log.warn("SMTP server values not set in config file. "
                 "Outbound email will not work.")
        effective_dev_mode = True  # turn off email if the system has no server.
    EmailService.global_init(username, password, server, port, effective_dev_mode)
def init_db(_):
    """Initialise the SQLite session factory, picking the dev or prod DB file."""
    global dev_mode
    suffix = '_dev.sqlite' if dev_mode else '_prod.sqlite'
    top_folder = os.path.dirname(show_off_web_app.__file__)
    rel_file = os.path.join('db', 'show_off_web_app.sqlite').replace('.sqlite', suffix)
    DbSessionFactory.global_init(os.path.join(top_folder, rel_file))
def init_mode(config):
    """Set the module-wide dev/prod flag from the 'mode' setting and log it."""
    global dev_mode
    dev_mode = (config.get_settings().get('mode') == 'dev')
    LogService.get_startup_log().notice(
        'Running in {} mode.'.format('dev' if dev_mode else 'prod'))
def init_mailing_list(config):
    """Initialise the MailChimp mailing-list service from the app settings.

    Logs a warning when the API key is still the placeholder value;
    subscriptions will simply not work in that case.
    """
    placeholder = 'ADD_YOUR_API_KEY'
    settings = config.get_settings()
    api_key = settings.get('mailchimp_api')
    list_id = settings.get('mailchimp_list_id')
    if api_key == placeholder:
        LogService.get_startup_log().warn(
            "Mailchimp API values not set in config file. "
            "Mailing list subscriptions will not work.")
    MailingListService.global_init(api_key, list_id)
def init_routing(config):
    """Register static assets, the root route, and one route set per controller."""
    config.add_static_view('static', 'static', cache_max_age=3600)
    config.add_handler('root', '/', handler=home.HomeController, action='index')
    add_controller_routes(config, home.HomeController, 'home')
    add_controller_routes(config, account.AccountController, 'account')
    add_controller_routes(config, news.NewsletterController, 'newsletter')
    # pick up any venusian-decorated views/handlers
    config.scan()
def add_controller_routes(config, ctrl, prefix):
    """Register the standard route set for one controller.

    Routes added (in order): index with and without trailing slash,
    '{action}' with and without trailing slash, and '{action}/{id}'.
    """
    routes = [
        (prefix + 'ctrl_index', '/' + prefix, {'action': 'index'}),
        (prefix + 'ctrl_index/', '/' + prefix + '/', {'action': 'index'}),
        (prefix + 'ctrl', '/' + prefix + '/{action}', {}),
        (prefix + 'ctrl/', '/' + prefix + '/{action}/', {}),
        (prefix + 'ctrl_id', '/' + prefix + '/{action}/{id}', {}),
    ]
    for route_name, pattern, extra in routes:
        config.add_handler(route_name, pattern, handler=ctrl, **extra)
def init_includes(config):
    """Enable the Pyramid add-ons the app relies on (templating + handlers)."""
    config.include('pyramid_chameleon')
    config.include('pyramid_handlers')
    # config.include('rollbar.contrib.pyramid')
def log_package_versions():
    """Log the interpreter version plus, for every pinned dependency in
    ``requires``, the version actually installed (per pkg_resources), to
    the startup log. Purely informational; failures are logged, not raised."""
    startup_log = LogService.get_startup_log()
    # TODO: UPDATE WITH OUR DEPENDENCIES
    # update from setup.py when changed!
    # This list is the closure of all dependencies,
    # taken from: pip list --format json
    requires = [{"name": "show_off_web_app", "version": "0.0"}, {"name": "appdirs", "version": "1.4.3"},
                {"name": "Chameleon", "version": "3.1"}, {"name": "docopt", "version": "0.4.0"},
                {"name": "html2text", "version": "2016.9.19"}, {"name": "hupper", "version": "0.4.4"},
                {"name": "Logbook", "version": "1.0.0"}, {"name": "mailchimp", "version": "2.0.9"},
                {"name": "mailer", "version": "0.8.1"}, {"name": "Mako", "version": "1.0.6"},
                {"name": "MarkupSafe", "version": "1.0"}, {"name": "packaging", "version": "16.8"},
                {"name": "passlib", "version": "1.7.1"}, {"name": "PasteDeploy", "version": "1.5.2"},
                {"name": "pip", "version": "9.0.1"}, {"name": "Pygments", "version": "2.2.0"},
                {"name": "pyparsing", "version": "2.2.0"}, {"name": "pyramid", "version": "1.8.3"},
                {"name": "pyramid-chameleon", "version": "0.3"}, {"name": "pyramid-debugtoolbar", "version": "3.0.5"},
                {"name": "pyramid-handlers", "version": "0.5"}, {"name": "pyramid-mako", "version": "1.0.2"},
                {"name": "repoze.lru", "version": "0.6"}, {"name": "requests", "version": "2.13.0"},
                {"name": "rollbar", "version": "0.13.11"}, {"name": "setuptools", "version": "34.3.2"},
                {"name": "six", "version": "1.10.0"}, {"name": "SQLAlchemy", "version": "1.1.6"},
                {"name": "translationstring", "version": "1.3"}, {"name": "venusian", "version": "1.0"},
                {"name": "waitress", "version": "1.0.2"}, {"name": "WebOb", "version": "1.7.2"},
                {"name": "zope.deprecation", "version": "4.2.0"}, {"name": "zope.interface", "version": "4.3.3"}]
    requires.sort(key=lambda d: d['name'].lower())
    t0 = datetime.datetime.now()
    startup_log.notice('---------- Python version info ------------------')
    startup_log.notice(sys.version.replace('\n', ' ').replace(' ', ' '))
    startup_log.notice('---------- package version info ------------------')
    for rec in requires:
        try:
            version = pkg_resources.get_distribution(rec['name']).version
            if version:
                startup_log.notice('{} v{}'.format(rec['name'], version))
            else:
                startup_log.notice("WHERE IS IT? {}.".format(rec['name']))
        except Exception as x:
            # a missing/broken distribution must not abort app start-up
            startup_log.notice('{} UNKNOWN VERSION ({})'.format(rec['name'], x))
    dt = datetime.datetime.now() - t0
    startup_log.notice('Package info gathered in {} sec'.format(dt.total_seconds()))
    startup_log.notice('--------------------------------------------------')
|
decvalts/landlab | landlab/components/vegetation_ca/__init__.py | Python | mit | 106 | 0 |
import landlab.components.vegetation_ca.CA_Veg
from landlab.components.vegetation_ca.CA_Veg import VegCA
|
kleientertainment/ds_mod_tools | pkg/win32/Python27/Lib/pydoc.py | Python | mit | 95,949 | 0.002053 | #!/usr/bin/env python
# -*- coding: latin-1 -*-
"""Generate Python documentation in HTML or text for interactive use.
In the Python interpreter, do "from pydoc import help" to provide online
help. Calling help(thing) on a Python object documents the object.
Or, at the shell command line outside of Python:
Run "pydoc <name>" to show documentation on something. <name> may be
the name of a function, module, package, or a dotted reference to a
class or function within a module or module in a package. If the
argument contains a path segment delimiter (e.g. slash on Unix,
backslash on Windows) it is treated as the path to a Python source file.
Run "pydoc -k <keyword>" to search for a keyword in the synopsis lines
of all available modules.
Run "pydoc -p <port>" to start an HTTP server on a given port on the
local machine to generate documentation web pages.
For platforms without a command line, "pydoc -g" starts the HTTP server
and also pops up a little window for controlling it.
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".
Module docs for core modules are assumed to be in
http://docs.python.org/library/
This can be overridden by setting the PYTHONDOCS environment variable
to a different URL or to a local directory containing the Library
Reference Manual pages.
"""
__author__ = "Ka-Ping Yee <ping@lfw.org>"
__date__ = "26 February 2001"
__version__ = "$Revision: 88564 $"
__credits__ = """Guido van Rossum, for an excellent programming language.
Tommy Burnette, the original creator of manpy.
Paul Prescod, for all his work on onlinehelp.
Richard Chamberlain, for the first implementation of textdoc.
"""
# Known bugs that can't be fixed here:
# - imp.load_module() cannot be prevented from clobbering existing
# loaded modules, so calling synopsis() on a binary module file
# changes the contents of any existing module with the same name.
# - If the __file__ attribute on a module is a relative path and
# the current directory is changed with os.chdir(), an incorrect
# path will be displayed.
import sys, imp, os, re, types, inspect, __builtin__, pkgutil, warnings
from repr import Repr
from string import expandtabs, find, join, lower, split, strip, rfind, rstrip
from traceback import extract_tb
try:
from collections import deque
except ImportError:
# Python 2.3 compatibility
class deque(list):
def popleft(self):
return self.pop(0)
# --------------------------------------------------------- common routines
def pathdirs():
    """Convert sys.path into a list of absolute, existing, unique paths."""
    result = []
    seen_normcased = set()
    for entry in sys.path:
        absolute = os.path.abspath(entry or '.')
        canonical = os.path.normcase(absolute)
        if canonical not in seen_normcased and os.path.isdir(absolute):
            result.append(absolute)
            seen_normcased.add(canonical)
    return result
def getdoc(object):
    """Get the doc string or comments for an object.

    Leading blank lines are removed and trailing whitespace stripped;
    returns '' when neither a docstring nor comments are available.
    """
    result = inspect.getdoc(object) or inspect.getcomments(object)
    # str.rstrip() replaces the Python-2-only string.rstrip() helper and
    # works identically on both Python 2 and 3
    return result and re.sub('^ *\n', '', result.rstrip()) or ''
def splitdoc(doc):
    """Split a doc string into a synopsis line (if any) and the rest.

    A synopsis exists when the docstring is a single line, or when its
    first line is followed by a blank line; otherwise the synopsis is ''.
    """
    # str methods replace the Python-2-only string-module helpers and
    # behave identically on Python 2 and 3
    lines = doc.strip().split('\n')
    if len(lines) == 1:
        return lines[0], ''
    elif len(lines) >= 2 and not lines[1].rstrip():
        return lines[0], '\n'.join(lines[2:])
    return '', '\n'.join(lines)
def classname(object, modname):
"""Get a class name and qualify it with a module name if necessary."""
name = object.__name__
if object.__module__ != modname:
name = object.__module__ + '.' + name
return name
def isdata(object):
"""Check if an object is of a type that probably means it's data."""
return not (inspect.ismodule(object) or inspect.isclass(object) or
inspect.isroutine(object) or inspect.isframe(object) or
inspect.istraceback(object) or inspect.iscode(object))
def replace(text, *pairs):
"""Do a series of global replacements on a string."""
while pairs:
text = join(split(text, pairs[0]), pairs[1])
pairs = pairs[2:]
return text
def cram(text, maxlen):
"""Omit part of a string if needed to make it fit in a maximum length."""
if len(text) > maxlen:
pre = max(0, (maxlen-3)//2)
post = max(0, maxlen-3-pre)
return text[:pre] + '...' + text[len(text)-post:]
return text
_re_stripid = re.compile(r' at 0x[0-9a-f]{6,16}(>+)$', re.IGNORECASE)
def stripid(text):
"""Remove the hexadecimal id from a Python object representation."""
# The behaviour of %p is implementation-dependent in terms of case.
return _re_stripid.sub(r'\1', text)
def _is_some_method(obj):
return inspect.ismethod(obj) or inspect.ismethoddescriptor(obj)
def allmethods(cl):
methods = {}
for key, value in inspect.getmembers(cl, _is_some_method):
methods[key] = 1
for base in cl.__bases__:
methods.update(allmethods(base) | ) # all your base are belong to us
for key in methods.keys():
methods[key] = | getattr(cl, key)
return methods
def _split_list(s, predicate):
"""Split sequence s via predicate, and return pair ([true], [false]).
The return value is a 2-tuple of lists,
([x for x in s if predicate(x)],
[x for x in s if not predicate(x)])
"""
yes = []
no = []
for x in s:
if predicate(x):
yes.append(x)
else:
no.append(x)
return yes, no
def visiblename(name, all=None, obj=None):
"""Decide whether to show documentation on a variable."""
# Certain special names are redundant.
_hidden_names = ('__builtins__', '__doc__', '__file__', '__path__',
'__module__', '__name__', '__slots__', '__package__')
if name in _hidden_names: return 0
# Private names are hidden, but special names are displayed.
if name.startswith('__') and name.endswith('__'): return 1
# Namedtuples have public fields and methods with a single leading underscore
if name.startswith('_') and hasattr(obj, '_fields'):
return 1
if all is not None:
# only document that which the programmer exported in __all__
return name in all
else:
return not name.startswith('_')
def classify_class_attrs(object):
"""Wrap inspect.classify_class_attrs, with fixup for data descriptors."""
def fixup(data):
name, kind, cls, value = data
if inspect.isdatadescriptor(value):
kind = 'data descriptor'
return name, kind, cls, value
return map(fixup, inspect.classify_class_attrs(object))
# ----------------------------------------------------- module manipulation
def ispackage(path):
"""Guess whether a path refers to a package directory."""
if os.path.isdir(path):
for ext in ('.py', '.pyc', '.pyo'):
if os.path.isfile(os.path.join(path, '__init__' + ext)):
return True
return False
def source_synopsis(file):
line = file.readline()
while line[:1] == '#' or not strip(line):
line = file.readline()
if not line: break
line = strip(line)
if line[:4] == 'r"""': line = line[1:]
if line[:3] == '"""':
line = line[3:]
if line[-1:] == '\\': line = line[:-1]
while not strip(line):
line = file.readline()
if not line: break
result = strip(split(line, '"""')[0])
else: result = None
return result
def synopsis(filename, cache={}):
"""Get the one-line summary out of a module file."""
mtime = os.stat(filename).st_mtime
lastupdate, result = cache.get(filename, (None, None))
if lastupdate is None or lastupdate < mtime:
info = inspect.getmoduleinfo(filename)
try:
file = open(filename)
except IOError:
# module can't be opened, so skip it
|
DigitalCampus/django-oppia | api/resources/course.py | Python | gpl-3.0 | 9,945 | 0 | import json
import os
import re
import shutil
import xmltodict
import zipfile
from django.conf import settings
from django.conf.urls import url |
from django.core.exceptions import | MultipleObjectsReturned
from django.db.models import Q
from django.http import HttpResponse, Http404
from django.utils.translation import ugettext_lazy as _
from tastypie import fields
from tastypie.authentication import ApiKeyAuthentication, Authentication
from tastypie.authorization import ReadOnlyAuthorization, Authorization
from tastypie.resources import ModelResource
from tastypie.utils import trailing_slash
from api.serializers import CourseJSONSerializer
from oppia.models import Tracker, Course, CourseCategory
from oppia.signals import course_downloaded
STR_COURSE_NOT_FOUND = _(u"Course not found")
def get_course_from_shortname(resource, bundle, lookup):
object_list = resource.apply_filters(bundle.request,
{'shortname': lookup})
if len(object_list) <= 0:
raise resource._meta.object_class.DoesNotExist(
"Couldn't find an course with shortname '%s'." % (lookup))
elif len(object_list) > 1:
raise MultipleObjectsReturned(
"More than one course with shortname '%s'." % (lookup))
return object_list
class CourseResource(ModelResource):
class Meta:
queryset = Course.objects.all()
resource_name = 'course'
allowed_methods = ['get']
fields = ['id',
'title',
'version',
'shortname',
'priority',
'is_draft',
'description',
'author',
'username',
'organisation']
authentication = ApiKeyAuthentication()
authorization = ReadOnlyAuthorization()
serializer = CourseJSONSerializer()
always_return_data = True
include_resource_uri = True
def obj_get(self, bundle, **kwargs):
"""
Overriden get method to perform a direct lookup if we are searching
by shortname instead of pk
"""
lookup = kwargs[self._meta.detail_uri_name]
if re.search('[a-zA-Z]', lookup):
object_list = get_course_from_shortname(self, bundle, lookup)
bundle.obj = object_list[0]
self.authorized_read_detail(object_list, bundle)
return bundle.obj
else:
return super().obj_get(bundle, **kwargs)
def get_object_list(self, request):
if request.user.is_staff:
return Course.objects.filter(is_archived=False) \
.order_by('-priority', 'title')
else:
return Course.objects.filter(is_archived=False) \
.filter(
Q(is_draft=False) |
(Q(is_draft=True) & Q(user=request.user))) \
.order_by('-priority', 'title')
def prepend_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/download%s$"
% (self._meta.resource_name, trailing_slash()),
self.wrap_view('download_course'), name="api_download_course"),
url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/activity%s$"
% (self._meta.resource_name, trailing_slash()),
self.wrap_view('download_activity'),
name="api_download_activity"),
]
def get_course(self, request, **kwargs):
self.is_authenticated(request)
self.throttle_check(request)
pk = kwargs.pop('pk', None)
try:
if request.user.is_staff:
course = self._meta.queryset.get(pk=pk, is_archived=False)
else:
course = self._meta.queryset \
.filter(
Q(is_draft=False) |
(Q(is_draft=True) & Q(user=request.user)) |
(Q(is_draft=True)
& Q(coursepermissions__user=request.user))) \
.distinct().get(pk=pk, is_archived=False)
except Course.DoesNotExist:
raise Http404(STR_COURSE_NOT_FOUND)
except ValueError:
try:
if request.user.is_staff:
course = self._meta.queryset.get(shortname=pk,
is_archived=False)
else:
course = self._meta.queryset \
.filter(
Q(is_draft=False) |
(Q(is_draft=True) & Q(user=request.user)) |
(Q(is_draft=True)
& Q(coursepermissions__user=request.user))) \
.distinct().get(shortname=pk, is_archived=False)
except Course.DoesNotExist:
raise Http404(STR_COURSE_NOT_FOUND)
return course
def download_course(self, request, **kwargs):
course = self.get_course(request, **kwargs)
file_to_download = course.getAbsPath()
has_completed_trackers = Tracker.has_completed_trackers(course,
request.user)
try:
if has_completed_trackers:
file_to_download = os.path.join(
settings.COURSE_UPLOAD_DIR,
"temp",
str(request.user.id) + "-" + course.filename)
shutil.copy2(course.getAbsPath(), file_to_download)
course_zip = zipfile.ZipFile(file_to_download, 'a')
if has_completed_trackers:
course_zip.writestr(course.shortname + "/tracker.xml",
Tracker.to_xml_string(course,
request.user))
course_zip.close()
binary_file = open(file_to_download, 'rb')
response = HttpResponse(binary_file.read(),
content_type='application/zip')
binary_file.close()
response['Content-Length'] = os.path.getsize(file_to_download)
response['Content-Disposition'] = \
'attachment; filename="%s"' % (course.filename)
except IOError:
raise Http404(STR_COURSE_NOT_FOUND)
course_downloaded.send(sender=self, course=course, request=request)
return response
def download_activity(self, request, **kwargs):
course = self.get_course(request, **kwargs)
return HttpResponse(Tracker.to_xml_string(course,
request.user),
content_type='text/xml')
def dehydrate(self, bundle):
bundle.data['url'] = bundle.request.build_absolute_uri(
bundle.data['resource_uri'] + 'download/')
# make sure title is shown as json object (not string representation \
# of one)
bundle.data['title'] = json.loads(bundle.data['title'])
try:
bundle.data['description'] = json.loads(bundle.data['description'])
except json.JSONDecodeError:
pass
course = Course.objects.get(pk=bundle.obj.pk)
if course and course.user:
bundle.data['author'] = course.user.first_name \
+ " " \
+ course.user.last_name
bundle.data['username'] = course.user.username
bundle.data['organisation'] = course.user.userprofile.organisation
return bundle
class CourseCategoryResource(ModelResource):
course = fields.ToOneField('api.resource.course.CourseResource',
'course',
full=True)
class Meta:
queryset = CourseCategory.objects.all()
allowed_methods = ['get']
resource_name = 'coursetag'
fields = ['id', 'course', 'category']
include_resource_uri = False
authentication = ApiKeyAuthen |
sporsh/jostedal | jostedal/utils.py | Python | mit | 185 | 0.016216 | import hashlib
def saslprep(string):
#TODO
| return string
def ha1(username, realm, password):
return hashlib.md5(':'.join((username, realm, saslprep(password)))).digest | ()
|
jabesq/home-assistant | tests/components/deconz/test_gateway.py | Python | apache-2.0 | 7,242 | 0 | """Test deCONZ gateway."""
from unittest.mock import Mock, patch
import pytest
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.components.deconz import errors, gateway
from tests.common import mock_coro
import pydeconz
ENTRY_CONFIG = {
"host": "1.2.3.4",
"port": 80,
"api_key": "1234567890ABCDEF",
"bridgeid": "0123456789ABCDEF",
"allow_clip_sensor": True,
"allow_deconz_groups": True,
}
async def test_gateway_setup():
"""Successful setup."""
hass = Mock()
entry = Mock()
entry.data = ENTRY_CONFIG
api = Mock()
api.async_add_remote.return_value = Mock()
api.sensors = {}
deconz_gateway = gateway.DeconzGateway(hass, entry)
with patch.object(gateway, 'get_gateway', return_value=mock_coro(api)), \
patch.object(
gateway, 'async_dispatcher_connect', return_value=Mock()):
assert await deconz_gateway.async_setup() is True
assert deconz_gateway.api is api
assert len(hass.config_entries.async_forward_entry_setup.mock_calls) == 7
assert hass.config_entries.async_forward_entry_setup.mock_calls[0][1] == \
(entry, 'binary_sensor')
assert hass.config_entries.async_forward_entry_setup.mock_calls[1][1] == \
(entry, 'climate')
assert hass.config_entries.async_forward_entry_setup.mock_calls[2][1] == \
(entry, 'cover')
assert hass.config_entries.async_forward_entry_setup.mock_calls[3][1] == \
(entry, 'light')
assert hass.config_entries.async_forward_entry_setup.mock_calls[4][1] == \
(entry, 'scene')
assert hass.config_entries.async_forward_entry_setup.mock_calls[5][1] == \
(entry, 'sensor')
assert hass.config_entries.async_forward_entry_setup.mock_calls[6][1] == \
(entry, 'switch')
assert len(api.start.mock_calls) == 1
async def test_gateway_retry():
"""Retry setup."""
hass = Mock()
entry = Mock()
entry.data = ENTRY_CONFIG
deconz_gateway = gateway.DeconzGateway(hass, entry)
with patch.object(
gateway, 'get_gateway', side_effect=errors.CannotConnect), \
pytest.raises(ConfigEntryNotReady):
await deconz_gat | eway | .async_setup()
async def test_gateway_setup_fails():
"""Retry setup."""
hass = Mock()
entry = Mock()
entry.data = ENTRY_CONFIG
deconz_gateway = gateway.DeconzGateway(hass, entry)
with patch.object(gateway, 'get_gateway', side_effect=Exception):
result = await deconz_gateway.async_setup()
assert not result
async def test_connection_status(hass):
"""Make sure that connection status triggers a dispatcher send."""
entry = Mock()
entry.data = ENTRY_CONFIG
deconz_gateway = gateway.DeconzGateway(hass, entry)
with patch.object(gateway, 'async_dispatcher_send') as mock_dispatch_send:
deconz_gateway.async_connection_status_callback(True)
await hass.async_block_till_done()
assert len(mock_dispatch_send.mock_calls) == 1
assert len(mock_dispatch_send.mock_calls[0]) == 3
async def test_add_device(hass):
"""Successful retry setup."""
entry = Mock()
entry.data = ENTRY_CONFIG
deconz_gateway = gateway.DeconzGateway(hass, entry)
with patch.object(gateway, 'async_dispatcher_send') as mock_dispatch_send:
deconz_gateway.async_add_device_callback('sensor', Mock())
await hass.async_block_till_done()
assert len(mock_dispatch_send.mock_calls) == 1
assert len(mock_dispatch_send.mock_calls[0]) == 3
async def test_add_remote():
"""Successful add remote."""
hass = Mock()
entry = Mock()
entry.data = ENTRY_CONFIG
remote = Mock()
remote.name = 'name'
remote.type = 'ZHASwitch'
remote.register_async_callback = Mock()
deconz_gateway = gateway.DeconzGateway(hass, entry)
deconz_gateway.async_add_remote([remote])
assert len(deconz_gateway.events) == 1
async def test_shutdown():
"""Successful shutdown."""
hass = Mock()
entry = Mock()
entry.data = ENTRY_CONFIG
deconz_gateway = gateway.DeconzGateway(hass, entry)
deconz_gateway.api = Mock()
deconz_gateway.shutdown(None)
assert len(deconz_gateway.api.close.mock_calls) == 1
async def test_reset_after_successful_setup():
"""Verify that reset works on a setup component."""
hass = Mock()
entry = Mock()
entry.data = ENTRY_CONFIG
api = Mock()
api.async_add_remote.return_value = Mock()
api.sensors = {}
deconz_gateway = gateway.DeconzGateway(hass, entry)
with patch.object(gateway, 'get_gateway', return_value=mock_coro(api)), \
patch.object(
gateway, 'async_dispatcher_connect', return_value=Mock()):
assert await deconz_gateway.async_setup() is True
listener = Mock()
deconz_gateway.listeners = [listener]
event = Mock()
event.async_will_remove_from_hass = Mock()
deconz_gateway.events = [event]
deconz_gateway.deconz_ids = {'key': 'value'}
hass.config_entries.async_forward_entry_unload.return_value = \
mock_coro(True)
assert await deconz_gateway.async_reset() is True
assert len(hass.config_entries.async_forward_entry_unload.mock_calls) == 7
assert len(listener.mock_calls) == 1
assert len(deconz_gateway.listeners) == 0
assert len(event.async_will_remove_from_hass.mock_calls) == 1
assert len(deconz_gateway.events) == 0
assert len(deconz_gateway.deconz_ids) == 0
async def test_get_gateway(hass):
"""Successful call."""
with patch('pydeconz.DeconzSession.async_load_parameters',
return_value=mock_coro(True)):
assert await gateway.get_gateway(hass, ENTRY_CONFIG, Mock(), Mock())
async def test_get_gateway_fails_unauthorized(hass):
"""Failed call."""
with patch('pydeconz.DeconzSession.async_load_parameters',
side_effect=pydeconz.errors.Unauthorized), \
pytest.raises(errors.AuthenticationRequired):
assert await gateway.get_gateway(
hass, ENTRY_CONFIG, Mock(), Mock()) is False
async def test_get_gateway_fails_cannot_connect(hass):
"""Failed call."""
with patch('pydeconz.DeconzSession.async_load_parameters',
side_effect=pydeconz.errors.RequestError), \
pytest.raises(errors.CannotConnect):
assert await gateway.get_gateway(
hass, ENTRY_CONFIG, Mock(), Mock()) is False
async def test_create_event():
"""Successfully created a deCONZ event."""
hass = Mock()
remote = Mock()
remote.name = 'Name'
event = gateway.DeconzEvent(hass, remote)
assert event._id == 'name'
async def test_update_event():
"""Successfully update a deCONZ event."""
hass = Mock()
remote = Mock()
remote.name = 'Name'
event = gateway.DeconzEvent(hass, remote)
remote.changed_keys = {'state': True}
event.async_update_callback()
assert len(hass.bus.async_fire.mock_calls) == 1
async def test_remove_event():
"""Successfully update a deCONZ event."""
hass = Mock()
remote = Mock()
remote.name = 'Name'
event = gateway.DeconzEvent(hass, remote)
event.async_will_remove_from_hass()
assert event._device is None
|
MJB47/Jokusoramame | joku/core/tagengine.py | Python | mit | 4,461 | 0.00269 | """
A Jinja2-based tag engine for tags.
"""
import asyncio
import inspect
import random
import string
from concurrent.futures import ThreadPoolExecutor
import discord
import functools
import lupa
from discord.abc import GuildChannel
from jinja2 import Template
from jinja2.sandbox import SandboxedEnvironment
from lupa._lupa import LuaRuntime
from joku.cogs.lua import sandbox_preamble, dictify_table_recursively, NO_RESULT
from joku.core.bot import Context, Jokusoramame
from joku.core.mp2 import ProcessPoolExecutor
from joku.db.tables import Tag
class TagEngine(object):
def __init__(self, bot: Jokusoramame):
# Template environment.
# This is a SandboxedEnvironment for security purposes.
self.tmpl_env = SandboxedEnvironment()
# The process pool used.
self.executor = ProcessPoolExecutor()
# The bot instance.
# We use this for getting the tag instance.
self.bot = bot
# Update the globals of the template environment.
self.tmpl_env.globals.update(
{
"random": random,
"string": string,
"list": list,
"str": str,
"tuple": tuple,
}
)
@staticmethod
def _lua_render_template(luastr: str, kwargs=None):
"""
Renders a Lua template.
"""
def getter(obj, attr_name):
if attr_name.startswith("_"):
raise AttributeError("Not allowed to access attribute `{}` of `{}`"
.format(attr_name, type(obj).__name__))
return attr_name
def setter(obj, attr_name, value):
raise AttributeError("Python object attribute setting is forbidden")
# the attribute_handlers are probably enough to prevent access eval otherwise
lua = LuaRuntime(register_eval=Fa | lse,
unpack_returned_tuples=True,
attribute_handlers=(getter, setter))
# execute the sandbox preamble
sandbox = lua.execute(sandbox_preamble)
# call sandbox.run with `glob.sandbox, code`
# and unpack the variables
new = {}
# HECK
for key, val in kwargs.items(): |
new[key] = lua.table_from(val)
_ = sandbox.run(luastr, lua.table_from(new))
if isinstance(_, bool):
# idk
return NO_RESULT
called, result = _
if lupa.lua_type(result) == 'table':
# dictify
result = dictify_table_recursively(result)
return str(result)
@staticmethod
def _pp_render_template(tmpl_env: SandboxedEnvironment, tag: Tag, kwargs=None):
"""
Called inside the process pool to render the template.
"""
template = tmpl_env.from_string(tag.content or "Broken tag!") # type: Template
# variables = tag.get("variables", {})
# def _set_variable(name, value):
# variables[name] = value
# local = {
# "set_variable": _set_variable,
# **variables,
# }
# if kwargs:
# local.update(kwargs)
rendered = template.render(**kwargs)
return rendered
async def _render_template(self, tag: Tag, **kwargs):
"""
Renders the template in a process pool.
"""
if tag.lua:
partial = functools.partial(self._lua_render_template, tag.content, kwargs)
else:
partial = functools.partial(self._pp_render_template, self.tmpl_env, tag, kwargs)
rendered = await asyncio.wait_for(self.bot.loop.run_in_executor(self.executor, partial), 5, loop=self.bot.loop)
return rendered
async def render_template(self, tag_id: str, ctx: Context = None, guild: discord.Guild = None,
**kwargs) -> str:
"""
Renders a template.
This will load all variables, render the template, and return the rendered template as output.
"""
guild = guild or ctx.message.guild
tag = await self.bot.database.get_tag(guild, tag_id)
if not tag:
return None
final_template = await self._render_template(tag, **kwargs)
# await self.bot.database.save_tag(guild, tag_id, content=tag.get("content"),
# variables=new_variables)
return final_template
|
ryanlelek/SMORESGaitRecorder | proto/inertial_pb2.py | Python | gpl-3.0 | 4,116 | 0.001458 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: inertial.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
import pose_pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='inertial.proto',
package='gazebo.msgs',
serialized_pb='\n\x0einertial.proto\x12\x0bgazebo.msgs\x1a\npose.proto\"\x87\x01\n\x08Inertial\x12\x0c\n\x04mass\x18\x01 \x01(\x01\x12\x1f\n\x04pose\x18\x02 \x01(\x0b\x32\x11.gazebo.msgs.Pose\x12\x0b\n\x03ixx\x18\x03 \x01(\x01\x12\x0b\n\x03ixy\x18\x04 \x01(\x01\x12\x0b\n\x03ixz\x18\x05 \x01(\x01\x12\x0b\n\x03iyy\x18\x06 \x01(\x01\x12\x0b\n\x03iyz\x18\x07 \x01(\x01\x12\x0b\n\x03izz\x18\x08 \x01(\x01')
_INERTIAL = _descriptor.Descriptor(
name='Inertial',
full_name='gazebo.msgs.Inertial',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='mass', full_name='gazebo.msgs.Inertial.mass', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pose', full_name='gazebo.msgs.Inertial.pose', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ixx', full_name='gazebo.msgs.Inertial.ixx', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ixy', full_name='gazebo.msgs.Inertial.ixy', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ixz', full_name='gazebo.msgs.Inertial.ixz', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='iyy', full_name='gazebo.msgs.Inertial.iyy', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='iyz', full_name='gazebo.msgs.Inertial.iyz', index=6,
number=7, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='izz', full_name='gazebo.msgs.Inertial.izz', index=7,
number=8, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges | =[],
serialized_start=44,
serialized_end=179,
)
_INERTIAL.fields_by_name['pose'].message_type = pose_pb2._POSE
DESCRIPTOR.message_types_by_name['Inertial'] = _INERTIAL
class Inertial(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageTyp | e
DESCRIPTOR = _INERTIAL
# @@protoc_insertion_point(class_scope:gazebo.msgs.Inertial)
# @@protoc_insertion_point(module_scope)
|
VerstandInvictus/Spidergram | spidergram.py | Python | mit | 4,748 | 0.001053 | from bs4 import BeautifulSoup
import requests
import re
import os
import codecs
import unidecode
import arrow
import traceback
# disable warning about HTTPS
try:
requests.packages.urllib3.disable_warnings()
except:
pass
class instaLogger:
def __init__(self, logfile):
self.logfile = logfile
def logEntry(self, entry, level):
with codecs.open(self.logfile, mode='a', encoding='utf-8') as log:
log.write(entry + '\n')
if 'progress' in level:
print unidecode.unidecode(entry)
class instagram:
def __init__(self, logobj):
self.logger = logobj
self.dest = os.path.join(os.getcwdu(), 'images')
if not os.path.exists(self.dest):
os.makedirs(self.dest)
self.results = None
self.resetResults()
self.baseUrl = None
def resetResults(self):
self.results = dict(
count=0,
skipped=0,
failed=0,
succeeded=0,
nonexistent=0,
)
def setBaseUrl(self, url):
# storing base URL simplifies recursion
self.baseUrl = url
def downloadImage(self, imgurl, dest=None):
# download an image, avoiding duplication.
imgname = imgurl.split('/')[-1]
if not dest:
rdest = self.dest
else:
rdest = os.path.join(self.dest, dest)
imgwrite = os.path.join(rdest, imgname)
if not os.path.exists(rdest):
os.makedirs(rdest)
try:
if not os.path.exists(imgwrite):
r = requests.get(imgurl)
with open(imgwrite, "wb") as code:
code.write(r.content)
self.logger.logEntry(('downloaded ' + imgname), 'progress')
self.results['succeeded'] += 1
return True
else:
self.logger.logEntry(('already have ' + imgname),
'verbose')
self.results['skipped'] += 1
return True
except:
exc = traceback.format_exc()
print exc
self.logger.logEntry(
'failed to get: {0} from {1} - Traceback:\n{2}'.format(
imgurl, imgname, exc), 'verbose')
self.results['failed'] += 1
return None
def findWindowSharedData(self, pageurl):
page = requests.get(pageurl).content
soup = BeautifulSoup(page, "html.parser")
scripts = soup.find_all('script')
for each in scripts:
if each.string:
if each.string.startswith('window._sharedData'):
return each.string.split(' = ')[-1]
def getLinksForGalleryPage(self, url):
"""
Recursive function to t | raverse the script payload that apparently
is used to load Instagram pages completely on the fly. Pulls each
individual "page" - which is apparently 24 images by the user,
delineated by the "start_cursor" and "end_cursor" in the payload -
so that it can be parsed for images, and then us | es the ending cursor
to generate the link to the next "page".
"""
username = baseurl.split('/')[-2]
print "Downloaded {1} images. Scanning {0}...".format(
url, self.results['succeeded'])
payloadRaw = self.findWindowSharedData(url)
payloadRaw = re.sub('/', '', payloadRaw)
postIds = re.findall(
'(?<=\{"code":").*?"',
payloadRaw)
for code in postIds:
hrlink = self.getHighResLink(code[:-1])
self.downloadImage(hrlink, dest=username)
hasNextId = re.search(
'(?<=has_next_page":)[truefals]*',
payloadRaw)
if hasNextId.group(0) == "true":
nextId = re.search(
'(?<=end_cursor":")[0-9]*',
payloadRaw)
nextUrl = self.baseUrl + "?max_id=" + nextId.group(0)
self.getLinksForGalleryPage(nextUrl)
else:
return
def getHighResLink(self, code):
pageurl = 'https://www.instagram.com/p/{0}/?hl=en'.format(code)
payloadRaw = self.findWindowSharedData(pageurl)
hrlink = re.findall(
'(?<="display_src":").*?\?',
payloadRaw)[0]
hrlink = hrlink.replace('\\', '')[:-1]
return hrlink
if __name__ == "__main__":
dt = arrow.utcnow().to('US/Pacific').format('YYYY-MM-DD')
logfile = os.path.join('logs', str('spidergram ' + dt + '.log'))
logger = instaLogger(logfile)
site = instagram(logger)
baseurl = "https://www.instagram.com/13thwitness/"
site.setBaseUrl(baseurl)
site.getLinksForGalleryPage(baseurl)
|
arvinddoraiswamy/LearnPython | 17.py | Python | mit | 867 | 0.013841 | import struct
''' Refer to docs for all the exact formats. There are many so check them out before converting things yourself '''
''' If there's a specific offset you want to do things from, use pack_into and unack_into from the docs '''
#Integer to string
i1= 1234
print "Int to string as 8 byte little endian", repr(struct.pack("<Q",i1))
print "Int to string as 8 byte big endian", repr(struct.pack(">Q",i1))
#String to integer. Make sure size of destination matches the length of the string
s1= '1234'
print "String to 4 byte integer little endian", struct.u | npack("<i", s1)
print "String to 4 byte integer big en | dian", struct.unpack(">i", s1)
''' Whenever you want to convert to and from binary, think of binascii '''
import binascii
h1= binascii.b2a_hex(s1)
print "String to hex", h1
uh1= binascii.a2b_hex(h1)
print "Hex to string, even a binary string", uh1
|
ImmobilienScout24/crassus | src/unittest/python/output_converter_tests.py | Python | apache-2.0 | 5,826 | 0 | import json
import unittest
from crassus.deployment_response import DeploymentResponse
from crassus.output_converter import OutputConverter
from mock import call, patch
from utils import load_fixture_json
cfn_event = load_fixture_json('cfn_event.json')
cfn_event_different_termination = load_fixture_json(
'cfn_event_different_termination.json')
class TestOutputConverter(unittest.TestCase):
"""
Tests for OutputConverter.
"""
def setUp(self):
self.event = cfn_event
self.context = {}
self.output_converter = OutputConverter(self.event, self.context)
# Patch get_lambda_config_property
self.patch_getconfig = patch(
'crassus.output_converter.get_lambda_config_property')
self.mock_getconfig = self.patch_getconfig.start()
self.mock_getconfig.return_value = ['OUTPUT-SQS-QUEUE-1']
# Patch logger
self.patch_logger = patch('crassus.output_converter.logger')
self.mock_logger = self.patch_logger.start()
# Patch sqs_send_message
self.patch_sqs_send = patch(
'crassus.output_converter.sqs_send_message')
self.mock_sqs_send = self.patch_sqs_send.start()
def teardown(self):
self.patch_getconfig.stop()
self.patch_logger.stop()
self.patch_sqs_send.stop()
def test_cast_type_string(self):
"""
_cast_type() should return string if the input is not a valid
JSON.
"""
invalid_json = '{foo'
return_value = self.output_converter._cast_type(invalid_json)
self.assertEqual(return_value, invalid_json)
def test_cast_type_json(self):
"""
_cast_type() should return a python type if the input is a valid
JSON.
"""
valid_json = '{"foo":"bar"}'
return_value = self.output_converter._cast_type(valid_json)
self.assertEqual(return_value, json.loads(valid_json))
def test_parser_parses_correctly(self):
"""
Test if the input event is parsed correctly.
We only check parts of the input event, which implies it was
parsed properly.
"""
return_value = self.output_converter._parse_sns_message(
self.event['Records'][0]['Sns']['Message'])
self.assertEqual(return_value['ResourceStatus'], 'CREATE_IN_PROGRESS')
self.assertEqual(return_value['Namespace'], 123456789012)
self.assertEqual(return_value['StackName'], 'crassus-karolyi-temp1')
self.assertNotEqual(
return_value['StackName'], 'crassus-karolyi-temp1garbage')
self.assertEqual(return_value['ResourceProperties'], {
'Action': 'lambda:invokeFunction',
'SourceArn':
'arn:aws:sns:eu-west-1:123456789012:crassus-karolyi-temp1-'
'cfnOutputSnsTopic-KKF3Y90CS6SA',
'FunctionName':
'crassus-karolyi-temp1-cfnOutputConverterFunction-'
'7T8X9HH83YRH',
'Principal': 'sns.amazonaws.com'})
def test_different_termination_parsed(self):
"""
Test if the input event is parsed correctly. In this test, the
message parameter does not have a "'\n" termination at the end
of it, yet the parser parses the last parameter correctly.
We only check parts of the input event, which implies it was
parsed properly.
"""
return_value = self.output_converter._parse_sns_message(
cfn_event_different_termination['Records'][0]['Sns']['Message'])
self.assertEqual(return_value['ResourceStatus'], 'CREATE_IN_PROGRESS')
self.assertEqual(return_value['Namespace'], 123456789012)
# Al | l hail the successful parsing, last parameter is StackName!
self.assertEqual(return_value['StackName'], 'crassus-karolyi-temp1')
# ... and the value should NOT have a closing quote.
self.assertNotEqual(
return_value['StackName'], 'crassus-karolyi-temp1\' | ')
self.assertEqual(return_value['ResourceProperties'], {
'Action': 'lambda:invokeFunction',
'SourceArn':
'arn:aws:sns:eu-west-1:123456789012:crassus-karolyi-temp1-'
'cfnOutputSnsTopic-KKF3Y90CS6SA',
'FunctionName':
'crassus-karolyi-temp1-cfnOutputConverterFunction-'
'7T8X9HH83YRH',
'Principal': 'sns.amazonaws.com'})
    def test_converts_correctly(self):
        """
        convert() should call and initialize the right
        functions/objects.
        """
        self.output_converter.convert()
        # A well-formed event must not trigger any warnings.
        self.assertEqual(self.mock_logger.warning.call_count, 0)
        # Exactly one message is forwarded to the output SQS queue(s),
        # with the CloudFormation fields remapped to the output schema.
        self.mock_sqs_send.assert_called_once_with(
            ['OUTPUT-SQS-QUEUE-1'], {
                'status': 'CREATE_IN_PROGRESS',
                'timestamp': '2015-11-23T16:53:46.443Z',
                'stackName': 'crassus-karolyi-temp1',
                'version': '1.1',
                'message': 'Resource creation Initiated',
                'emitter': 'cloudformation',
                'resourceType': 'AWS::Lambda::Permission'})
        # The payload must be a DeploymentResponse, not a plain dict.
        deployment_parameter = self.mock_sqs_send.call_args[0][1]
        self.assertIs(type(deployment_parameter), DeploymentResponse)
    def test_skips_empty_messages(self):
        """
        If there is no 'Sns' or 'Message' in the received event list,
        log a warning.
        """
        # Two malformed records: one empty, one with an unrelated key.
        self.output_converter.event = {'Records': [{}, {'foo': 1}]}
        self.output_converter.convert()
        # Nothing may be forwarded; one warning per skipped record.
        self.assertFalse(self.mock_sqs_send.called)
        self.assertEqual(list(self.mock_logger.warning.call_args_list), [
            call('No \'Sns\' or \'Message\' in received event: {}'),
            call('No \'Sns\' or \'Message\' in received event: {\'foo\': 1}')])
|
benjaminjack/pinetree | tests/models/__init__.py | Python | mit | 18 | 0 | # f | rom . impor | t *
|
araichev/invoicing | invoicing/main.py | Python | mit | 10,632 | 0.005173 | """
CONVENTIONS:
- A timesheet object is a Pandas DataFrame object with at least the columns
* ``'date'``: date worked was done; datetime object
* ``'project'``: project the work is part of
* ``'duration'``: time spent on work; hours
- A biller is a univariate function that maps duration to cost (in some currency units)
- All dates described below are YYYYMMDD strings unless specified otherwise
"""
from collections import OrderedDict
from datetime import datetime
from pathlib import Path

import jinja2 as j2
import numpy as np
import pandas as pd
#: Default date format
DATE_FORMAT = '%Y%m%d'
#: Acceptable time units
VALID_TIME_UNITS = [
'min', # minutes
'h', # hours
]
#---------------------------------------
# Reading
#---------------------------------------
def parse_date(date_str, date_format=DATE_FORMAT):
    """
    Parse ``date_str`` according to ``date_format`` and return the
    resulting datetime object.  Return ``None`` if ``date_str`` is
    ``None``.
    """
    if date_str is None:
        return None
    # Bug fix: the format argument was previously ignored (a hard-coded
    # '%Y%m%d' was always used).  Also use the stdlib directly, because
    # the ``pd.datetime`` alias was removed in pandas 2.0.
    return datetime.strptime(date_str, date_format)
def build_convert_to_hours(time_units):
    """
    Given a time units string (one of ``VALID_TIME_UNITS``), return a
    function that converts a duration from those units to hours.

    Raises:
        ValueError: if ``time_units`` is not a supported unit.
    """
    # Dispatch table: unit string -> conversion-to-hours function.
    # Its keys mirror VALID_TIME_UNITS.
    converters = {
        'min': lambda duration: duration/60,
        'h': lambda duration: duration,
    }
    try:
        return converters[time_units]
    except KeyError:
        # Improved over the original two-argument ValueError, which
        # rendered its message as a tuple.
        raise ValueError(
            'Time units must be one of {}'.format(VALID_TIME_UNITS))
def read_timesheet(path, date_format=DATE_FORMAT, input_time_units='h'):
    """
    Read a timesheet CSV located at the given path (string or Path object)
    and return its corresponding timesheet data frame, sorted by date,
    with durations converted to hours.

    The timesheet must contain at least the columns

    - ``'date'``: date string in the format specified by ``date_format``,
      e.g '%Y%m%d'
    - ``'project'``: project name; string
    - ``'duration'``: time spent on project in units specified by
      the string ``input_time_units`` which must lie in ``VALID_TIME_UNITS``,
      e.g. 'min' for minutes.
    """
    # Bug fix: the ``date_format`` argument was previously ignored --
    # the module-level DATE_FORMAT was always passed to the parser.
    f = pd.read_csv(path, parse_dates=['date'],
      date_parser=lambda x: parse_date(x, date_format))
    f = f.sort_values('date')
    # Convert durations from the input units to hours.
    convert_to_hours = build_convert_to_hours(input_time_units)
    f['duration'] = f['duration'].map(convert_to_hours)
    return f
#---------------------------------------
# Manipulation
#---------------------------------------
def slice_by_dates(timesheet, date1=None, date2=None):
    """
    Return the rows of the timesheet whose date satisfies
    date1 <= date <= date2.  Either bound may be ``None`` for an
    open-ended slice.
    """
    start, end = parse_date(date1), parse_date(date2)
    # Slice on a date index, then restore 'date' as a regular column.
    sliced = timesheet.copy().set_index('date')[start:end]
    return sliced.reset_index()
def agg_by_project(timesheet, date1=None, date2=None, freq=None):
    """
    Slice the given timesheet by the given dates then aggregate total
    duration by project.
    If a Pandas frequency string is given (e.g. 'W' for calendar week),
    then resample by that frequency and then aggregate duration by project.
    Return a data frame with the columns:

    - ``'start_date'``: start date of the time period corresponding to the
      given frequency, or the first date in the sliced timesheet
    - ``'end_date'``: end date of the time period corresponding to the
      given frequency, or the last date in the sliced timesheet
    - ``'project'``
    - ``'duration'``: total duration on project in period
    """
    f = slice_by_dates(timesheet, date1, date2)
    if freq is not None:
        # Resample each project's durations onto the frequency grid;
        # fillna(0) covers periods in which a project logged no time.
        f = f.groupby('project').apply(
            lambda x: x.set_index('date')[['duration']].resample(freq
            ).sum().fillna(0)).reset_index()
        f = f[['date', 'project', 'duration']].sort_values(
            'date')
        # Derive the period bounds from the resampled timestamps.
        f['period'] = f['date'].map(lambda x: pd.Period(x, freq))
        f['start_date'] = f['period'].map(lambda x: x.start_time)
        f['end_date'] = f['period'].map(lambda x: x.end_time)
    else:
        # No frequency: one aggregate row per project spanning the
        # whole sliced date range.
        start_date, end_date = f['date'].min(), f['date'].max()
        f = f.groupby('project').agg({'duration': np.sum}
            ).reset_index()
        f['start_date'] = start_date
        f['end_date'] = end_date
    return f[['start_date', 'end_date', 'project', 'duration']].copy()
#---------------------------------------
# Billing
#---------------------------------------
def decompose(x, bins):
    """
    Given a number x and a list of numbers x_1, x_2, ..., x_n (the
    input ``bins``) whose sum is at least x and whose last element may
    equal ``np.inf`` (positive infinity), find the least k < n such that

        x = x_1 + x_2 + ... + x_k + r

    where 0 <= r < x_{k+1}, and return the length-n list
    [x_1, x_2, ..., x_k, r, 0, ..., 0].

    Raises:
        ValueError: if ``sum(bins) < x`` (the original docstring showed
        ``decompose(17, [10])`` returning ``[17]``, but that input
        violates the precondition and has always raised here).

    EXAMPLES::

        >>> decompose(17, [10, 15, np.inf])
        [10, 7, 0]
        >>> decompose(27, [10, 15, np.inf])
        [10, 15, 2]
        >>> decompose(17, [np.inf])
        [17]
    """
    # Validity check
    if x > sum(bins):
        raise ValueError('The sum of the bins must be at least as great as x')
    parts = []
    # Greedily fill each bin in order.  (The unused ``prev_bin``
    # variable and the builtin-shadowing loop name ``bin`` from the
    # original have been removed/renamed.)
    for bin_size in bins:
        if x >= bin_size:
            # This bin is consumed entirely.
            parts.append(bin_size)
            x -= bin_size
        elif x > 0:
            # The remainder lands in this bin.
            parts.append(x)
            x = 0
        else:
            # Nothing left; pad with zeros.
            parts.append(0)
    return parts
def build_linear_biller(rate, base_fee=0, freq=None, name=None):
    """
    Return a biller with the given hourly rate, base fee,
    and billing frequency
    (Pandas frequency string such as 'W' for weekly billing).
    Uses :func:`build_piecewise_linear_biller` with a single unbounded
    bin, so cost = base_fee + rate*duration.
    The returned function also contains some metadata as shown
    in the examples below.

    EXAMPLES::

        >>> b = build_linear_biller(100, base_fee=1)
        >>> b(17)
        1701
        >>> b.__dict__
        {'base_fee': 1,
         'bins': [inf],
         'freq': None,
         'kind': 'linear',
         'name': None,
         'rates': [100]}
    """
    biller = build_piecewise_linear_biller(base_fee=base_fee,
        bins=[np.inf], rates=[rate], freq=freq, name=name)
    # Overwrite the kind set by the piecewise builder.
    biller.kind = 'linear'
    return biller
def build_piecewise_linear_biller(bins, rates, base_fee=0, freq=None,
  name=None):
    """
    Return a biller that charges, at the given billing frequency
    (Pandas frequency string such as 'W' for weekly billing), the given
    base fee plus the given hourly rates applied to the chunks of time
    listed in ``bins``.  The returned callable carries its configuration
    as attributes (``name``, ``kind``, ``base_fee``, ``bins``,
    ``rates``, ``freq``).

    EXAMPLES::

        >>> bins = [10, 15, np.inf]
        >>> rates = [1, 2, 3]
        >>> b = build_piecewise_linear_biller(bins, rates, base_fee=1)
        >>> b(27)
        47  # = 1 + 1*10 + 2*15 + 3*2
    """
    def biller(duration):
        # Split the duration across the bins, then price each chunk.
        chunks = decompose(duration, bins)
        return base_fee + np.dot(chunks, rates)

    # Attach the configuration as metadata on the callable itself.
    metadata = {
        'name': name,
        'kind': 'piecewise_linear',
        'base_fee': base_fee,
        'bins': bins,
        'rates': rates,
        'freq': freq,
    }
    for attribute, value in metadata.items():
        setattr(biller, attribute, value)
    return biller
def compute_costs(timesheet, biller, date1=None, date2=None):
"""
Slice the given timesheet to the given dates and compute
the cost of the total duration according to the given biller.
Return a new data frame with the columns
- ``'start_date'``: start date of the time period corresponding to the
biller's frequency, or the first date in the sliced timesheet
- ``'end_date'``: end date of the time period corresponding to the
biller's frequency, or the last date in the sliced timesheet
- ``'duration'``: duration resampled at the biller's frequency
via summing
- ``'rate'``: cost per hour; ``np.inf`` in case of base fee
- ``'cost'``: duration multiplied by rate
If the biller has bins, then the total duration is decomposed
by the biller's bins.
""" |
nirs/vdsm | tests/virt/thinp_test.py | Python | gpl-2.0 | 5,860 | 0 | #
# Copyright 2017-2020 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import logging
from vdsm.virt import thinp
from vdsm.common.config import config
from vdsm.common.units import MiB, GiB
from vdsm.virt.vmdevices.storage import Drive, BLOCK_THRESHOLD
import pytest
@pytest.mark.parametrize("enabled", [True, False])
def test_enable_on_create(enabled):
    """The monitor must honor the initial ``enabled`` flag it is given."""
    vm = FakeVM()
    mon = thinp.VolumeMonitor(vm, vm.log, enabled=enabled)
    assert mon.enabled() == enabled
def test_enable_runtime():
    """enable() must flip a monitor created disabled to enabled."""
    vm = FakeVM()
    mon = thinp.VolumeMonitor(vm, vm.log, enabled=False)
    mon.enable()
    assert mon.enabled() is True
def test_disable_runtime():
    """disable() must flip a monitor created enabled to disabled."""
    vm = FakeVM()
    mon = thinp.VolumeMonitor(vm, vm.log, enabled=True)
    mon.disable()
    assert mon.enabled() is False
def test_set_threshold():
    """
    _set_threshold() must arm the libvirt block threshold at
    apparentsize minus the free fraction of one extension chunk,
    mirroring the irs.volume_utilization_* configuration.
    """
    vm = FakeVM()
    mon = thinp.VolumeMonitor(vm, vm.log)
    vda = make_drive(vm.log, index=0, iface='virtio')
    vm.drives.append(vda)
    apparentsize = 4 * GiB
    # Recompute the monitor's expected math from the same config values.
    chunk_size = config.getint("irs", "volume_utilization_chunk_mb") * MiB
    free = (100 - config.getint("irs", "volume_utilization_percent")) / 100
    threshold = chunk_size * free
    # TODO: Use public API.
    mon._set_threshold(vda, apparentsize, 1)
    expected = apparentsize - threshold
    # The threshold is registered against the indexed device name.
    assert vm._dom.thresholds == [('vda[1]', expected)]
def test_set_threshold_drive_too_small():
    # We seen the storage subsystem creating drive too small,
    # less than the minimum supported size, 1GiB.
    # While this is a storage issue, the volume monitor should
    # be fixed no never set negative thresholds.
    vm = FakeVM()
    mon = thinp.VolumeMonitor(vm, vm.log)
    vda = make_drive(vm.log, index=0, iface='virtio')
    vm.drives.append(vda)
    # Smaller than one extension chunk, so naive math would go negative.
    apparentsize = 128 * MiB
    # TODO: Use public API.
    mon._set_threshold(vda, apparentsize, 3)
    target, value = vm._dom.thresholds[0]
    assert target == 'vda[3]'
    # The threshold must be clamped to a positive value.
    assert value >= 1
def test_clear_threshold():
    """Clearing a threshold registers value 0 for the indexed device."""
    vm = FakeVM()
    mon = thinp.VolumeMonitor(vm, vm.log)
    # one drive (virtio, 0)
    vda = make_drive(vm.log, index=0, iface='virtio')
    # clear the 1st element in the backing chain of the drive
    mon.clear_threshold(vda, 1)
    assert vm._dom.thresholds == [('vda[1]', 0)]
def test_on_block_threshold_drive_name_ignored():
    """Events naming the whole drive (no index) must be ignored."""
    vm = FakeVM()
    mon = thinp.VolumeMonitor(vm, vm.log)
    vda = make_drive(vm.log, index=0, iface='virtio')
    vm.drives.append(vda)
    # "vda" (without "[n]") must not change the drive's state.
    mon.on_block_threshold("vda", vda.path, 512 * MiB, 10 * MiB)
    assert vda.threshold_state == BLOCK_THRESHOLD.UNSET
def test_on_block_threshold_indexed_name_handled():
    """Events naming an indexed volume ("vda[1]") mark it EXCEEDED."""
    vm = FakeVM()
    mon = thinp.VolumeMonitor(vm, vm.log)
    vda = make_drive(vm.log, index=0, iface='virtio')
    vm.drives.append(vda)
    mon.on_block_threshold("vda[1]", vda.path, 512 * MiB, 10 * MiB)
    assert vda.threshold_state == BLOCK_THRESHOLD.EXCEEDED
def test_on_block_threshold_unknown_drive():
    """Events for a drive the VM does not own must be ignored."""
    vm = FakeVM()
    mon = thinp.VolumeMonitor(vm, vm.log)
    vda = make_drive(vm.log, index=0, iface='virtio')
    vm.drives.append(vda)
    # "vdb" does not match any drive owned by the VM.
    mon.on_block_threshold("vdb", "/unkown/path", 512 * MiB, 10 * MiB)
    assert vda.threshold_state == BLOCK_THRESHOLD.UNSET
def test_monitoring_needed():
    """
    monitoring_needed() is true iff the monitor is enabled AND at
    least one drive reports needs_monitoring().
    """
    class FakeDrive:
        # Minimal drive stub whose needs_monitoring() returns ``flag``.
        def __init__(self, flag):
            self.flag = flag

        def needs_monitoring(self):
            return self.flag

    vm = FakeVM()
    mon = thinp.VolumeMonitor(vm, vm.log)
    # No drives at all: nothing to monitor.
    assert not mon.monitoring_needed()
    vm.drives.append(FakeDrive(False))
    assert not mon.monitoring_needed()
    # One drive needing monitoring is enough.
    vm.drives.append(FakeDrive(True))
    assert mon.monitoring_needed()
    vm.drives.append(FakeDrive(False))
    assert mon.monitoring_needed()
    # Disabling the monitor overrides the drives' state.
    mon.disable()
    assert not mon.monitoring_needed()
    mon.enable()
    assert mon.monitoring_needed()
    # With the only needy drive cleared, monitoring stops again.
    vm.drives[1].flag = False
    assert not mon.monitoring_needed()
class FakeVM(object):
    """Minimal VM stand-in exposing what VolumeMonitor needs."""

    log = logging.getLogger('test')

    def __init__(self):
        self.id = "fake-vm-id"
        self.drives = []
        self.block_stats = []
        self._dom = FakeDomain()

    def getDiskDevices(self):
        # Hand out a copy so callers cannot mutate our drive list.
        return list(self.drives)

    def query_block_stats(self):
        return self.block_stats
class FakeDomain(object):
    """Fake libvirt domain recording setBlockThreshold() calls."""

    def __init__(self):
        # (device_name, threshold) tuples, in registration order.
        self.thresholds = []

    def setBlockThreshold(self, drive_name, threshold):
        self.thresholds += [(drive_name, threshold)]
def make_drive(log, index, **param_dict):
    """
    Build a storage Drive for tests, deriving per-drive domain/pool/
    image/volume IDs from ``index``; extra keyword args override the
    drive_config() defaults.
    """
    conf = drive_config(
        index=str(index),
        domainID='domain_%s' % index,
        poolID='pool_%s' % index,
        imageID='image_%s' % index,
        volumeID='volume_%s' % index,
        **param_dict
    )
    return Drive(log, **conf)
def drive_config(**kw):
    """Return a drive configuration dict; **kw overrides the defaults."""
    defaults = {
        'device': 'disk',
        'format': 'cow',
        'iface': 'virtio',
        'index': '0',
        'path': '/path/to/volume',
        'propagateErrors': 'off',
        'shared': 'none',
        'type': 'disk',
        'readonly': False,
    }
    # Merge without mutating the defaults template.
    return dict(defaults, **kw)
|
richm/designate | designate/objects/tsigkey.py | Python | apache-2.0 | 756 | 0 | # Copyright (c) 2014 Rackspace Hosting
# All Rights Reserved.
# |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance wi | th the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from designate.objects.base import BaseObject
class TsigKey(BaseObject):
    """Designate domain object for a DNS TSIG key.

    Carries the key's name, its algorithm identifier and the shared
    secret, as declared in FIELDS for the BaseObject machinery.
    """
    FIELDS = ['name', 'algorithm', 'secret']
|
mgedmin/python-livereload | server.py | Python | bsd-3-clause | 173 | 0 | # coding: | utf-8
from livereload import Server, shell

# Rebuild the docs (via `make html`) whenever a reStructuredText source
# under docs/ changes, and serve the generated HTML with live reload,
# opening a browser tab on startup.
server = Server()
server.watch('docs/*.rst', shell('make html'))
server.serve(root='docs/_build/html', open_url=True)
|
sharadbhat/Video-Sharing-Platform | Client/client.py | Python | mit | 36,232 | 0.014186 | from flask import Flask, redirect, url_for, session, request, render_template_string, abort
import requests
import os
import ast
import base64
from nocache import nocache
#App config
ALLOWED_EXTENSIONS = set(['mp4'])
app = Flask(__name__)
app.secret_key = os.urandom(24)
@app.errorhandler(404)
@nocache
def error_404(e):
    """
    - Displays the 404 error page.
    """
    # Fetch the 404 template from the content server and render it.
    response = requests.get(
        url='http://127.0.0.1:8080/html/{}'.format('404.html'))  # Done
    error_page = response.content.decode("utf-8")
    return render_template_string(error_page)
@app.errorhandler(403)
@nocache
def error_403(e):
    """
    - Displays the 403 error page.
    """
    # Fetch the 403 template from the content server and render it.
    error_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('403.html'))).content).decode("utf-8") # Done
    return render_template_string(error_page)
@app.route("/", methods = ['GET'])
@nocache
def start(): #WORKS
    """
    - The starting page.
    - Redirects to login page if not logged in.
    - Redirects to dashboard if logged in.
    """
    logged_in = False
    if 'user' in session:
        logged_in = True
        # Admin accounts are sent to the admin dashboard instead of
        # the public homepage.
        is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
        if is_admin == "True":
            return redirect(url_for('dashboard'))
    # Ask the backend for the IDs of the most-viewed videos.
    # NOTE(review): ast.literal_eval assumes the backend returns a
    # Python literal (e.g. a list repr) -- confirm the API contract.
    most_viewed_video_IDs = ((requests.get('http://127.0.0.1:8080/get-most-viewed')).content).decode("utf-8") # Done
    most_viewed = {}
    most_viewed_video_IDs = ast.literal_eval(most_viewed_video_IDs)
    for ID in most_viewed_video_IDs:
        # Gather display metadata (title, views, uploader) per video.
        title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(ID))).content).decode("utf-8") # Done
        views = ((requests.get(url='http://127.0.0.1:8080/views/{}'.format(ID))).content).decode("utf-8") # Done
        uploader = ((requests.get(url='http://127.0.0.1:8080/uploader/{}'.format(ID))).content).decode("utf-8") # Done
        details = [title, views, uploader]
        most_viewed.update({ID : details})
    # Render the homepage template fetched from the content server.
    homepage = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('homepage.html'))).content).decode("utf-8") # Done
    return render_template_string(homepage, logged_in = logged_in, most_viewed = most_viewed)
@app.route("/login", methods = ['POST', 'GET'])
@nocache
def login_form(): #WORKS
    """
    In GET request,
    - Redirects to dashboard if logged in.
    - Displays login form if not logged in.
    """
    if request.method == 'GET':
        # 'l_error' query flag makes the template show a failure banner.
        login_error = request.args.get('l_error', False)
        if 'user' in session:
            return redirect(url_for("start"))
        else:
            login_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('login.html'))).content).decode("utf-8") # Done
            return render_template_string(login_page, loginError = login_error)
    """
    In POST request
    - Gets data from form.
    - Validates user credentials.
    """
    if request.method == 'POST':
        if 'user' in session:
            return redirect(url_for('dashboard'))
        # Usernames are normalized to lowercase; passwords are not.
        username = (request.form['username']).lower().strip()
        password = (request.form['password'])
        # Delegate credential checking to the backend service.
        is_valid_user = ((requests.post(url='http://127.0.0.1:8080/is-valid-user', data={'username' : username, 'password' : password})).content).decode("utf-8") # Done
        if is_valid_user == "True":
            session['user'] = username
            return redirect(url_for("start"))
        else:
            # Bounce back to the form with the error flag set.
            return redirect(url_for("login_form", l_error = True))
@app.route("/signup", methods = ['GET', 'POST'])
@nocache
def signup_form(): #WORKS
    """
    In GET request
    - Displays sign up page.
    """
    if request.method == 'GET':
        if 'user' in session:
            return redirect(url_for('start'))
        # 's_error' query flag makes the template show a failure banner.
        signup_error = request.args.get('s_error', False)
        signup_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('signup.html'))).content).decode("utf-8") # Done
        return render_template_string(signup_page, signupError = signup_error)
    """
    In POST request
    - Gets data from form.
    - Checks if username is not already present.
    - Adds to database if not present.
    - Redirects to dashboard.
    """
    if request.method == 'POST':
        # Usernames are normalized to lowercase; passwords are not.
        username = (request.form['username']).lower().strip()
        password = (request.form['password'])
        # "True" here means the username is already taken.
        is_valid_username = ((requests.get(url='http://127.0.0.1:8080/is-valid-username/{}'.format(username))).content).decode("utf-8") # Done
        if is_valid_username == "False":
            requests.post(url='http://127.0.0.1:8080/add-user', data={'username' : username, 'password' : password}) # Done
            # Log the new user in immediately.
            session['user'] = username
            return redirect(url_for("start"))
        else:
            return redirect(url_for("signup_form", s_error = True))
@app.route("/change-password", methods = ['GET', 'POST'])
@nocache
def password_update_form(): #WORKS
    """
    In GET request
    - Redirects to login page if not logged in.
    - Displays the password update form.
    """
    if request.method == 'GET':
        # 'u_error' query flag makes the template show a failure banner.
        u_error = request.args.get('u_error', False)
        if 'user' not in session:
            return redirect(url_for('login_form'))
        # Admin accounts may not change their password here.
        is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
        if is_admin == "True":
            abort(403)
        password_update_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('password_update.html'))).content).decode("utf-8") # Done
        if u_error == False:
            return render_template_string(password_update_page)
        else:
            return render_template_string(password_update_page, update_error = True)
    """
    In POST request
    - Gets the old and new passwords.
    - Checks the old password.
    - If it matches the stored password, password is updated.
    - Otherwise, error is thrown.
    """
    if request.method == 'POST':
        if 'user' not in session:
            return redirect(url_for('login_form'))
        is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
        if is_admin == "True":
            abort(403)
        username = session['user']
        old_password = request.form['oldPassword']
        new_password = request.form['newPassword']
        # The backend verifies the old password and applies the update.
        done = (requests.post(url='http://127.0.0.1:8080/update-password', data={'username' : username, 'old_password' : old_password, 'new_password' : new_password}).content).decode("utf-8") # Done
        if done == "True":
            return redirect(url_for('start'))
        else:
            return redirect(url_for('password_update_form', u_error = True))
@app.route("/delete", methods = ['GET', 'POST'])
@nocache
def delete_own_account(): #WORKS
"""
In GET request
- Displays confirmation page.
"""
if request.method == 'GET':
if 'user' not in session:
return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
confirmation_error = request.args.get('c_error', False)
confirmation_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('account_delete_confirm.html'))).content).decode("utf-8") # Done
if confirmation_error == False:
return render_template_string(confirmation_page)
else:
return render_template_string(confirmation_page, c_error = True)
"""
In POST request
- Deletes the user credentials from the database.
- Redirects to login page.
"""
if request.method == 'POST':
if 'user' not in session:
return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
username = session['user']
password = request.form['password']
is_d |
procangroup/edx-platform | cms/djangoapps/course_creators/tests/test_admin.py | Python | agpl-3.0 | 8,540 | 0.004567 | """
Tests course_creators.admin.py.
"""
import mock
import django
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from django.core import mail
from django.http import HttpRequest
from django.test import TestCase
from course_creators.admin import CourseCreatorAdmin
from course_creators.models import CourseCreator
from student import auth
from student.roles import CourseCreatorRole
def mock_render_to_string(template_name, context):
    """Return a string that encodes template_name and context"""
    encoded = (template_name, context)
    return str(encoded)
class CourseCreatorAdminTest(TestCase):
"""
Tests for course creator admin.
"""
    def setUp(self):
        """ Test case setup """
        super(CourseCreatorAdminTest, self).setUp()
        # A plain user with a pending course-creator table entry...
        self.user = User.objects.create_user('test_user', 'test_user+courses@edx.org', 'foo')
        self.table_entry = CourseCreator(user=self.user)
        self.table_entry.save()
        # ...and a staff user acting as the admin making the changes.
        self.admin = User.objects.create_user('Mark', 'admin+courses@edx.org', 'foo')
        self.admin.is_staff = True
        # Hand-built request so the admin object sees self.admin as actor.
        self.request = HttpRequest()
        self.request.user = self.admin
        self.creator_admin = CourseCreatorAdmin(self.table_entry, AdminSite())
        # Address that notification e-mails are sent from/to.
        self.studio_request_email = 'mark@marky.mark'
        self.enable_creator_group_patch = {
            "ENABLE_CREATOR_GROUP": True,
            "STUDIO_REQUEST_EMAIL": self.studio_request_email
        }
    @mock.patch('course_creators.admin.render_to_string', mock.Mock(side_effect=mock_render_to_string, autospec=True))
    @mock.patch('django.contrib.auth.models.User.email_user')
    def test_change_status(self, email_user):
        """
        Tests that updates to state impact the creator group maintained in authz.py and that e-mails are sent.
        """
        def change_state_and_verify_email(state, is_creator):
            """ Changes user state, verifies creator status, and verifies e-mail is sent based on transition """
            self._change_state(state)
            # Role membership must track the new state.
            self.assertEqual(is_creator, auth.user_has_role(self.user, CourseCreatorRole()))
            # The notification template is chosen by the target state.
            context = {'studio_request_email': self.studio_request_email}
            if state == CourseCreator.GRANTED:
                template = 'emails/course_creator_granted.txt'
            elif state == CourseCreator.DENIED:
                template = 'emails/course_creator_denied.txt'
            else:
                template = 'emails/course_creator_revoked.txt'
            email_user.assert_called_with(
                mock_render_to_string('emails/course_creator_subject.txt', context),
                mock_render_to_string(template, context),
                self.studio_request_email
            )

        with mock.patch.dict('django.conf.settings.FEATURES', self.enable_creator_group_patch):
            # User is initially unrequested.
            self.assertFalse(auth.user_has_role(self.user, CourseCreatorRole()))
            # Walk through every meaningful state transition.
            change_state_and_verify_email(CourseCreator.GRANTED, True)
            change_state_and_verify_email(CourseCreator.DENIED, False)
            change_state_and_verify_email(CourseCreator.GRANTED, True)
            change_state_and_verify_email(CourseCreator.PENDING, False)
            change_state_and_verify_email(CourseCreator.GRANTED, True)
            change_state_and_verify_email(CourseCreator.UNREQUESTED, False)
            change_state_and_verify_email(CourseCreator.DENIED, False)
    @mock.patch('course_creators.admin.render_to_string', mock.Mock(side_effect=mock_render_to_string, autospec=True))
    def test_mail_admin_on_pending(self):
        """
        Tests that the admin account is notified when a user is in the 'pending' state.
        """

        def check_admin_message_state(state, expect_sent_to_admin, expect_sent_to_user):
            """ Changes user state and verifies e-mail sent to admin address only when pending. """
            mail.outbox = []
            self._change_state(state)

            # If a message is sent to the user about course creator status change, it will be the first
            # message sent. Admin message will follow.
            base_num_emails = 1 if expect_sent_to_user else 0
            if expect_sent_to_admin:
                # TODO: Remove Django 1.11 upgrade shim
                # SHIM: Usernames come back as unicode in 1.10+, remove this shim post-upgrade
                if django.VERSION < (1, 10):
                    context = {'user_name': 'test_user', 'user_email': u'test_user+courses@edx.org'}
                else:
                    context = {'user_name': u'test_user', 'user_email': u'test_user+courses@edx.org'}
                self.assertEquals(base_num_emails + 1, len(mail.outbox), 'Expected admin message to be sent')
                sent_mail = mail.outbox[base_num_emails]
                self.assertEquals(
                    mock_render_to_string('emails/course_creator_admin_subject.txt', context),
                    sent_mail.subject
                )
                self.assertEquals(
                    mock_render_to_string('emails/course_creator_admin_user_pending.txt', context),
                    sent_mail.body
                )
                self.assertEquals(self.studio_request_email, sent_mail.from_email)
                self.assertEqual([self.studio_request_email], sent_mail.to)
            else:
                self.assertEquals(base_num_emails, len(mail.outbox))

        with mock.patch.dict('django.conf.settings.FEATURES', self.enable_creator_group_patch):
            # E-mail message should be sent to admin only when new state is PENDING, regardless of what
            # previous state was (unless previous state was already PENDING).
            # E-mail message sent to user only on transition into and out of GRANTED state.
            check_admin_message_state(CourseCreator.UNREQUESTED, expect_sent_to_admin=False, expect_sent_to_user=False)
            check_admin_message_state(CourseCreator.PENDING, expect_sent_to_admin=True, expect_sent_to_user=False)
            check_admin_message_state(CourseCreator.GRANTED, expect_sent_to_admin=False, expect_sent_to_user=True)
            check_admin_message_state(CourseCreator.DENIED, expect_sent_to_admin=False, expect_sent_to_user=True)
            check_admin_message_state(CourseCreator.GRANTED, expect_sent_to_admin=False, expect_sent_to_user=True)
            check_admin_message_state(CourseCreator.PENDING, expect_sent_to_admin=True, expect_sent_to_user=True)
            check_admin_message_state(CourseCreator.PENDING, expect_sent_to_admin=False, expect_sent_to_user=False)
            check_admin_message_state(CourseCreator.DENIED, expect_sent_to_admin=False, expect_sent_to_user=True)
    def _change_state(self, state):
        """ Helper method for changing state """
        self.table_entry.state = state
        # Saving through the admin triggers its state-change side effects.
        self.creator_admin.save_model(self.request, self.table_entry, None, True)
    def test_add_permission(self):
        """
        Tests that staff cannot add entries
        """
        self.assertFalse(self.creator_admin.has_add_permission(self.request))
    def test_delete_permission(self):
        """
        Tests that staff cannot delete entries
        """
        self.assertFalse(self.creator_admin.has_delete_permission(self.request))
    def test_change_permission(self):
        """
        Tests that only staff can change entries
        """
        # self.request.user is staff here, so changing is allowed...
        self.assertTrue(self.creator_admin.has_change_permission(self.request))
        # ...but a plain user must be rejected.
        self.request.user = self.user
        self.assertFalse(self.creator_admin.has_change_permission(self.request))
def test_rate_limit_login(self):
with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_CREATOR_GROUP': True}):
post_params = {'username': self.user.username, 'password': 'wrong_password'}
# try logging in 30 times, the default limit in the number of failed
# login attempts in one 5 minute period before the rate gets limited
for _ in xrange(30):
response = self.client.post('/admin/login/', post_params)
se |
kdlucas/pyrering | lib/pyreringconfig.py | Python | apache-2.0 | 10,508 | 0.003616 | #!/usr/bin/python
#
# Copyright 2008 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A module to control the application global properties.
This module will manage a singleton object for the PyreRing global
properties. These properties include: root_dir, testdatabase etc.
These properties are stored as a dictionary which is referred through a global
variable and managed by some module level methods in this module.
"""
__author__ = 'mwu@google.com (Mingyu Wu)'
import getpass
import os
import time
from lib import filesystemhandlerextend
# Runtime configuration keys, user can't overwrite through config file.
NON_OVERWRITTEN_KEYS = ['time', 'tester', 'host_name']
class PyreRingConfig(object):
"""A class to store PyreRing runtime config info in a dict.
This class is used to manage the pyrering related configuration data
and it will have a dictionary to hold them and pushed to global. It should be
maintained as a single instance.
During the whole test run, this is the only one copy of the properties.
It will contain a dictionary with key value pairs from the config file and
some extra items generated automatically, namely:
Automatically set by PyreRing, not user configurable:
root_dir: PyreRing root directory.
PyreRing automatically discovers it.
host_name: The machine name PyreRing is running on.
PyreRing automatically discovers it.
tester: The user account PyreRing is running as.
PyreRing automatically discovers it.
time: The time string identifies the pyrering was started.
PyreRing automatically discovers it.
Managed by config file only, not through command line:
log_level: The logging level as defined in Python logging module.
default value is INFO
skip_setup: If True, PyreRing will skip user setup suite.
default value is False.
header_file: User specified report header file which will be insert into
PyreRing report.
default value is <root_dir>/header_info.txt
FATAL_STRING: a string contains comma separated substrings. If any
substring is found in the test output, the test will fail,
regardless of the return code of the test.
default_suite: The name of default test suite, not currently used.
No default value.
Managed by config file and user can overwrite through command line options:
report_dir: the PyreRing report and log directory.
default value <root_dir>/reports/
conf_file: the name of PyreRing config file with path. If a non_absolute
path provided, the actual value will be os.path.join(ed) with
'<root_dir>/conf'
                default name is pyrering.conf
project_name: The name of a project PyreRing will test on.
sendmail: a boolean value if PyreRing should send out email report or not.
default value is False. Note: there will be no email if all test
passed regardless of this flag.
email_recipients: comma separated email addresses as email recipients.
default value is the same as tester.
    log_file: the name of the log file. If a non-absolute path is provided, the
the actual value will be os.path.join(ed) with
'<root_dir>/report'
default name is pyrering.log
file_errors: a boolean value that turns on filing the output of each none
passing testcase to a separate output file.
reset: a boolean value user sets from the command line. If true, the run
time configuration will replace existing configuration file. It has
no effect in the conf file.
"""
def __init__(self,
filesystem=filesystemhandlerextend.FileSystemHandlerExtend()):
self.settings = {}
self.filesystem = filesystem
  def _CreateConfig(self):
    """Create a config file based on user config plus default config.

    Serializes self.settings as sorted "key=value" lines and writes them to
    the path in self.settings['conf_file'], then prints a banner telling the
    user where the generated file lives.

    Returns:
      None. The constructed info write to conf_file
    """
    # Sort keys so the generated file is deterministic and diff-friendly.
    key_list = sorted(self.settings.keys())
    output = ''.join(['%s=%s\n' % (key, self.settings[key])
                      for key in key_list])
    self.filesystem.WriteToFile(self.settings['conf_file'], output)
    # NOTE: Python 2 print statement; this module predates Python 3.
    print """
    ***********Attention Please***************************
    Either no configuration file was found at: %s
    Or a reset option was issued.
    Creating a default configuration file.
    User can edit it later to change default values at: %s.
    ******************************************************
    """ % (self.settings['conf_file'], self.settings['conf_file'])
def _ReadConfig(self):
"""Convert the conf_file to a dictionary.
Returns:
a dictionary with key value pairs from the conf file.
"""
settings = {}
conf_handler = self.filesystem.FileOpenForRead(self.settings['conf_file'])
for line in conf_handler:
line = line.strip()
if (not line) or line.startswith('#') or (not '=' in line):
continue
key, value = line.split('=', 1)
# make it java.util.Properties like property reader.
# so I have to strip the quotes around the values
key = key.strip()
value = value.strip(' \t\r\'"')
# sendmail, reset and skip_setup should be treated as boolean values,
# others are treated as strings.
if key in ['sendmail', 'reset', 'skip_setup']:
settings[key] = (value.lower().startswith('true') or
value.startswith('1'))
else:
settings[key] = value
conf_handler.close()
# Remove the config we don't need. Most likely they will be generated on the
# runtime.
for key in NON_OVERWRITTEN_KEYS:
settings.pop(key, None)
return settings
  def _AddDefaultConfig(self, pyrering_root):
    """Populate the settings dictionary with default values.

    This method will provide a base configuration dictionary for PyreRing.

    Args:
      pyrering_root: path refer to the pyrering root dir.

    Returns:
      None.
    """
    self.settings.update({
        # Paths derived from the PyreRing installation root.
        'root_dir': pyrering_root,
        'report_dir': self.filesystem.PathJoin(pyrering_root, 'reports'),
        'conf_file': self.filesystem.PathJoin(pyrering_root,
                                              'conf',
                                              'pyrering.conf'),
        # Runtime environment, discovered automatically.
        'host_name': self.filesystem.GetHostName(),
        'tester': getpass.getuser(),
        # Placeholders the user is expected to edit in the conf file.
        'project_name': '<YOUR PROJECT NAME>',
        'default_suite': 'default_suite',
        'source_dir': '<YOUR TEST SCRIPT TOP DIRECTORY>',
        # Reporting and mailing defaults.
        'sendmail': False,
        'email_recipients': getpass.getuser(),
        'log_file': 'pyrering.log',
        'file_errors': False,
        'reset': False,
        'runner': 'baserunner',
        'FATAL_STRING': '',
        'header_file': 'header_info.txt',
        'skip_setup': False,
        'log_level': 'INFO',
        # A timestamp string to identify the time pyrering is started.
        # The format should be yyymmddHHMM
        'time': time.strftime('%Y%m%d%H%M'),
        })
def Populate(self, pyrering_root, user_settings):
"""Populate settings dictionary.
If the conf file exist, it will use user settings update conf file
settings and update default settings.
If the conf file doesn't exist, it will user user settings update default
settings and export as conf file.
Args:
pyrering_root: the path of the project root
user_settings: user settings dictionary
Returns:
|
securify/pref-finder | pref-calc.py | Python | apache-2.0 | 3,310 | 0.009063 | ## Flow for determining preferences
# Command line for prefs
## ex. fire, cold, phys, magi, asph, acid, pois, elec
# read for each file in directory and then create dict
# for line in test_file
# of lstrip starts with "if line.strip().startswith("replace: prefix-list") and list_name is None :"
## ex dict abbot {}
# for line in file
# add each line to a dict like Abbot.fire = X
# add total count = X
# return value queried in command line arg ex fire
# else no value return all Values for Mob
##############
import sys
import pprint
import re
file_arg = sys.argv[2]      # path of the prefs capture file to parse
command_pref = sys.argv[1]  # damage type to report, e.g. "fire" or "cold"
# set global mob list
moblist = []
## dictchecker for mob
def checklist(mobname):
    """Record mobname in the global moblist if it is not already tracked.

    The original try/except version looked up ``moblist.mobname`` — an
    attribute that can never exist on a list — so the AttributeError branch
    always fired and nothing was ever appended.  A plain membership test is
    what the "dictchecker for mob" comment intends.
    """
    if mobname not in moblist:
        moblist.append(mobname)
#
# return pref and calc
def return_pref(command_pref, dictname):
    """Print the per-kya average for one damage type.

    Args:
        command_pref: damage-type key to report, e.g. 'fire'.
        dictname: totals dict of string counts, including the 'kyas' total.
    """
    average = int(dictname[command_pref]) / int(dictname['kyas'])
    print(command_pref.capitalize(), average, "for", dictname['kyas'], "Kyas")
def testerfunction():
    """Parse the capture file into per-mob totals and report command_pref.

    Reads the file named by the module-global ``file_arg``.  Every
    "/set TOTAL_<TYPE>=<count>" line is folded into the current mob's totals
    dict; a "/set TOTAL_KYA_TARGET=" line prints the mob name and starts a
    fresh dict.  When ``command_pref`` is set, the requested average is
    printed via return_pref().
    """
    # One table drives all damage types instead of ten elif branches.
    # This also fixes the original bug where TOTAL_POIS was stored under
    # 'acid', clobbering the acid count and making 'pois' unqueryable.
    field_map = {
        'TOTAL_KYAS': 'kyas',
        'TOTAL_ACID': 'acid',
        'TOTAL_ASPH': 'asph',
        'TOTAL_COLD': 'cold',
        'TOTAL_ELEC': 'elec',
        'TOTAL_FIRE': 'fire',
        'TOTAL_MAGI': 'magi',
        'TOTAL_PHYS': 'phys',
        'TOTAL_POIS': 'pois',
        'TOTAL_PSIO': 'psi',
    }
    dictname = {}
    with open(file_arg) as readfile:
        for line in readfile:
            line = line.strip()
            if line.startswith("/set TOTAL_KYA_TARGET="):
                # New mob section: announce it and reset the totals.
                print(line.split(sep="=")[1])
                dictname = {}
            elif line.startswith("/set "):
                key, sep, value = line[len("/set "):].partition("=")
                if sep and key in field_map:
                    dictname[field_map[key]] = value
    if command_pref is not None:
        return_pref(command_pref, dictname)
testerfunction()  # script entry point: parse the capture file and print the result
|
pombredanne/re-core | test/test_utils.py | Python | agpl-3.0 | 3,251 | 0 | # Copyright (C) 2014 SEE AUTHORS FILE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pika
import mock
from . import TestCase, unittest
from recore import utils
from recore import amqp
# Mocks
channel = mock.MagicMock()  # stands in for a pika channel
connection = mock.MagicMock()  # stands in for a pika blocking connection
connection.channel = mock.MagicMock(return_value=channel)
class TestUtils(TestCase):
    """Unit tests for recore.utils JSON helpers and config-file parsing."""

    def tearDown(self):
        """Reset the shared channel mock between tests."""
        channel.reset_mock()

    def test_create_json_str(self):
        """Verify create_json_str produces proper json"""
        self.assertEqual(utils.create_json_str({'test': 'data'}), '{"test": "data"}')
        self.assertEqual(utils.create_json_str({'test': None}), '{"test": null}')
        self.assertRaises(ValueError, utils.create_json_str, "BAD DATA")

    def test_load_json_str(self):
        """Verify load_json_str produces proper structures"""
        self.assertEqual(utils.load_json_str('{"test": "data"}'), {'test': 'data'})
        self.assertEqual(utils.load_json_str('{"test": null}'), {'test': None})
        self.assertRaises(ValueError, utils.load_json_str, "BAD DATA")

    # Refacter merged the init_amql and the connect_mq functions
    # together. Need to fix this unit test.
    #
    # def test_connect_mq(self):
    #     """
    #     Check that connect_mq follows the expected connection steps
    #     """
    #     with mock.patch(
    #             'pika.BlockingConnection') as amqp.pika.BlockingConnection:
    #         amqp.pika.BlockingConnection.return_value = connection
    #         name = "name"
    #         server = "127.0.0.1"
    #         password = "password"
    #         exchange = "exchange"
    #         result = amqp.connect_mq(
    #             name=name, password=password,
    #             server=server, exchange=exchange)
    #         assert result[0] == channel
    #         assert result[1] == connection
    #         connection_params = amqp.pika.BlockingConnection.call_args[0][0]
    #         assert connection_params.host == server
    #         assert connection_params.credentials.username == name
    #         assert connection_params.credentials.password == password
    #         channel.exchange_declare.assert_called_with(
    #             exchange=exchange,
    #             durable=True,
    #             exchange_type='topic')

    def test_parse_config_file(self):
        """Verify config parsing works as expected."""
        self.assertRaises(IOError, utils.parse_config_file, 'doesnotexist')
        parsed = utils.parse_config_file('examples/settings-example.json')
        self.assertIs(type(parsed), dict)
|
leandro86/epubcreator | epubcreator/pyepub/pyepubreader/opf.py | Python | unlicense | 3,730 | 0.00429 | from lxml import etree
class Opf:
    """Read-only accessor over an EPUB OPF (package) document.

    All getters run namespaced XPath queries against the parsed tree and
    return plain Python values; methods returning a single metadata value
    yield None when the element is absent.
    """

    _OPF_NS = "http://www.idpf.org/2007/opf"
    DC_NS = "http://purl.org/dc/elements/1.1/"

    def __init__(self, opf):
        """Parse the OPF document.

        Args:
            opf: file name, file object or URL accepted by etree.parse.
        """
        self._opf = etree.parse(opf)

    def getSpineItems(self):
        """Return the idref of every spine itemref, in reading order."""
        return self._xpath(self._opf, "/opf:package/opf:spine/opf:itemref/@idref")

    def getAuthors(self):
        """Return (name, file-as) tuples for dc:creator elements with role 'aut'."""
        return self._getContributors("/opf:package/opf:metadata/dc:creator[@opf:role = 'aut']")

    def getTranslators(self):
        """Return (name, file-as) tuples for dc:contributor elements with role 'trl'."""
        return self._getContributors("/opf:package/opf:metadata/dc:contributor[@opf:role = 'trl']")

    def getIlustrators(self):
        """Return (name, file-as) tuples for dc:contributor elements with role 'ill'."""
        return self._getContributors("/opf:package/opf:metadata/dc:contributor[@opf:role = 'ill']")

    def getCalibreSerie(self):
        """Return (series name, series index); each is '' when absent."""
        return (self._metaContent('calibre:series'),
                self._metaContent('calibre:series_index'))

    def getDescription(self):
        return self._firstText("/opf:package/opf:metadata/dc:description/text()")

    def getTitle(self):
        return self._firstText("/opf:package/opf:metadata/dc:title/text()")

    def getLanguage(self):
        return self._firstText("/opf:package/opf:metadata/dc:language/text()")

    def getModificationDate(self):
        return self._firstText("/opf:package/opf:metadata/dc:date[@opf:event = 'modification']/text()")

    def getPublicationDate(self):
        return self._firstText("/opf:package/opf:metadata/dc:date[@opf:event = 'publication']/text()")

    def getPublisher(self):
        return self._firstText("/opf:package/opf:metadata/dc:publisher/text()")

    def getSubject(self):
        return self._firstText("/opf:package/opf:metadata/dc:subject/text()")

    def getPathToToc(self):
        """Return the manifest href of the NCX table of contents."""
        return self._xpath(self._opf, "/opf:package/opf:manifest/opf:item[@media-type = 'application/x-dtbncx+xml']/@href")[0]

    def _getContributors(self, xpath):
        # Shared implementation for authors/translators/illustrators: each
        # matched node contributes a (display name, sortable file-as) pair.
        contributors = []
        for node in self._xpath(self._opf, xpath):
            name = self._xpath(node, "text()")[0]
            fileAs = self._xpath(node, "@opf:file-as")[0]
            contributors.append((name, fileAs))
        return contributors

    def _metaContent(self, name):
        # calibre stores series data in <meta name="..." content="..."/>.
        nodes = self._xpath(self._opf, "/opf:package/opf:metadata/opf:meta[@name = '%s']" % name)
        if len(nodes) != 0:
            return self._xpath(nodes[0], "@content")[0]
        return ""

    def _firstText(self, xpath):
        # Single-valued Dublin Core fields: first text node or None.
        values = self._xpath(self._opf, xpath)
        return values[0] if values else None

    def _xpath(self, element, xpath):
        return element.xpath(xpath, namespaces={"opf": Opf._OPF_NS, "dc": Opf.DC_NS})
aguerra/python-stuff | stuff/collections.py | Python | bsd-2-clause | 604 | 0 | def | _iter(target, method, key):
iterable = target if method is None else getattr(target, method)()
iterator = iter(iterable)
if key is None:
return iterator
if not callable(key):
raise TypeError('{!r} is not callable'.format(type(key).__name__))
return (each for each in iterator if key(each))
def iterate(target, key=None):
return _iter(target=t | arget, method=None, key=key)
def iter_values(dict_, key=None):
return _iter(target=dict_, method='values', key=key)
def iter_items(dict_, key=None):
return _iter(target=dict_, method='items', key=key)
|
tieusangaka/datacollect | pdb_infotable/pdb_infotable.py | Python | gpl-3.0 | 4,974 | 0.008243 | #!/usr/bin/env python
# Tested in Python 3
# Sebastian Raschka, 2014
# An interactive command line app for
# creating a PDB file info table.
# For help, execute
# ./pdb_infotable.py --help
import bs4
import urllib
import pyprind
import pandas as pd
class Pdb(object):
    """Scrapes summary metadata for one PDB entry from rcsb.org.

    Call lookup() (or get_summary()) to populate the attributes; until then
    they hold their placeholder defaults.  Parsing depends on the legacy
    rcsb.org page layout and the fixed-column PDB file format.
    """

    def __init__(self, pdb_code):
        self.code = pdb_code.strip().lower()  # normalized 4-char PDB id
        self.reso = None   # resolution string in Angstrom, '-' for NMR
        self.desc = '-'    # molecule description from the explore page
        self.titl = None   # structure title
        self.ligs = {}     # ligand short-id -> long description
        self.meth = None   # 'X-Ray' or 'NMR'
        self.soup = None   # parsed explore page (BeautifulSoup)
        self.cont = None   # raw lines of the .pdb file (bytes)

    def lookup(self):
        """Fetch and parse both the explore page and the raw PDB file."""
        # Order matters: the private getters below read self.soup/self.cont.
        self.soup = self.__get_soup()
        self.__get_resolution()
        self.__get_title()
        self.__get_description()
        self.__get_pdbcontent()
        self.__get_ligands()

    def get_summary(self):
        """Run lookup() and return [code, desc, reso, meth, ligs, titl]."""
        self.lookup()
        summary = [self.code, self.desc, self.reso, self.meth, self.ligs, self.titl]
        return summary

    def __get_soup(self):
        # Explore page carries resolution, title and description.
        url = 'http://www.rcsb.org/pdb/explore/explore.do?structureId=' + self.code
        return bs4.BeautifulSoup(urllib.request.urlopen(url))

    def __get_pdbcontent(self):
        # Raw PDB file is only needed for the HETNAM (ligand) records.
        url = 'http://www.rcsb.org/pdb/files/' + self.code + '.pdb'
        r = urllib.request.urlopen(url)
        self.cont = r.readlines()

    def __get_resolution(self):
        # Missing resolution cell (AttributeError on .contents) is taken to
        # mean the structure was solved by NMR rather than X-ray.
        try:
            reso_tag = self.soup.find('td', {'id': 'se_xrayResolution'})
            resolution = reso_tag.contents[0].strip()
            self.meth = 'X-Ray'
            self.reso = resolution
        except AttributeError:
            self.meth = 'NMR'
            self.reso = '-'

    def __get_title(self):
        try:
            parent = self.soup.find('div', {'id': 'se_structureTitle'})
            child = parent.find('span', {'class': 'h3'})
            title = child.contents[0]
            self.titl = title
        except AttributeError:
            # Page section absent: keep a '-' placeholder.
            self.titl = '-'

    def __get_description(self):
        try:
            desc_tag = self.soup.find('td', {'class': 'mdauData', 'colspan':"99"})
            description = desc_tag.contents[0].strip()
            self.desc = description
        except AttributeError:
            self.desc = '-'

    def __get_ligands(self):
        # Collect HETNAM records: "HETNAM <id> <description...>".
        for i in self.cont:
            i = i.decode('utf-8')
            if i.startswith('HETNAM'):
                s = i.split('HETNAM')[1].strip()
                sp = s.split()
                short = sp[0]
                # NOTE(review): a single-character first token is presumably a
                # continuation-line number, with the real ligand id in sp[1]
                # — confirm against the PDB HETNAM record spec.
                if len(short) == 1:
                    short = sp[1]
                    desc = " ".join(sp[2:])
                else:
                    desc = " ".join(sp[1:])
                if short in self.ligs:
                    # NOTE(review): continuation text is concatenated without
                    # a separating space — words may run together.
                    self.ligs[short] += desc
                else:
                    self.ligs[short] = desc
def make_table(csv_in, csv_out):
    """Look up every PDB code in csv_in and write an annotated CSV to csv_out.

    csv_in must be a headerless, one-column file of PDB codes.  Each code is
    fetched from rcsb.org via Pdb, and description/resolution/method/title/
    ligand columns are appended to the frame before it is saved.
    """
    df = pd.read_csv(csv_in, sep=',', header=None)
    df.columns = ['PDB']

    # Only bother with a progress bar for more than a handful of rows.
    progress_bar = pyprind.ProgBar(df.shape[0]) if df.shape[0] > 3 else False

    descriptions, resolutions, methods = [], [], []
    short_ligands, long_ligands, titles = [], [], []

    for row in df.index:
        pdb_entry = Pdb(df.loc[row]['PDB'])
        pdb_entry.lookup()
        descriptions.append(pdb_entry.desc.lower())
        resolutions.append(pdb_entry.reso)
        methods.append(pdb_entry.meth)
        short_ligands.append('; '.join(pdb_entry.ligs.keys()))
        long_ligands.append('; '.join(pdb_entry.ligs.values()))
        titles.append(pdb_entry.titl)
        if progress_bar:
            progress_bar.update()

    df['Description'] = pd.Series(descriptions, df.index)
    df['Resolution (A)'] = pd.Series(resolutions, df.index)
    df['Method'] = pd.Series(methods, df.index)
    df['Title'] = pd.Series(titles, df.index)
    df['Ligands (short)'] = pd.Series(short_ligands, df.index)
    df['Ligands (long)'] = pd.Series(long_ligands, df.index)
    df.to_csv(csv_out)
if __name__ == "__main__":
    import argparse

    # CLI: -i input file of PDB codes, -o output CSV path.
    parser = argparse.ArgumentParser(
        description='A command line tool for creating a PDB file info table.',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog='\nExample run:\n'\
        './pdb_infotable.py -i pdb_codes.txt -o ./pdb_table.csv\n\n'\
        'Example input file:\n'\
        '1htg\n3eiy\n1hvr\n[...]')

    parser.add_argument('-i', '--input', help='A 1-column text file with PDB codes.')
    parser.add_argument('-o', '--output', help='Filename for creating the output CSV file.')
    parser.add_argument('-v', '--version', action='version', version='v. 1.0')

    args = parser.parse_args()

    # Both options are required in practice; argparse defaults them to None.
    if not args.output:
        print('Please provide a filename for creating the output CSV file.')
        quit()

    if not args.input:
        print('Please provide a file for the PDB codes.')
        quit()

    make_table(args.input, args.output)
|
mjpost/sacreBLEU | sacrebleu/tokenizers/tokenizer_13a.py | Python | apache-2.0 | 985 | 0 | from functools import lru_cache
from .tokenizer_base import Bas | eTokenizer
from .tokenizer_re import TokenizerRegexp
class Tokenizer13a(BaseTokenizer):
    """mteval-v13a-equivalent tokenizer used by WMT, with a per-line cache."""

    def signature(self):
        return '13a'

    def __init__(self):
        self._post_tokenizer = TokenizerRegexp()
        # Cache per instance.  Decorating __call__ with @lru_cache (as the
        # original did) keys the class-level cache on `self`, pinning every
        # tokenizer instance in memory for the life of the process (B019).
        self._tokenize = lru_cache(maxsize=2**16)(self._tokenize_line)

    def __call__(self, line):
        """Tokenizes an input line using a relatively minimal tokenization
        that is however equivalent to mteval-v13a, used by WMT.

        :param line: a segment to tokenize
        :return: the tokenized line
        """
        return self._tokenize(line)

    def _tokenize_line(self, line):
        # language-independent part:
        line = line.replace('<skipped>', '')
        line = line.replace('-\n', '')
        line = line.replace('\n', ' ')

        # Only unescape the common HTML entities when one might be present.
        if '&' in line:
            line = line.replace('&quot;', '"')
            line = line.replace('&amp;', '&')
            line = line.replace('&lt;', '<')
            line = line.replace('&gt;', '>')

        return self._post_tokenizer(f' {line} ')
|
Zarthus/CloudBotRefresh | plugins/rottentomatoes.py | Python | gpl-3.0 | 1,312 | 0.001524 | from cloudbot import hook
from cloudbot.util import http
api_root = 'http://api.rottentomatoes.com/api/public/v1.0/'
movie_search_url = api_root + 'movies.json'
movie_reviews_url = api_root + 'movi | es/%s/reviews.json'
@hook.command('rt')
def rottentomatoes(inp, bot=None):
    """rt <title> -- gets ratings for <title> from Rotten Tomatoes"""
    api_key = bot.config.get("api_keys", {}).get("rottentomatoes", None)
    if not api_key:
        return "error: no api key set"

    title = inp.strip()

    results = http.get_json(movie_search_url, q=title, apikey=api_key)
    if results['total'] == 0:
        return 'No results.'

    movie = results['movies'][0]
    title = movie['title']
    movie_id = movie['id']
    critics_score = movie['ratings']['critics_score']
    audience_score = movie['ratings']['audience_score']
    url = movie['links']['alternate']

    # -1 means the API has no critics score yet; nothing useful to report.
    if critics_score == -1:
        return

    reviews = http.get_json(movie_reviews_url % movie_id, apikey=api_key, review_type='all')
    review_count = reviews['total']

    # Floor division keeps the liked/disliked counts whole numbers; plain
    # "/" yields floats such as "85.6 liked" under Python 3 (this codebase
    # was ported from Python 2, where "/" on ints floors).
    fresh = critics_score * review_count // 100
    rotten = review_count - fresh

    return "{} - Critics Rating: \x02{}%\x02 ({} liked, {} disliked) " \
           "Audience Rating: \x02{}%\x02 - {}".format(title, critics_score, fresh, rotten, audience_score, url)
|
RPGOne/Skynet | pytorch-master/torch/legacy/nn/VolumetricConvolution.py | Python | bsd-3-clause | 6,906 | 0.001158 | import math
import torch
from .Module import Module
from .utils import clear
class VolumetricConvolution(Module):
    """Applies a 3D convolution over an input volume of shape
    (nInputPlane, time, height, width), producing nOutputPlane planes.

    kT/kW/kH are kernel sizes, dT/dW/dH strides, padT/padW/padH zero
    padding.  On the CPU path the 5D weight is temporarily viewed as a 2D
    matrix so the MM-based backend kernels can be used; the CUDA path keeps
    the 5D layout.
    """

    def __init__(self, nInputPlane, nOutputPlane, kT, kW, kH, dT=1, dW=1, dH=1, padT=0, padW=None, padH=None):
        super(VolumetricConvolution, self).__init__()

        self.nInputPlane = nInputPlane
        self.nOutputPlane = nOutputPlane
        self.kT = kT
        self.kW = kW
        self.kH = kH
        self.dT = dT
        self.dW = dW
        self.dH = dH
        self.padT = padT
        # Padding cascades: padW falls back to padT, padH to padW.
        self.padW = padW if padW is not None else self.padT
        self.padH = padH if padH is not None else self.padW

        self.weight = torch.Tensor(nOutputPlane, nInputPlane, kT, kH, kW)
        self.bias = torch.Tensor(nOutputPlane)
        self.gradWeight = torch.Tensor(nOutputPlane, nInputPlane, kT, kH, kW)
        self.gradBias = torch.Tensor(nOutputPlane)
        self.reset()
        self.finput = None
        self.fgradInput = None
        # Scratch buffers for contiguous copies.  Bug fix: _input was never
        # initialized in the original, so _makeContiguous raised
        # AttributeError the first time it saw a non-contiguous input.
        self._input = None
        self._gradOutput = None

    def reset(self, stdv=None):
        """(Re)initialize weight and bias uniformly in [-stdv, stdv].

        When stdv is None, the classic torch default 1/sqrt(fan-in) is used.
        """
        if stdv is not None:
            stdv = stdv * math.sqrt(3)
        else:
            stdv = 1. / math.sqrt(self.kT * self.kW * self.kH * self.nInputPlane)

        self.weight.uniform_(-stdv, stdv)
        self.bias.uniform_(-stdv, stdv)

    def _makeContiguous(self, input, gradOutput=None):
        # The MM backend requires contiguous tensors; copy into cached
        # buffers only when needed.
        if not input.is_contiguous():
            if self._input is None:
                self._input = input.new()
            self._input.resize_as_(input).copy_(input)
            input = self._input

        if gradOutput is not None:
            if not gradOutput.is_contiguous():
                if self._gradOutput is None:
                    self._gradOutput = gradOutput.new()
                self._gradOutput.resize_as_(gradOutput).copy_(gradOutput)
                gradOutput = self._gradOutput
            return input, gradOutput

        return input

    # function to re-view the weight layout in a way that would make the MM ops happy
    def _viewWeight(self):
        self.weight = self.weight.view(self.nOutputPlane, self.nInputPlane * self.kT * self.kH * self.kW)
        if self.gradWeight is not None and self.gradWeight.dim() > 0:
            self.gradWeight = self.gradWeight.view(self.nOutputPlane, self.nInputPlane * self.kT * self.kH * self.kW)

    def _unviewWeight(self):
        # Inverse of _viewWeight: restore the canonical 5D layout.
        self.weight = self.weight.view(self.nOutputPlane, self.nInputPlane, self.kT, self.kH, self.kW)
        if self.gradWeight is not None and self.gradWeight.dim() > 0:
            self.gradWeight = self.gradWeight.view(self.nOutputPlane, self.nInputPlane, self.kT, self.kH, self.kW)

    def updateOutput(self, input):
        """Forward pass: fill and return self.output."""
        if self.finput is None:
            self.finput = input.new()
        if self.fgradInput is None:
            self.fgradInput = input.new()
        if input.type() == 'torch.cuda.FloatTensor':
            # CUDA kernel consumes the 5D weight directly.
            self._backend.VolumetricConvolution_updateOutput(
                self._backend.library_state,
                input,
                self.output,
                self.weight,
                self.bias,
                self.finput,
                self.fgradInput,
                self.dT, self.dW, self.dH,
                self.padT, self.padW, self.padH
            )
        else:
            # CPU path: 2D weight view + contiguous input for the MM kernel.
            self._viewWeight()
            input = self._makeContiguous(input)
            self._backend.VolumetricConvolutionMM_updateOutput(
                self._backend.library_state,
                input,
                self.output,
                self.weight,
                self.bias,
                self.finput,
                self.kT, self.kW, self.kH,
                self.dT, self.dW, self.dH,
                self.padT, self.padW, self.padH
            )
            self._unviewWeight()

        return self.output

    def updateGradInput(self, input, gradOutput):
        """Backward pass w.r.t. the input: fill and return self.gradInput."""
        if self.gradInput is None:
            return

        if input.type() == 'torch.cuda.FloatTensor':
            self._backend.VolumetricConvolution_updateGradInput(
                self._backend.library_state,
                input,
                gradOutput,
                self.gradInput,
                self.weight,
                self.finput,
                self.dT, self.dW, self.dH,
                self.padT, self.padW, self.padH
            )
        else:
            self._viewWeight()
            input, gradOutput = self._makeContiguous(input, gradOutput)
            self._backend.VolumetricConvolutionMM_updateGradInput(
                self._backend.library_state,
                input,
                gradOutput,
                self.gradInput,
                self.weight,
                self.finput,
                self.fgradInput,
                self.kT, self.kW, self.kH,
                self.dT, self.dW, self.dH,
                self.padT, self.padW, self.padH
            )
            self._unviewWeight()

        return self.gradInput

    def accGradParameters(self, input, gradOutput, scale=1):
        """Accumulate scale * d(loss)/d(weight, bias) into the grad buffers."""
        if input.type() == 'torch.cuda.FloatTensor':
            self._backend.VolumetricConvolution_accGradParameters(
                self._backend.library_state,
                input,
                gradOutput,
                self.gradWeight,
                self.gradBias,
                self.finput,
                self.fgradInput,
                self.dT, self.dW, self.dH,
                self.padT, self.padW, self.padH,
                scale
            )
        else:
            input, gradOutput = self._makeContiguous(input, gradOutput)
            self._viewWeight()
            self._backend.VolumetricConvolutionMM_accGradParameters(
                self._backend.library_state,
                input,
                gradOutput,
                self.gradWeight,
                self.gradBias,
                self.finput,
                self.kT, self.kW, self.kH,
                self.dT, self.dW, self.dH,
                self.padT, self.padW, self.padH,
                scale
            )
            self._unviewWeight()

    def type(self, type, tensorCache=None):
        # Buffers are tensor-type specific; drop them before converting.
        clear(self, 'finput', 'fgradInput')
        return super(VolumetricConvolution, self).type(type, tensorCache)

    def clearState(self):
        clear(self, 'finput', 'fgradInput', '_input', '_gradOutput')
        return super(VolumetricConvolution, self).clearState()

    def __repr__(self):
        s = super(VolumetricConvolution, self).__repr__()
        s += '({} -> {}, {}x{}x{}'.format(self.nInputPlane, self.nOutputPlane, self.kT, self.kW, self.kH)
        # Only print stride/padding when they differ from the defaults.
        if self.dT != 1 or self.dW != 1 or self.dH != 1 or \
                self.padT != 0 or self.padW != 0 or self.padH != 0:
            s += ', {}, {}, {}'.format(self.dT, self.dW, self.dH)

        if self.padT != 0 or self.padW != 0 or self.padH != 0:
            s += ', {}, {}, {}'.format(self.padT, self.padW, self.padH)

        s += ')'
        return s
|
ariddell/pystan | pystan/tests/test_utf8.py | Python | gpl-3.0 | 1,618 | 0.004988 | import unittest
from pystan import stanc, StanModel
from pystan._compat import PY2
class TestUTF8(unittest.TestCase):
    """Checks that stanc handles non-ASCII characters in comments but
    rejects them in program identifiers."""

    # Keys every successful stanc() result must contain.
    desired = sorted({"status", "model_cppname", "cppcode", "model_name", "model_code", "include_paths"})

    def _assert_compiles(self, model_code):
        """Run stanc and verify the standard success contract."""
        result = stanc(model_code=model_code)
        self.assertEqual(sorted(result.keys()), self.desired)
        self.assertTrue(result['cppcode'].startswith("// Code generated by Stan "))
        self.assertEqual(result['status'], 0)

    def test_utf8(self):
        self._assert_compiles('parameters {real y;} model {y ~ normal(0,1);}')

    def test_utf8_linecomment(self):
        self._assert_compiles(u'parameters {real y;\n //äöéü\n} model {y ~ normal(0,1);}')

    def test_utf8_multilinecomment(self):
        self._assert_compiles(u'parameters {real y;\n /*äöéü\näöéü*/\n} model {y ~ normal(0,1);}')

    def test_utf8_inprogramcode(self):
        # Non-ASCII identifiers are invalid Stan and must fail to parse.
        model_code = u'parameters {real ö;\n} model {ö ~ normal(0,1);}'
        assertRaisesRegex = self.assertRaisesRegexp if PY2 else self.assertRaisesRegex
        with assertRaisesRegex(ValueError, 'Failed to parse Stan model .*'):
            stanc(model_code=model_code)
|
c4mb0t/django-setman | setman/helpers.py | Python | bsd-3-clause | 1,048 | 0 | from setman import settings
from setman.utils import is_settings_container
__all__ = ('get_config', )  # the module's only public helper
def get_config(name, default=None):
    """
    Helper function to easy fetch ``name`` from database or django settings and
    return ``default`` value if setting key isn't found.

    But if not ``default`` value is provided (``None``) the ``AttributeError``
    exception can raised if setting key isn't found.

    If ``name`` is one of available ``app_name`` function raises
    ``ValueError`` cause cannot to returns config value.

    For fetching app setting use next definition:
    ``<app_name>.<setting_name>``.
    """
    # "<app>.<setting>" addresses a setting nested inside an app container.
    app_name = None
    if '.' in name:
        app_name, name = name.split('.', 1)

    values = getattr(settings, app_name) if app_name else settings

    if default is None:
        # No fallback supplied: let AttributeError propagate on a miss.
        result = getattr(values, name)
    else:
        result = getattr(values, name, default)

    if is_settings_container(result):
        raise ValueError('%r is settings container, not setting.' % name)

    return result
|
mxmaslin/Test-tasks | django_test_tasks/old_django_test_tasks/apps/playschool/serializers.py | Python | gpl-3.0 | 721 | 0 | import base64
import imghdr
import six
import uuid
from django.core.files.base import ContentFile
from rest_framework import serializers
from .models import Scholar, Record
cl | ass ScholarSerializer(serializers.ModelSerializer):
class Meta:
model = Scholar
fields = (
'pk',
'photo',
'name',
'sex',
'birth_date',
'school_class',
'is_studying')
class RecordSerializer(serializers.ModelSerializer):
    """Serializes daily attendance Record entries for the REST API."""

    class Meta:
        model = Record
        # One row per scholar per day: who brought them, arrival/departure.
        fields = (
            'pk',
            'scholar',
            'date',
            'has_came_with',
            'time_arrived',
            'time_departed'
        )
|
ayepezv/GAD_ERP | addons/account_voucher/models/account_voucher.py | Python | gpl-3.0 | 19,955 | 0.005362 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models, api, _
import odoo.addons.decimal_precision as dp
from odoo.exceptions import UserError
class AccountVoucher(models.Model):
    # Lightweight sale/purchase document that posts a journal entry once
    # validated through the voucher workflow.
    _name = 'account.voucher'
    _description = 'Accounting Voucher'
    _inherit = ['mail.thread']      # chatter / message threading support
    _order = "date desc, id desc"   # newest vouchers first
@api.model
def _default_journal(self):
voucher_type = self._context.get('voucher_type', 'sale')
company_id = self._context.get('company_id', self.env.user.company_id.id)
domain = [
('type', '=', voucher_type),
('company_id', '=', company_id),
]
return self.env['account.journal'].search(domain, limit=1)
    # --- Document identity -------------------------------------------------
    voucher_type = fields.Selection([
        ('sale', 'Sale'),
        ('purchase', 'Purchase')
    ], string='Type', readonly=True, states={'draft': [('readonly', False)]}, oldname="type")
    name = fields.Char('Payment Reference',
        readonly=True, states={'draft': [('readonly', False)]}, default='')
    # --- Dates -------------------------------------------------------------
    date = fields.Date("Bill Date", readonly=True,
        select=True, states={'draft': [('readonly', False)]},
        copy=False, default=fields.Date.context_today)
    account_date = fields.Date("Accounting Date",
        readonly=True, select=True, states={'draft': [('readonly', False)]},
        help="Effective date for accounting entries", copy=False, default=fields.Date.context_today)
    # --- Accounting configuration ------------------------------------------
    journal_id = fields.Many2one('account.journal', 'Journal',
        required=True, readonly=True, states={'draft': [('readonly', False)]}, default=_default_journal)
    # Account domain depends on the payment mode and voucher direction.
    account_id = fields.Many2one('account.account', 'Account',
        required=True, readonly=True, states={'draft': [('readonly', False)]},
        domain="[('deprecated', '=', False), ('internal_type','=', (pay_now == 'pay_now' and 'liquidity' or voucher_type == 'purchase' and 'payable' or 'receivable'))]")
    line_ids = fields.One2many('account.voucher.line', 'voucher_id', 'Voucher Lines',
        readonly=True, copy=True,
        states={'draft': [('readonly', False)]})
    narration = fields.Text('Notes', readonly=True, states={'draft': [('readonly', False)]})
    currency_id = fields.Many2one('res.currency', compute='_get_journal_currency',
        string='Currency', readonly=True, required=True, default=lambda self: self._get_currency())
    company_id = fields.Many2one('res.company', 'Company',
        required=True, readonly=True, states={'draft': [('readonly', False)]},
        related='journal_id.company_id', default=lambda self: self._get_company())
    # --- Workflow state ----------------------------------------------------
    state = fields.Selection([
        ('draft', 'Draft'),
        ('cancel', 'Cancelled'),
        ('proforma', 'Pro-forma'),
        ('posted', 'Posted')
    ], 'Status', readonly=True, track_visibility='onchange', copy=False, default='draft',
        help=" * The 'Draft' status is used when a user is encoding a new and unconfirmed Voucher.\n"
             " * The 'Pro-forma' status is used when the voucher does not have a voucher number.\n"
             " * The 'Posted' status is used when user create voucher,a voucher number is generated and voucher entries are created in account.\n"
             " * The 'Cancelled' status is used when user cancel voucher.")
    reference = fields.Char('Bill Reference', readonly=True, states={'draft': [('readonly', False)]},
        help="The partner reference of this document.", copy=False)
    # --- Amounts (computed in _compute_total) ------------------------------
    amount = fields.Monetary(string='Total', store=True, readonly=True, compute='_compute_total')
    tax_amount = fields.Monetary(readonly=True, store=True, compute='_compute_total')
    tax_correction = fields.Monetary(readonly=True, states={'draft': [('readonly', False)]},
        help='In case we have a rounding problem in the tax, use this field to correct it')
    # --- Posting & payment -------------------------------------------------
    number = fields.Char(readonly=True, copy=False)
    move_id = fields.Many2one('account.move', 'Journal Entry', copy=False)
    partner_id = fields.Many2one('res.partner', 'Partner', change_default=1, readonly=True, states={'draft': [('readonly', False)]})
    paid = fields.Boolean(compute='_check_paid', help="The Voucher has been totally paid.")
    pay_now = fields.Selection([
        ('pay_now', 'Pay Directly'),
        ('pay_later', 'Pay Later'),
    ], 'Payment', select=True, readonly=True, states={'draft': [('readonly', False)]}, default='pay_later')
    date_due = fields.Date('Due Date', readonly=True, select=True, states={'draft': [('readonly', False)]})
@api.one
@api.depends('move_id.line_ids.reconciled', 'move_id.line_ids.account_id.internal_type')
def _check_paid(self):
self.paid = any([((line.account_id.internal_type, 'in', ('receivable', 'payable')) and line.reconciled) for line in self.move_id.line_ids])
@api.model
def _get_currency(self):
journal = self.env['account.journal'].browse(self._context.get('journal_id', False))
if journal.currency_id:
return journal.currency_id.id
return self.env.user.company_id.currency_id.id
    @api.model
    def _get_company(self):
        """Default company: the context's company_id, else the user's company."""
        return self._context.get('company_id', self.env.user.company_id.id)
@api.multi
@api.depends('name', 'number')
def name_get(self):
return [(r.id, (r.number or _('Voucher'))) for r in self]
@api.one
@api.depends('journal_id', 'company_id')
def _get_journal_currency(self):
self.currency_id = self.journal_id.currency_id.id or self.company_id.currency_id.id
@api.multi
@api.depends('tax_correction', 'line_ids.price_subtotal')
def _compute_total(self):
for voucher in self:
total = 0
tax_amount = 0
for line in voucher.line_ids:
tax_info = line.tax_ids.compute_all(line.price_unit, voucher.currency_id, line.quantity, line.product_id, voucher.partner_id)
total += tax_info.get('total_included', 0.0)
tax_amount += sum([t.get('amount',0.0) for t in tax_info.get('taxes', False)])
voucher.amount = total + voucher.tax_correction
voucher.tax_amount = tax_amount
@api.one
@api.depends('account_pay_now_id', 'account_pay_later_id', 'pay_now')
def _get_account(self):
self.account_id = self.account_pay_now_id if self.pay_now == 'pay_now' else self.account_pay_later_id
    @api.onchange('date')
    def onchange_date(self):
        # Default the accounting date to the voucher date whenever it changes.
        self.account_date = self.date
@api.onchange('partner_id', 'pay_now')
def onchange_partner_id(self):
if self.pay_now == 'pay_now':
liq_journal = self.env['account.journal'].search([('type', 'in', ('bank', 'cash'))], limit=1)
self.account_id = liq_journal.default_debit_account_id \
if self.voucher_type == 'sale' else liq_journal.default_credit_account_id
else:
if self.partner_id:
self.account_id = self.partner_id.property_account_receivable_id \
if self.voucher_type == 'sale' else self.partner_id.property_account_payable_id
else:
self.account_id = self.journal_id.default_debit_account_id \
if self.voucher_type == 'sale' else self.journal_id.default_credit_account_id
    @api.multi
    def button_proforma_voucher(self):
        """Validate the voucher from the form view and close the dialog."""
        # Fire the workflow signal that posts the voucher (see proforma_voucher).
        self.signal_workflow('proforma_voucher')
        return {'type': 'ir.actions.act_window_close'}
    @api.multi
    def proforma_voucher(self):
        """Workflow hook: post the voucher by creating its journal entries."""
        self.action_move_line_create()
    @api.multi
    def action_cancel_draft(self):
        """Reset cancelled vouchers back to draft."""
        # Recreate the workflow instance first, then reset the state.
        self.create_workflow()
        self.write({'state': 'draft'})
    @api.multi
    def cancel_voucher(self):
        """Cancel the vouchers: drop their journal entries and mark them cancelled."""
        for voucher in self:
            # Set the move back to unposted before deleting it; order matters,
            # unlinking a posted move would be refused by the ORM.
            voucher.move_id.button_cancel()
            voucher.move_id.unlink()
        self.write({'state': 'cancel', 'move_id': False})
@api.multi
def unlink(self):
for voucher in self:
if voucher.state not in ('draft', 'cancel'):
raise UserError(_('Cannot delete voucher(s) which are already opened or paid.'))
return super(AccountVoucher, self).unlink()
@api.multi
|
aronsky/home-assistant | homeassistant/components/venstar/__init__.py | Python | apache-2.0 | 3,291 | 0.000608 | """The venstar component."""
import asyncio
from requests import RequestException
from venstarcolortouch import VenstarColorTouch
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PIN,
CONF_SSL,
CONF_USERNAME,
)
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.entity import Entity
from .const import _LOGGER, DOMAIN, VENSTAR_TIMEOUT
PLATFORMS = ["climate"]
async def async_setup_entry(hass, config):
    """Set up the Venstar thermostat."""
    conf = config.data
    client = VenstarColorTouch(
        addr=conf[CONF_HOST],
        timeout=VENSTAR_TIMEOUT,
        user=conf.get(CONF_USERNAME),
        password=conf.get(CONF_PASSWORD),
        pin=conf.get(CONF_PIN),
        proto="https" if conf[CONF_SSL] else "http",
    )

    # Probe the device once so a dead thermostat fails fast with a retry.
    try:
        await hass.async_add_executor_job(client.update_info)
    except (OSError, RequestException) as ex:
        raise ConfigEntryNotReady(f"Unable to connect to the thermostat: {ex}") from ex

    hass.data.setdefault(DOMAIN, {})[config.entry_id] = client

    hass.config_entries.async_setup_platforms(config, PLATFORMS)

    return True
async def async_unload_entry(hass, config):
    """Unload the config entry and its platforms.

    Fix: removed a stray ' | ' artifact that had been spliced into the
    function signature, which made the module unimportable.
    """
    unload_ok = await hass.config_entries.async_unload_platforms(config, PLATFORMS)
    if unload_ok:
        # Drop the client only when every platform unloaded cleanly.
        hass.data[DOMAIN].pop(config.entry_id)
    return unload_ok
class VenstarEntity(Entity):
    """Base Venstar entity: fetches the latest data from the thermostat.

    Fixes: removed a stray ' | ' artifact from the class name, and
    initialized the success flags -- previously, if ``update_info`` raised,
    ``info_success`` was never bound and the final check crashed with a
    NameError instead of logging the failure.
    """

    def __init__(self, config, client):
        """Initialize the data object."""
        self._config = config
        self._client = client

    async def async_update(self):
        """Update the state."""
        info_success = False
        try:
            info_success = await self.hass.async_add_executor_job(
                self._client.update_info
            )
        except (OSError, RequestException) as ex:
            _LOGGER.error("Exception during info update: %s", ex)

        # older venstars sometimes cannot handle rapid sequential connections
        await asyncio.sleep(3)

        sensor_success = False
        try:
            sensor_success = await self.hass.async_add_executor_job(
                self._client.update_sensors
            )
        except (OSError, RequestException) as ex:
            _LOGGER.error("Exception during sensor update: %s", ex)

        if not info_success or not sensor_success:
            _LOGGER.error("Failed to update data")

    @property
    def name(self):
        """Return the name of the thermostat."""
        return self._client.name

    @property
    def unique_id(self):
        """Set unique_id for this entity."""
        return f"{self._config.entry_id}"

    @property
    def device_info(self):
        """Return the device information for this entity."""
        return {
            "identifiers": {(DOMAIN, self._config.entry_id)},
            "name": self._client.name,
            "manufacturer": "Venstar",
            # pylint: disable=protected-access
            "model": f"{self._client.model}-{self._client._type}",
            # pylint: disable=protected-access
            "sw_version": self._client._api_ver,
        }
|
ehabkost/virt-test | qemu/tests/multi_vms_file_transfer.py | Python | gpl-2.0 | 6,006 | 0.002498 | import time, os, logging
from autotest.client import utils
from autotest.client.shared import error
from virttest import remote, utils_misc
@error.context_aware
def run_multi_vms_file_transfer(test, params, env):
    """
    Transfer a file back and forth between multi VMs for long time.

    1) Boot up two VMs.
    2) Create a large file by dd on host.
    3) Copy this file to VM1.
    4) Compare copied file's md5 with original file.
    5) Copy this file from VM1 to VM2.
    6) Compare copied file's md5 with original file.
    7) Copy this file from VM2 to VM1.
    8) Compare copied file's md5 with original file.
    9) Repeat step 5-8

    @param test: KVM test object.
    @param params: Dictionary with the test parameters.
    @param env: Dictionary with test environment.
    """
    def md5_check(session, orig_md5):
        # Compare the guest copy's md5sum against the original host file.
        msg = "Compare copied file's md5 with original file."
        error.context(msg, logging.info)
        md5_cmd = "md5sum %s | awk '{print $1}'" % guest_path
        s, o = session.cmd_status_output(md5_cmd)
        if s:
            msg = "Fail to get md5 value from guest. Output is %s" % o
            raise error.TestError(msg)
        new_md5 = o.splitlines()[-1]
        if new_md5 != orig_md5:
            msg = "File changed after transfer host -> VM1. Original md5 value"
            msg += " is %s. Current md5 value is %s" % (orig_md5, new_md5)
            raise error.TestFail(msg)

    vm1 = env.get_vm(params["main_vm"])
    vm1.verify_alive()
    login_timeout = int(params.get("login_timeout", 360))
    vm2 = env.get_vm(params["vms"].split()[-1])
    vm2.verify_alive()
    session_vm1 = vm1.wait_for_login(timeout=login_timeout)
    session_vm2 = vm2.wait_for_login(timeout=login_timeout)

    transfer_timeout = int(params.get("transfer_timeout", 1000))
    username = params.get("username")
    password = params.get("password")
    port = int(params.get("file_transfer_port"))
    if (not port) or (not username) or (not password):
        raise error.TestError("Please set file_transfer_port, username,"
                              " password paramters for guest")
    tmp_dir = params.get("tmp_dir", "/tmp/")
    repeat_time = int(params.get("repeat_time", "10"))
    clean_cmd = params.get("clean_cmd", "rm -f")
    filesize = int(params.get("filesize", 4000))
    # dd writes 10MB blocks, so the block count is filesize / 10 (minimum 1).
    count = int(filesize / 10)
    if count == 0:
        count = 1

    host_path = os.path.join(tmp_dir, "tmp-%s" %
                             utils_misc.generate_random_string(8))
    cmd = "dd if=/dev/zero of=%s bs=10M count=%d" % (host_path, count)
    guest_path = (tmp_dir + "file_transfer-%s" %
                  utils_misc.generate_random_string(8))
    try:
        error.context("Creating %dMB file on host" % filesize, logging.info)
        utils.run(cmd)
        orig_md5 = utils.hash_file(host_path, method="md5")
        error.context("Transfering file host -> VM1, timeout: %ss" %
                      transfer_timeout, logging.info)
        t_begin = time.time()
        vm1.copy_files_to(host_path, guest_path, timeout=transfer_timeout)
        t_end = time.time()
        throughput = filesize / (t_end - t_begin)
        logging.info("File transfer host -> VM1 succeed, "
                     "estimated throughput: %.2fMB/s", throughput)
        md5_check(session_vm1, orig_md5)

        ip_vm1 = vm1.get_address()
        ip_vm2 = vm2.get_address()
        for i in range(repeat_time):
            log_vm1 = os.path.join(test.debugdir, "remote_scp_to_vm1_%s.log" % i)
            log_vm2 = os.path.join(test.debugdir, "remote_scp_to_vm2_%s.log" % i)

            msg = "Transfering file VM1 -> VM2, timeout: %ss." % transfer_timeout
            msg += " Repeat: %s/%s" % (i + 1, repeat_time)
            error.context(msg, logging.info)
            t_begin = time.time()
            # Bug fix: log the VM1 -> VM2 copy to the "to_vm2" file; the
            # original wrote both directions to log_vm1 and never used log_vm2.
            remote.scp_between_remotes(src=ip_vm1, dst=ip_vm2, port=port,
                                       s_passwd=password, d_passwd=password,
                                       s_name=username, d_name=username,
                                       s_path=guest_path, d_path=guest_path,
                                       timeout=transfer_timeout,
                                       log_filename=log_vm2)
            t_end = time.time()
            throughput = filesize / (t_end - t_begin)
            logging.info("File transfer VM1 -> VM2 succeed, "
                         "estimated throughput: %.2fMB/s", throughput)
            md5_check(session_vm2, orig_md5)
            session_vm1.cmd("rm -rf %s" % guest_path)

            msg = "Transfering file VM2 -> VM1, timeout: %ss." % transfer_timeout
            msg += " Repeat: %s/%s" % (i + 1, repeat_time)
            error.context(msg, logging.info)
            t_begin = time.time()
            remote.scp_between_remotes(src=ip_vm2, dst=ip_vm1, port=port,
                                       s_passwd=password, d_passwd=password,
                                       s_name=username, d_name=username,
                                       s_path=guest_path, d_path=guest_path,
                                       timeout=transfer_timeout,
                                       log_filename=log_vm1)
            t_end = time.time()
            throughput = filesize / (t_end - t_begin)
            logging.info("File transfer VM2 -> VM1 succeed, "
                         "estimated throughput: %.2fMB/s", throughput)
            md5_check(session_vm1, orig_md5)
            session_vm2.cmd("%s %s" % (clean_cmd, guest_path))
    finally:
        # Best-effort cleanup on both guests and on the host.
        try:
            session_vm1.cmd("%s %s" % (clean_cmd, guest_path))
        except Exception:
            pass
        try:
            session_vm2.cmd("%s %s" % (clean_cmd, guest_path))
        except Exception:
            pass
        try:
            os.remove(host_path)
        except OSError:
            pass
        if session_vm1:
            session_vm1.close()
        if session_vm2:
            session_vm2.close()
|
DrDub/nlg4patch | nlg4patch/unidiff/tests/test_parser.py | Python | gpl-3.0 | 1,824 | 0.000549 | # -*- coding: utf-8 -*-
# Author: Matías Bordese
"""Tests for the unified diff parser process."""
import os.path
import unittest2
from nlg4patch.unidiff import parser
class TestUnidiffParser(unittest2.TestCase):
    """Tests for Unified Diff Parser.

    Fixes: removed two stray ' | ' artifacts that broke the syntax, and
    corrected the Hunk 1 comment ("five additions") to match the asserted
    value of 6.
    """

    def setUp(self):
        samples_dir = os.path.dirname(os.path.realpath(__file__))
        self.sample_file = os.path.join(samples_dir, 'sample.diff')
        self.sample_bad_file = os.path.join(samples_dir, 'sample_bad.diff')

    def test_parse_sample(self):
        """Parse sample file."""
        with open(self.sample_file) as diff_file:
            res = parser.parse_unidiff(diff_file)

        # one file in the patch
        self.assertEqual(len(res), 1)
        # three hunks
        self.assertEqual(len(res[0]), 3)

        # Hunk 1: six additions, no deletions, no modifications
        self.assertEqual(res[0][0].added, 6)
        self.assertEqual(res[0][0].modified, 0)
        self.assertEqual(res[0][0].deleted, 0)

        # Hunk 2: no additions, 6 deletions, 2 modifications
        self.assertEqual(res[0][1].added, 0)
        self.assertEqual(res[0][1].modified, 2)
        self.assertEqual(res[0][1].deleted, 6)

        # Hunk 3: four additions, no deletions, no modifications
        self.assertEqual(res[0][2].added, 4)
        self.assertEqual(res[0][2].modified, 0)
        self.assertEqual(res[0][2].deleted, 0)

        # Check file totals
        self.assertEqual(res[0].added, 10)
        self.assertEqual(res[0].modified, 2)
        self.assertEqual(res[0].deleted, 6)

    def test_parse_malformed_diff(self):
        """Parse malformed file."""
        with open(self.sample_bad_file) as diff_file:
            self.assertRaises(parser.UnidiffParseException,
                              parser.parse_unidiff, diff_file)
|
Distrotech/reportlab | tests/test_source_chars.py | Python | bsd-3-clause | 3,375 | 0.008 | #!/usr/bin/env python
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
"""This tests for things in source files. Initially, absence of tabs :-)
"""
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, outputfile, SecureTestCase, GlobDirectoryWalker, printLocation
setOutDir(__name__)
from reportlab.lib.testutils import RL_HOME,testsFolder
__version__=''' $Id$ '''
import os, sys, glob, re
import reportlab
import unittest
from reportlab.lib.utils import open_and_read
class SourceTester(SecureTestCase):
    """Scans the ReportLab source tree for tabs and trailing whitespace.

    Fixes: removed two stray ' | ' artifacts ('if spac | es:',
    'GlobDirectory | Walker') that broke the syntax, and narrowed the bare
    except around __file__ to NameError.
    """

    def setUp(self):
        SecureTestCase.setUp(self)
        try:
            fn = __file__
        except NameError:
            # Frozen/odd environments may not define __file__.
            fn = sys.argv[0]
        self.output = open(outputfile(os.path.splitext(os.path.basename(fn))[0]+'.txt'),'w')

    def checkFileForTabs(self, filename):
        """Record (in the output report) any tab characters found in filename."""
        txt = open_and_read(filename, 'r')
        chunks = txt.split('\t')
        tabCount = len(chunks) - 1
        if tabCount:
            #raise Exception, "File %s contains %d tab characters!" % (filename, tabCount)
            self.output.write("file %s contains %d tab characters!\n" % (filename, tabCount))

    def checkFileForTrailingSpaces(self, filename):
        """Record trailing whitespace and the share of the file it wastes."""
        txt = open_and_read(filename, 'r')
        initSize = len(txt)
        badLines = 0
        badChars = 0
        for line in txt.split('\n'):
            stripped = line.rstrip()
            spaces = len(line) - len(stripped)  # OK, so they might be trailing tabs, who cares?
            if spaces:
                badLines = badLines + 1
                badChars = badChars + spaces
        if badChars != 0:
            self.output.write("file %s contains %d trailing spaces, or %0.2f%% wastage\n" % (filename, badChars, 100.0*badChars/initSize))

    def testFiles(self):
        """Walk every .py file under RL_HOME and run both checks."""
        w = GlobDirectoryWalker(RL_HOME, '*.py')
        for filename in w:
            self.checkFileForTabs(filename)
            self.checkFileForTrailingSpaces(filename)
def zapTrailingWhitespace(dirname):
    """Eliminates trailing spaces IN PLACE. Use with extreme care
    and only after a backup or with version-controlled code.

    Fix: files are now opened via context managers so handles are closed
    deterministically instead of leaking until garbage collection.
    """
    assert os.path.isdir(dirname), "Directory not found!"
    print("This will eliminate all trailing spaces in py files under %s." % dirname)
    ok = input("Shall I proceed? type YES > ")
    if ok != 'YES':
        print('aborted by user')
        return

    for filename in GlobDirectoryWalker(dirname, '*.py'):
        # Read the whole file, then detect real changes line by line.
        with open(filename, 'r') as f:
            txt = f.read()
        badChars = 0
        cleaned = []
        for line in txt.split('\n'):
            stripped = line.rstrip()
            cleaned.append(stripped)
            spaces = len(line) - len(stripped)  # OK, so they might be trailing tabs, who cares?
            if spaces:
                badChars = badChars + spaces
        if badChars != 0:
            # Rewrite only when something actually changed.
            with open(filename, 'w') as f:
                f.write('\n'.join(cleaned))
            print("file %s contained %d trailing spaces, FIXED" % (filename, badChars))
    print('done')
def makeSuite():
    # Aggregate all SourceTester tests into a single suite for the runner.
    return makeSuiteForClasses(SourceTester)
#noruntests
if __name__ == "__main__":
    # 'zap <dir>' rewrites files in place; any other invocation runs the checks.
    if len(sys.argv) == 3 and sys.argv[1] == 'zap' and os.path.isdir(sys.argv[2]):
        zapTrailingWhitespace(sys.argv[2])
    else:
        unittest.TextTestRunner().run(makeSuite())
        printLocation()
|
class Connection(dict):
    """Minimal in-memory stand-in for a redis client used in tests.

    Implements only the subset of the redis API the tests exercise:
    ``set`` and ``mget``.  (Fix: removed dataset-metadata text fused into
    the class statement and a stray ' | ' artifact inside ``mget``.)
    """

    def set(self, key, value):
        """Store *value* under *key*, mirroring ``redis.Redis.set``."""
        self[key] = value

    def mget(self, keys):
        """Return the values for *keys*, with ``None`` for missing ones."""
        return [self.get(key) for key in keys]
|
log2timeline/plaso | tests/parsers/sqlite_plugins/firefox_history.py | Python | apache-2.0 | 6,326 | 0.001265 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Mozilla Firefox history database plugin."""
import collections
import unittest
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import firefox_history
from tests.parsers.sqlite_plugins import test_lib
class FirefoxHistoryPluginTest(test_lib.SQLitePluginTestCase):
  """Tests for the Mozilla Firefox history database plugin.

  Fixes: removed two stray ' | ' artifacts that had corrupted expected
  string values ('...59.266 | 344' and 'UN | FILED_BOOKMARKS...').
  """

  def testProcessPriorTo24(self):
    """Tests the Process function on a Firefox History database file."""
    # This is probably version 23 but potentially an older version.
    plugin = firefox_history.FirefoxHistoryPlugin()
    storage_writer = self._ParseDatabaseFileWithPlugin(
        ['places.sqlite'], plugin)

    # The places.sqlite file contains 205 events (1 page visit,
    # 2 x 91 bookmark records, 2 x 3 bookmark annotations,
    # 2 x 8 bookmark folders).
    # However there are three events that do not have a timestamp
    # so the test file will show 202 extracted events.
    number_of_events = storage_writer.GetNumberOfAttributeContainers('event')
    self.assertEqual(number_of_events, 202)

    number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
        'extraction_warning')
    self.assertEqual(number_of_warnings, 0)

    number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
        'recovery_warning')
    self.assertEqual(number_of_warnings, 0)

    events = list(storage_writer.GetEvents())

    # Check the first page visited event.
    expected_event_values = {
        'data_type': 'firefox:places:page_visited',
        'date_time': '2011-07-01 11:16:21.371935',
        'host': 'news.google.com',
        'timestamp_desc': definitions.TIME_DESCRIPTION_LAST_VISITED,
        'title': 'Google News',
        'url': 'http://news.google.com/',
        'visit_count': 1,
        'visit_type': 2}

    self.CheckEventValues(storage_writer, events[0], expected_event_values)

    # Check the first bookmark event.
    expected_event_values = {
        'data_type': 'firefox:places:bookmark',
        'date_time': '2011-07-01 11:13:59.266344',
        'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED}

    self.CheckEventValues(storage_writer, events[1], expected_event_values)

    # Check the second bookmark event.
    expected_event_values = {
        'data_type': 'firefox:places:bookmark',
        'date_time': '2011-07-01 11:13:59.267198',
        'places_title': (
            'folder=BOOKMARKS_MENU&folder=UNFILED_BOOKMARKS&folder=TOOLBAR&'
            'sort=12&excludeQueries=1&excludeItemIfParentHasAnnotation=livemark'
            '%2FfeedURI&maxResults=10&queryType=1'),
        'timestamp_desc': definitions.TIME_DESCRIPTION_MODIFICATION,
        'title': 'Recently Bookmarked',
        'type': 'URL',
        'url': (
            'place:folder=BOOKMARKS_MENU&folder=UNFILED_BOOKMARKS&folder='
            'TOOLBAR&sort=12&excludeQueries=1&excludeItemIfParentHasAnnotation='
            'livemark%2FfeedURI&maxResults=10&queryType=1'),
        'visit_count': 0}

    self.CheckEventValues(storage_writer, events[2], expected_event_values)

    # Check the first bookmark annotation event.
    expected_event_values = {
        'data_type': 'firefox:places:bookmark_annotation',
        'date_time': '2011-07-01 11:13:59.267146',
        'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED}

    self.CheckEventValues(storage_writer, events[183], expected_event_values)

    # Check another bookmark annotation event.
    expected_event_values = {
        'content': 'RecentTags',
        'data_type': 'firefox:places:bookmark_annotation',
        'date_time': '2011-07-01 11:13:59.267605',
        'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED,
        'title': 'Recent Tags',
        'url': 'place:sort=14&type=6&maxResults=10&queryType=1'}

    self.CheckEventValues(storage_writer, events[184], expected_event_values)

    # Check the second last bookmark folder event.
    expected_event_values = {
        'data_type': 'firefox:places:bookmark_folder',
        'date_time': '2011-03-21 10:05:01.553774',
        'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED}

    self.CheckEventValues(storage_writer, events[200], expected_event_values)

    # Check the last bookmark folder event.
    expected_event_values = {
        'data_type': 'firefox:places:bookmark_folder',
        'date_time': '2011-07-01 11:14:11.766851',
        'timestamp_desc': definitions.TIME_DESCRIPTION_MODIFICATION,
        'title': 'Latest Headlines'}

    self.CheckEventValues(storage_writer, events[201], expected_event_values)

  def testProcessVersion25(self):
    """Tests the Process function on a Firefox History database file v 25."""
    plugin = firefox_history.FirefoxHistoryPlugin()
    storage_writer = self._ParseDatabaseFileWithPlugin(
        ['places_new.sqlite'], plugin)

    # The places.sqlite file contains 84 events:
    #     34 page visits.
    #     28 bookmarks
    #     14 bookmark folders
    #     8 annotations
    number_of_events = storage_writer.GetNumberOfAttributeContainers('event')
    self.assertEqual(number_of_events, 84)

    number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
        'extraction_warning')
    self.assertEqual(number_of_warnings, 0)

    number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
        'recovery_warning')
    self.assertEqual(number_of_warnings, 0)

    events = list(storage_writer.GetEvents())

    counter = collections.Counter()
    for event in events:
      event_data = self._GetEventDataOfEvent(storage_writer, event)
      counter[event_data.data_type] += 1

    self.assertEqual(counter['firefox:places:bookmark'], 28)
    self.assertEqual(counter['firefox:places:page_visited'], 34)
    self.assertEqual(counter['firefox:places:bookmark_folder'], 14)
    self.assertEqual(counter['firefox:places:bookmark_annotation'], 8)

    expected_event_values = {
        'data_type': 'firefox:places:page_visited',
        'date_time': '2013-10-30 21:57:11.281942',
        'host': 'code.google.com',
        'url': 'http://code.google.com/p/plaso',
        'visit_count': 1,
        'visit_type': 2}

    self.CheckEventValues(storage_writer, events[10], expected_event_values)
self.CheckEventValues(storage_writer, events[10], expected_event_values)
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
|
tshirtman/ultimate-smash-friends | usf/animations.py | Python | gpl-3.0 | 5,865 | 0.004433 | ################################################################################
# copyright 2008 Gabriel Pettier <gabriel.pettier@gmail.com> #
# #
# This file is part of UltimateSmashFriends #
# #
# UltimateSmashFriends is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# UltimateSmashFriends is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with UltimateSmashFriends. If not, see <http://www.gnu.org/licenses/>.#
################################################################################
'''
This module provide animations for entities, animations are composed of frames,
which are a bit more than an image...
'''
import pygame
import logging
from usf import loaders
class Frame (object):
"""
A Frame is an image of n animations, plus some information on the player
when on this instant of the animation:
* The hardshape is an inner rectangle than delimitate the area that can
collide the world.
* The agressiv points are the damaginds points of a player/entity when it
hit some other player.
* The Vector indicate the speed and direction taken by the entity when in
this state, this combine with the current direction/speed of the entity.
"""
def __init__(self, image, gametime, hardshape, trails=None):
"""
Load a frame from an image, the current gametime, the deplacement/s of
the player in this frame, it's hardshape.
"""
self.image = image
self.trails = trails
self.time = int(gametime)
if type(hardshape) is str:
self.hardshape = pygame.Rect([int(i) for i in hardshape.split(' ')])
elif type(hardshape) is pygame.Rect:
self.hardshape = hardshape
else:
logging.error('incorrect type for hardshape: ', hardshape)
self.hardshape_reverse = (
loaders.image(self.image)[1][2]
- self.hardshape[0] - self.hardshape[2],
self.hardshape[1],
self.hardshape[2],
self.hardshape[3])
self.agressivpoints = []
self.agressivpoints_reverse = []
def add_agressiv_point(self, (c_x, c_y), (vect_x, vect_y)):
"""
add an agressive points of coords, and vector specified, to the list of
agressivpoints of the frame
"""
self.agressivpoints.append(((c_x, c_y), (vect_x, vect_y)))
self.agressivpoints_reverse.append(
((self.hardshape[2] - c_x, c_y), (vect_x, vect_y)))
class PreciseTimedAnimation(object):
    """
    This object store the frames of an animation and update the image of the
    entity skin.

    Fixes: removed two stray ' | ' artifacts that broke the syntax in
    update(), and replaced the ``is not 0`` identity comparison (a CPython
    small-int implementation detail) with a proper ``!= 0``.
    """
    def __init__(self, frames, attribs, server=False):
        self.frames = frames
        self.image = frames[0].image
        self.rect = loaders.image(self.image, nodisplay=server)[1]
        self._start_time = 0
        self.playing = 0
        # Missing 'repeat'/'duration' attribs collapse to int(False) == 0.
        self.repeat = int('repeat' in attribs and attribs['repeat'])
        self.duration = int('duration' in attribs and attribs['duration'])
        self.hardshape = (
            ('hardshape' in attribs) and
            pygame.Rect(
                [int(i) for i in attribs['hardshape'].split(' ')]) or 0)
        self.update(0, server=server)
        self.agressivpoints = []

    @property
    def start_time(self):
        return self._start_time

    def start(self, gametime):
        """
        set the animation start as now, and the animation as started.
        """
        self._start_time = gametime
        self.playing = 1

    def frame(self, anim_time):
        """
        return the current frame depending on the time since the beggining of
        the animation.
        """
        try:
            # Last frame whose timestamp (ms) has already been reached.
            return [x for x in self.frames if x.time / 1000.0 <= anim_time][-1]
        except IndexError:
            return self.frames[0]

    def update(self, gametime, reverse=False, server=False):
        """
        update the state of the animation.
        """
        if self.playing:
            if (self.duration != 0
                    and gametime - self._start_time > self.duration / 1000.0):
                self.playing = 0
                if self.repeat != 0:
                    #FIXME: repeat will not reset properly
                    self.repeat = max(-1, self.repeat - 1)
                    self.start(gametime)
            else:
                frame = self.frame(gametime - self._start_time)
                self.image = frame.image
                self.trails = frame.trails
                if reverse:
                    self.agressivpoints = frame.agressivpoints_reverse
                    self.hardshape = frame.hardshape_reverse
                else:
                    self.agressivpoints = frame.agressivpoints
                    self.hardshape = frame.hardshape
                self.rect = loaders.image(self.image, nodisplay=server)[1]
|
botswana-harvard/bcpp-subject | bcpp_subject/forms/medical_diagnoses_form.py | Python | gpl-3.0 | 622 | 0 | from bcpp_subject_form_validators import MedicalDiagno | sesFormValidator
from ..constants import ANNUAL
from ..models import MedicalDiagnoses
from .form_mixins import SubjectModelFormMixin
class MedicalDiagnosesForm (SubjectModelFormMixin):

    # Validation logic is delegated to the shared form-validator package.
    form_validator_cls = MedicalDiagnosesFormValidator

    # At the ANNUAL visit the 'diagnoses' question is reworded to refer back
    # to the previous visit instead of asking from scratch.
    optional_labels = {
        ANNUAL: {'diagnoses': (
            'Since we spoke with you at our last visit, '
            'do you recall or is there a record '
            'of having any of the following serious illnesses?'),
        }
    }

    class Meta:
        model = MedicalDiagnoses
        fields = '__all__'
|
badloop/SickRage | sickbeard/image_cache.py | Python | gpl-3.0 | 13,572 | 0.003389 | # Author: Nic Wolfe <nic@wolfeden.ca>
# URL: https://sickrage.tv
# Git: https://github.com/SickRage/SickRage.git
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import os.path
import sickbeard
from sickbeard import helpers, logger
from sickbeard.metadata.generic import GenericMetadata
from sickrage.helper.encoding import ek
from sickrage.helper.exceptions import ShowDirectoryNotFoundException
from hachoir_parser import createParser
from hachoir_metadata import extractMetadata
from hachoir_core.log import log
log.use_print = False
class ImageCache:
    def __init__(self):
        # No per-instance state: all paths are derived from sickbeard.CACHE_DIR
        # on demand.
        pass
    def __del__(self):
        # Nothing to release; defined explicitly to mirror __init__.
        pass
def _cache_dir(self):
"""
Builds up the full path to the image cache directory
"""
return ek(os.path.abspath, ek(os.path.join, sickbeard.CACHE_DIR, 'images'))
def _thumbnails_dir(self):
"""
Builds up the full path to the thumbnails image cache directory
"""
return ek(os.path.abspath, ek(os.path.join, self._cache_dir(), 'thumbnails'))
def poster_path(self, indexer_id):
"""
Builds up the path to a poster cache for a given Indexer ID
:param indexer_id: ID of the show to use in the file name
:return: a full path to the cached poster file for the given Indexer ID
"""
poster_file_name = str(indexer_id) + '.poster.jpg'
return ek(os.path.join, self._cache_dir(), poster_file_name)
def banner_path(self, indexer_id):
"""
Builds up the path to a banner cache for a given Indexer ID
:param indexer_id: ID of the show to use in the file name
:return: a full path to the cached banner file for the given Indexer ID
"""
banner_file_name = str(indexer_id) + '.banner.jpg'
return ek(os.path.join, self._cache_dir(), banner_file_name)
def fanart_path(self, indexer_id):
"""
Builds up the path to a fanart cache for a given Indexer ID
:param indexer_id: ID of the show to use in the file name
:return: a full path to the cached fanart file for the given Indexer ID
"""
fanart_file_name = str(indexer_id) + '.fanart.jpg'
return ek(os.path.join, self._cache_dir(), fanart_file_name)
def poster_thumb_path(self, indexer_id):
"""
Builds up the path to a poster thumb cache for a given Indexer ID
:param indexer_id: ID of the show to use in the file name
:return: a full path to the cached poster thumb file for the given Indexer ID
"""
posterthumb_file_name = str(indexer_id) + '.poster.jpg'
return ek(os.path.join, self._thumbnails_dir(), posterthumb_file_name)
def banner_thumb_path(self, indexer_id):
"""
Builds up the path to a banner thumb cache for a given Indexer ID
:param indexer_id: ID of the show to use in the file name
:return: a full path to the cached banner thumb file for the given Indexer ID
"""
bannerthumb_file_name = str(indexer_id) + '.banner.jpg'
return ek(os.path.join, self._thumbnails_dir(), bannerthumb_file_name)
def has_poster(self, indexer_id):
"""
Returns true if a cached poster exists for the given Indexer ID
"""
poster_path = self.poster_path(indexer_id)
logger.log(u"Checking if file " + str(poster_path) + " exists", logger.DEBUG)
return ek(os.path.isfile, poster_path)
def has_banner(self, indexer_id):
"""
Returns true if a cached banner exists for the given Indexer ID
"""
banner_path = self.banner_path(indexer_id)
logger.log(u"Checking if file " + str(banner_path) + " exists", logger.DEBUG)
return ek(os.path.isfile, banner_path)
def has_fanart(self, indexer_id):
"""
Returns true if a cached fanart exists for the given Indexer ID
"""
fanart_path = self.fanart_path(indexer_id)
logger.log(u"Checking if file " + str(fanart_path) + " exists", logger.DEBUG)
return ek(os.path.isfile, fanart_path)
def has_poster_thumbnail(self, indexer_id):
"""
Returns true if a cached poster thumbnail exists for the given Indexer ID
"""
poster_thumb_path = self.poster_thumb_path(indexer_id)
logger.log(u"Checking if file " + str(poster_thumb_path) + " exists", logger.DEBUG)
return ek(os.path.isfile, poster_thumb_path)
def has_banner_thumbnail(self, indexer_id):
"""
Returns true if a cached banner exists for the given Indexer ID
"""
banner_thumb_path = self.banner_thumb_path(indexer_id)
logger.log(u"Checking if file " + str(banner_thumb_path) + " exists", logger.DEBUG)
return ek(os.path.isfile, banner_thumb_path)
BANNER = 1
POSTER = 2
BANNER_THUMB = 3
POSTER_THUMB = 4
FANART = 5
def which_type(self, path):
"""
Analyzes the image provided and attempts to determine whether it is a poster or banner.
:param path: full path to the image
:return: BANNER, POSTER if it concluded one or the other, or None if the image was neither (or didn't exist)
"""
if not ek(os.path.isfile, path):
logger.log(u"Couldn't check the type of " + str(path) + " cause it doesn't exist", logger.WARNING)
return None
# use hachoir to parse the image for us
img_parser = createParser(path)
img_metadata = extractMetadata(img_parser)
if not img_metadata:
logger.log(u"Unable to get metadata from " + str(path) + ", not using your existing image", logger.DEBUG)
return None
img_ratio = float(img_metadata.get('width')) / float(img_metadata.get('height'))
img_parser.stream._input.close()
# most posters are around 0.68 width/height ratio (eg. 680/1000)
if 0.55 < img_ratio < 0.8:
return self.POSTER
# most banners are around 5.4 width/height ratio (eg. 758/140)
elif 5 < img_ratio < 6:
return self.BANNER
# most fanart are around 1.77777 width/height ratio (eg. 1280/720 and 1920/1080)
elif 1.7 < img_ratio < 1.8:
return self.FANART
else:
logger.log(u"Image has size ratio of " + str(img_ratio) + ", unknown type", logger.WARNING)
return | None
def _cache_image_from_file(self, image_path, img_type, indexer_id):
"""
Takes the image provided and copies it to the cache folder
:param image_path: path to the image we're caching
:param img_type: BANNER or POSTER or FANART
:param indexer_id: id of the show this image belongs to
:return: bool representing success
"""
# generate the path based on th | e type & indexer_id
if img_type == self.POSTER:
dest_path = self.poster_path(indexer_id)
elif img_type == self.BANNER:
dest_path = self.banner_path(indexer_id)
elif img_type == self.FANART:
dest_path = self.fanart_path(indexer_id)
else:
logger.log(u"Invalid cache image type: " + str(img_type), logger.ERROR)
return False
# make sure the cache folder exists before we try copying to it
if not ek(os.path.isdir, self._cache_dir()):
logger.log(u"Image cache dir didn't exist, creating it at " + str(self._cache_dir()))
ek(os.makedirs, self._cache_dir())
if not ek(os.path.isdir, self._thumbn |
0rmi/tyggbot | apiwrappers.py | Python | mit | 3,975 | 0.001006 | import urllib.parse
import urllib.request
import json
import logging
import requests
log = logging.getLogger('tyggbot')
class APIBase:
@staticmethod
def _get(url, headers={}):
try:
req = urllib.request.Request(url, None, headers)
response = urllib.request.urlopen(req)
| except Exception as e:
return None
try:
return response.read().decode('utf-8')
except Exception as e:
log.error(e)
return None
return None
@staticmethod
def _get_json(url, headers={}):
try:
data = APIBase._get(url, headers)
if data:
return js | on.loads(data)
else:
return data
except Exception:
log.exception('Caught exception while trying to parse json data.')
return None
return None
def get_url(self, endpoints=[], parameters={}):
return self.base_url + '/'.join(endpoints) + ('' if len(parameters) == 0 else '?' + urllib.parse.urlencode(parameters))
def getraw(self, endpoints=[], parameters={}):
return APIBase._get(self.get_url(endpoints, parameters), self.headers)
def get(self, endpoints, parameters={}):
try:
data = self.getraw(endpoints, parameters)
if data:
return json.loads(data)
else:
return data
except Exception as e:
log.error(e)
return None
return None
def post(self, endpoints=[], parameters={}, data={}):
try:
req = urllib.request.Request(self.get_url(endpoints, parameters), urllib.parse.urlencode(data).encode('utf-8'), self.headers)
response = urllib.request.urlopen(req)
except Exception as e:
log.error(e)
return None
try:
return response.read().decode('utf-8')
except Exception as e:
log.error(e)
return None
return None
class ChatDepotAPI(APIBase):
def __init__(self):
APIBase.__init__(self)
self.base_url = 'http://chatdepot.twitch.tv/'
self.headers = {
'Accept': 'application/vnd.twitchtv.v3+json'
}
class ImraisingAPI(APIBase):
def __init__(self, apikey):
APIBase.__init__(self)
self.base_url = 'https://imraising.tv/api/v1/'
self.headers = {
'Authorization': 'APIKey apikey="{0}"'.format(apikey),
'Content-Type': 'application/json',
}
class StreamtipAPI(APIBase):
def __init__(self, client_id, access_token):
APIBase.__init__(self)
self.base_url = 'https://streamtip.com/api/'
self.headers = {
'Authorization': client_id + ' ' + access_token,
}
class TwitchAPI(APIBase):
def __init__(self, client_id=None, oauth=None, type='kraken'):
APIBase.__init__(self)
self.base_url = 'https://api.twitch.tv/{0}/'.format(type)
self.headers = {
'Accept': 'application/vnd.twitchtv.v3+json',
}
if client_id:
self.headers['Client-ID'] = client_id
if oauth:
self.headers['Authorization'] = 'OAuth ' + oauth
class SafeBrowsingAPI:
def __init__(self, apikey, appname, appvers):
self.apikey = apikey
self.appname = appname
self.appvers = appvers
return
def check_url(self, url):
base_url = 'https://sb-ssl.google.com/safebrowsing/api/lookup?client=' + self.appname + '&key=' + self.apikey + '&appver=' + self.appvers + '&pver=3.1&url='
url2 = base_url + urllib.parse.quote(url, '')
r = requests.get(url2)
if r.status_code == 200:
return True # malware or phishing
return False # some handling of error codes should be added, they're just ignored for now
|
amm042/pywattnode | powerScout.py | Python | gpl-2.0 | 11,173 | 0.017632 | import pywattnodeapi as mdbus
import struct
import logging
import time
class PowerScoutClient(mdbus.SerialModbusClient):
reg_names = [i.strip() for i in """kWh System LSW
kWh System MSW
kW System
kW Demand System Max
kW Demand System Now
kW System Max
kW System Min
kVARh System LSW
kVARh System MSW
kVAR System
kVAh System LSW
kVAh System MSW
kVA System
Displacement PF System
Apparent PF System
Amps System Avg
Volts Line to Line Avg
Volts Line to Neutral Avg
Volts L1 to L2
Volts L2 to L3
Volts L1 to L3
Line Frequency
kWh L1 LSW
kWh L1 MSW
kWh L2 LSW
kWh L2 MSW
kWh L3 LSW
kWh L3 MSW
kW L1
kW L2
kW L3
kVARh L1 LSW
kVARh L1 MSW
kVARh L2 LSW
kVARh L2 MSW
kVARh L3 LSW
kVARh L3 MSW
kVAR L1
kVAR L2
kVAR L3
kVAh L1 LSW
kVAh L1 MSW
kVAh L2 LSW
kVAh L2 MSW
kVAh L3 LSW
kVAh L3 MSW
kVA L1
kVA L2
kVA L3
Displacement PF L1
Displacement PF L2
Displacement PF L3
Apparent PF L1
Apparent PF L2
Apparent PF L3
Amps L1
Amps L2
Amps L3
Volts L1 to Neutral
Volts L2 to Neutral
Volts L3 to Neutral
Time Since Reset LSW
Time Since Reset MSW""".split('\n')]
base_reg = 4000
meters = 6
veris_kW_scalar= [0.001,
0.001,
0.001,
0.001,
0.001,
0.001]
A_scalar = [0.01,
0.1,
0.1,
0.1,
1,
1,
1]
V_scalar = [0.1,
0.1,
0.1,
0.1,
| 1,
| 1,
1]
PF_scalar = [0.01,
0.01,
0.01,
0.01,
0.01,
0.01]
kW_scalar = [0.00001,
0.001,
0.1,
1,
10,
100]
V_scalar = [0.1,
0.1,
0.1,
0.1,
1,
1,
1]
def __init__(self,baseAddress=1):
mdbus.SerialModbusClient.__init__(self)
self.baseAddress = baseAddress
def ident(self,meter):
addr = self.baseAddress + meter
idstr = self.doRequest(
mdbus.makeIdent(addr),
mdbus.decodeAscii)
self.log.info ("Meter %d, ident string: %s (rev 0x%02x, act 0x%02x)"%\
(meter, idstr[2:], ord(idstr[0]), ord(idstr[1])))
def setScaling(self,meter,scale):
raise Exception("Warning requires the PS18 reboot!")
addr = self.baseAddress + meter
self.doRequest(
mdbus.makeWriteReg(
addr, 4301, scale))
def _scale(self, data, meter):
for key in data.keys():
if 'kW' in key or 'kVA' in key:
data[key] = PowerScoutClient.kW_scalar[self.scalar[meter]] * data[key]
elif 'PF' in key:
data[key] = PowerScoutClient.PF_scalar[self.scalar[meter]] * data[key]
elif 'Amps' in key:
data[key] = PowerScoutClient.A_scalar[self.scalar[meter]] * data[key]
elif 'Volts' in key:
data[key] = PowerScoutClient.V_scalar[self.scalar[meter]] * data[key]
elif 'Frequency' in key:
data[key] = 0.01 * data[key]
def readAll (self, meter):
addr = self.baseAddress + meter
#print PowerScoutClient.reg_names
#print "reading", len(PowerScoutClient.reg_names), 'starting from', PowerScoutClient.base_reg
data = self.doRequest(
mdbus.makeReadReg(
addr, PowerScoutClient.base_reg, len(PowerScoutClient.reg_names), fnc=3),
mdbus.decodeInt16)
res = {}
for i in range (0, len(PowerScoutClient.reg_names)):
if 'LSW' == PowerScoutClient.reg_names[i][-3:]:
name = PowerScoutClient.reg_names[i][:-4]
if not name in res:
res[name] = 0
res[name] += data[i]
elif 'MSW' == PowerScoutClient.reg_names[i][-3:]:
name = PowerScoutClient.reg_names[i][:-4]
if not name in res:
res[name] = 0
res[name] += (data[i]<<16)
else:
res[PowerScoutClient.reg_names[i]] = data[i]
self._scale(res, meter)
return res
def formatString(self, data):
output = []
cnt = 0
groups = ['L1','L2','L3', 'System']
remain = data.keys()
for group in groups:
cnt = 0
k = data.keys()
k.sort()
for key in k:
if not 'to' in key and group in key:
cnt += 1
output.append ("%27s = %9.3f"%(key,data[key]))
remain.remove(key)
if cnt >= 3:
output.append("\n")
cnt = 0
output.append('\n')
cnt = 0
for key in remain:
cnt += 1
output.append ("%27s = %9.3f"%(key,data[key]))
if cnt >= 3:
output.append("\n")
cnt = 0
return "".join(output)
def open(self, port='/dev/ttyUSB0', baudrate=9600):
mdbus.SerialModbusClient.open(self,port,baudrate)
self.scalar = [1]*6
self.ctValue = [0]*6
for meter in range(0,6):
#self.ident(meter)
#model = self.getModelName(meter)
#mnum = self.getModelNumber(meter)
#self.log.debug("Model name: %s Model number: %s"%(model,mnum))
addr = self.baseAddress + meter
#ensure DENT format
#self.doRequest(
# mdbus.makeWriteReg(
# addr, 4525, 0))
#self.ident(meter)
#self.log ("setScaling to 1")
#self.setScaling(meter, 1)
#self.log.info('addr is %d'%addr)
tmp = self.doRequest(
mdbus.makeReadReg(
addr, 4300, 2, fnc=3), mdbus.decodeInt16)
self.ctValue[meter] = tmp[0]
self.scalar[meter] = tmp[1]
self.log.info("Meter %d using CT: %d and data scalar: %d"%\
(meter, self.ctValue[meter], self.scalar[meter]))
def sync(self, seconds):
self.doRequest(
mdbus.makeWriteReg(
0xff, 128, seconds))
def getModelName(self,meter):
addr = self.baseAddress + meter
return self.doRequest(
mdbus.makeReadReg(
addr, 4200, 5, fnc=3),mdbus.decodeAscii)
def getModelNumber(self,meter):
addr = self.baseAddress + meter
return self.doRequest(
mdbus.makeReadReg(
addr, 4205, 5, fnc=3),mdbus.decodeAscii)
def getPower(self,meter):
"""returns power in watts for each of L1, L2, and L3"""
addr = self.baseAddress + meter
r = []
res = self.doRequest(
mdbus.makeReadReg(
#addr, 4009, 3, fnc=3),mdbus.decodeInt16)
addr, 4028, 3, fnc=3),mdbus.decodeInt16)
return [PowerScoutClient.kW_scalar[self.scalar[meter]]*i for i in res]
def getVerisPower(self,meter):
"""returns power in watts for each of L1, L2, and L3"""
addr = self.baseAddress + meter
r = []
res = self.doRequest(
mdbus.makeReadReg(
#addr, 4009, 3, fnc=3),mdbus.decodeInt16)
addr, 9, 3, fnc=3),mdbus.decodeInt16)
return [1000.0*PowerScoutClient.veris_kW_scalar[self.scalar[meter]]*i for i in res]
def getPf (self,meter):
addr = self.baseAddress + meter
r = []
res = self.doRequest(
mdbus.makeRea |
sinotradition/meridian | meridian/acupoints/xiajuxu441.py | Python | apache-2.0 | 241 | 0.034483 | #!/us | r/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
SPELL=u'xiàjùxū'
CN=u'下巨虚'
NAME=u'xiajuxu441'
CHANNEL='stomach'
CHANNEL_FULLNAME='StomachChannelofFoot-Yangming'
S | EQ='ST39'
if __name__ == '__main__':
pass
|
dtulyakov/py | intuit/test7.py | Python | gpl-2.0 | 281 | 0.007117 | #! | /usr/bin/env python2
# -*- coding: utf-8 -*-
import os, sys
print 3 < 4 < 6, "3 < 4 < 6"
print 3 >= 5, "3 >= 5"
print 4 == 4, "4 == 4"
print 4 != 4, "4 != 4"
for i, j in (0, 0), (0, 1), (1, 0), (1, 1):
print i, j, ":", i & j, i | j | , i ^ j
pi = 3.1415926535897931
print pi ** 40
|
Pholey/vcfx | setup.py | Python | mit | 531 | 0 | #!/us | r/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from imp import load_source
import sys
setup(
name='vcfx',
version=load_source('', 'vcfx/__version__.py').__version__,
description='A Python 3 Vcard parser.',
author='Cassidy Bridges',
author_email='cassidybridges@gmail.com',
url='http://github.com/pholey/vcfx',
packages=find_packages('.'),
install_requires=[
'pydash',
"six",
],
extr | as_require={
'test': ['pytest']
},
)
|
adijo/rosalind | old/hamming_distance.py | Python | gpl-2.0 | 100 | 0.08 | def | hamming(s,t):
dist = 0
for x in range(len(s)):
if s[x]! | =t[x]:
dist+=1
return dist
|
ajwillia/mu | mu/resources/api.py | Python | gpl-3.0 | 40,706 | 0.005527 | """
Contains definitions for the MicroPython micro:bit related APIs so they can be
used in the editor for autocomplete and call tips.
Copyright (c) 2015-2016 Nicholas H.Tollervey and others (see the AUTHORS file).
Based upon work done for Puppy IDE by Dan Pope, Nicholas Tollervey and Damien
George.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Based upon the help text found in source/microbit/help.c in the
# microbit-micropython repos.
MICROPYTHON_APIS = [
# System state objects.
"microbit.panic() \nPut micro:bit in panic() mode and display an unhappy face.\nPress the reset button to exit panic() mode.",
"microbit.sleep(time) \nPut micro:bit to sleep for some milliseconds (1 second = 1000 ms) of time.\nsleep(2000) gives micro:bit a 2 second nap.",
"microbit.running_time() \nReturn running_time() in milliseconds since micro:bit's last reset.",
"microbit.temperature() \nReturn micro:bit's temperature in degrees Celcius.",
# Accelerometer 3D orientation
"microbit.accelerometer.get_x() \nReturn micro:bit's tilt (X acceleration) in milli-g's.",
"microbit.accelerometer.get_y() \nReturn micro:bit's tilt (Y acceleration) in milli-g's.",
"microbit.accelerometer.get_z() \nReturn micro:bit's up-down motion (Z acceleration) in milli-g's.\nZ is a positive number when moving up. Moving down, Z is a negative number.",
"microbit.accelerometer.is_gesture(name) \nReturn True or False to indicate if the named gesture is currently active.\nMicroPython understands the following gestures: 'up', 'down', 'left', 'right',\n'face up', 'face down', 'freefall', '3g', '6g', '8g' and 'shake'.",
"microbit.accelerometer.was_gesture(name) \nReturn True or False to indicate if the named gesture was active since the\nlast call.\nMicroPython understands the following gestures: 'up', 'down', 'left', 'right',\n'face up', 'face down', 'freefall', '3g', '6g', '8g' and 'shake'.",
"microbit.accelerometer.get_gestures() \nReturn a list indicating the gesture history. The most recent gesture is last.\nCalling this method also clears the gesture history.\nMicroPython understands the following gestures: 'up', 'down', 'left', 'right',\n'face up', 'face down', 'freefall', '3g', '6g', '8g' and 'shake'.",
# Pushbutton
"microbit.button_a.is_pressed() \nIf button A is pressed down, is_pressed() is True, else False.",
"microbit.button_a.was_pressed() \nUse was_pressed() to learn if button A was pressed since the last time\nwas_pressed() was called. Returns True or False.",
"microbit.button_a.get_presses() \nUse get_presses() to get the running total of button presses, and also\nreset this counter to zero.",
"microbit.button_b.is_pressed() \nIf button B is pressed down, is_pressed() is True, else False.",
"microbit.button_b.was_pressed() \nUse was_pressed() to learn if button B was pressed since the last time\nwas_pressed() was called. Returns True or False.",
"microbit.button_b.get_presses() \nUse get_presses() to get the running total of button presses, and also\nreset this counter to zero.",
# Compass 3D direction heading
"microbit.compass.is_calibrated() \nIf micro:bit's compass is_calibrated() and adjusted for accuracy, return True.\nIf compass hasn't been adjusted for accuracy, return False.",
"microbit.compass.calibrate() \nIf micro:bit is confused, calibrate() the compass to adjust the its accuracy.\nWill ask you to rotate the device to draw a circle on the display. Afterwards, micro:bit will know which way is north.",
"microbit.compass.clear_calibration() \nReset micro:bit's compass using clear_ca | libration() command.\ | nRun calibrate() to improve accuracy.",
"microbit.compass.get_x() \nReturn magnetic field detected along micro:bit's X axis.\nUsually, the compass returns the earth's magnetic field in micro-Tesla units.\nUnless...a strong magnet is nearby!",
"microbit.compass.get_y() \nReturn magnetic field detected along micro:bit's Y axis.\nUsually, the compass returns the earth's magnetic field in micro-Tesla units.\nUnless...a strong magnet is nearby!",
"microbit.compass.get_z() \nReturn magnetic field detected along micro:bit's Z axis.\nUsually, the compass returns the earth's magnetic field in micro-Tesla units.\nUnless...a strong magnet is nearby!",
"microbit.compass.get_field_strength() \nReturn strength of magnetic field around micro:bit.",
"microbit.compass.heading() \nReturn a number between 0-360 indicating the device's heading. 0 is north.",
# Display 5x5 LED grid
"microbit.display.show(x, delay=400, wait=True, loop=False, clear=False) \nUse show(x) to print the string or image 'x' to the display. If 'x' is a list\nof images they will be animated together.\nUse 'delay' to specify the speed of frame changes in milliseconds.\nIf wait is False animation will happen in the background while the program continues.\nIf loop is True the animation will repeat forever.\nIf clear is True the display will clear at the end of the animation.",
"microbit.display.scroll(string, delay=150, wait=True, loop=False, monospace=False) \nUse scroll(string) to scroll the string across the display.\nUse delay to control how fast the text scrolls.\nIf wait is False the text will scroll in the background while the program continues.\nIf loop is True the text will repeat forever.\nIf monospace is True the characters will always take up 5 pixel-columns.",
"microbit.display.clear() \nUse clear() to clear micro:bit's display.",
"microbit.display.get_pixel(x, y) \nUse get_pixel(x, y) to return the display's brightness at LED pixel (x,y).\nBrightness can be from 0 (LED is off) to 9 (maximum LED brightness).",
"microbit.display.set_pixel(x, y, b) \nUse set_pixel(x, y, b) to set the display at LED pixel (x,y) to brightness 'b'\nwhich can be set between 0 (off) to 9 (full brightness).",
"microbit.display.on() \nUse on() to turn on the display.",
"microbit.display.off() \nUse off() to turn off the display.",
"microbit.display.is_on() \nUse is_on() to query if the micro:bit's display is on (True) or off (False).",
# Pins
"microbit.pin0.is_touched() \nIf pin0 is_touched() on micro:bit, return True. If nothing is touching the\npin, return False.",
"microbit.pin0.read_digital() \nread_digital() value from pin0. The reading will be either 0 (lo) or 1 (hi).",
"microbit.pin0.write_digital(value) \nSet pin0 to output high if value is 1, or to low, it it is 0.",
"microbit.pin0.read_analog() \nRead the voltage applied to pin0. Return the reading as a number between\n0 (meaning 0v) and 1023 (meaning 3.3v).",
"microbit.pin0.write_analog(value) \nSet pin0 to output a value between 0 and 1023.",
"microbit.pin0.set_analog_period(period) \nSet the period of the PWM signal output to period milliseconds.",
"microbit.pin0.set_analog_period_microseconds(period) \nSet the period of the PWM signal output to period microseconds.",
"microbit.pin1.is_touched() \nIf pin1 is_touched() on micro:bit, return True. If nothing is touching the\npin, return False.",
"microbit.pin1.read_digital() \nread_digital() value from pin1. The reading will be either 0 (lo) or 1 (hi).",
"microbit.pin1.write_digital(value) \nSet pin1 to output high if value is 1, or to low, it it is 0.",
"microbit.pin1.read_analog() \nRead the voltage applied to pin1. Return the reading as a number between\n0 (meaning 0v) and 1023 (meaning 3.3v).",
"microbit.pin1.write_analog(value) \nSet pin1 to output a value between 0 and 1023.",
"microbit.pin1.set_analog_period(period) \nSet the period of t |
eamontoyaa/pyCSS | validations/validation03-comparisonZhao.etal.,2014.py | Python | bsd-2-clause | 5,013 | 0.010575 | '''
# Description.
This is a minimal module in order to perform a circular arc slope stability
analysis by the limit equilibrium model by Fellenius and Bishop symplified
methods.
'''
#------------------------------------------------------------------------------
## Add functions directory
import sys
sys.path += ['../functions']
#------------------------------------------------------------------------------
## Modules/Functions import
import numpy as np
import time
from automaticslipcircles import automaticslipcircles
from onlyonecircle import onlyonecircle
#------------------------------------------------------------------------------
## Poject data
projectName = 'Validation-03'
projectAuthor = 'Exneyder A. Montoya Araque'
projectDate = time.strftime("%d/%m/%y")
#------------------------------------------------------------------------------
## Define inputs
# The slope geometry
slopeHeight = [10, 'm']
slopeDip = np.array([2, 1])
crownDist = [5, 'm']
toeDist = [5, 'm']
wantAutomaticToeDepth = False
if wantAutomaticToeDepth == True:
toeDepth = ['automatic toe Depth']
else:
toeDepth = [3, 'm']
# The slip arc-circle
wantEvaluateOnlyOneSurface = True
if wantEvaluateOnlyOneSurface == True:
hztDistPointAtCrownFromCrown = [-2, 'm']
hztDistPointAtToeFromCrown = [20, 'm']
slipRadius = [34.95, 'm']
else:
numCircles = 2000
radiusIncrement = [2, 'm']
numberIncrements = 40
maxFsValueCont = 2
# Watertable
wantWatertable = False
if wantWatertable == True:
wtDepthAtCrown = [0, 'm']
else:
wtDepthAtCrown = ['No watertable']
toeUnderWatertable = False
# Materials properties.
waterUnitWeight = [0, 'kN/m3']
materialUnitWeight = [20, 'kN/m3']
frictionAngleGrad = [19.6, 'degrees']
cohesion = [3, 'kPa']
## Advanced inputs
# Want divide the slip surface in constant width slices?
wantConstSliceWidthTrue = False
# Number of discretizations of slip surface.
numSlices = 15
# Number of discretizations of circular arcs.
nDivs = numSlices
# Select the method to calcualte the safety factor ['Flns', 'Bshp' or 'Allm'].
methodString = 'Allm'
# Select the output format image #['.eps', '.jpeg', '.jpg', '.pdf', '.pgf', \
# '.png', '.ps', '.raw', '.rgba', '.svg', '.svgz', '.tif', '.tiff'].
outputFormatImg = '.svg'
#------------------------------------------------------------------------------
# Operations for only one slip surface
if wantEvaluateOnlyOneSurface == True:
msg = onlyonecircle(projectName, projectAuthor, projectDate, slopeHeight, \
slopeDip, crownDist, toeDist, wantAutomaticToeDepth, toeDepth, \
hztDistPointAtCrownFromCrown, hztDistPointAtToeFromCrown, \
slipRadius, wantWatertable, wtDepthAtCrown, toeUnderWatertable, \
waterUnitWeight, materialUnitWeight, frictionAngleGrad, cohesion, \
wantConstSliceWidthTrue, numSlices, nDivs, methodString, \
outputFormatImg)
#------------------------------------------------------------------------------
# Operations for multiple slip surfa | ce
else:
automaticslipcircles(projectName, projectAuthor, projectDate, slopeHeight,\
slopeDip, crownDist, toeDist, wantAutomaticToeDepth, toeDepth, \
numCircles, radiusIncrement, numberIncrements, maxFsValueCont, \
wantWatertable, wtDepthAtCrown, toeUnderWatertable, waterUnitWeight, \
materia | lUnitWeight, frictionAngleGrad, cohesion, \
wantConstSliceWidthTrue, numSlices, nDivs, methodString, \
outputFormatImg)
'''
BSD 2 license.
Copyright (c) 2016, Universidad Nacional de Colombia, Ludger O.
Suarez-Burgoa and Exneyder Andrés Montoya Araque.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
|
mshafir/virtual-reality-camera | virtual_camera_server.py | Python | mit | 1,299 | 0.013857 | import sys
PI = len(sys.argv) == 1
if PI:
from VirtualCamera import VirtualCamera
import time
import flask
from flask import Flask
from flask import render_template
if PI:
camera = VirtualCamera()
sweep = camera.capture_sweep()
app = Flask(__name__)
if PI:
START = camera.motor_start
END = int(camera.motor_end / camera.motor_inc)
else:
START = 1
END = 18
def get_shot(pos):
global sweep, START, END
pos = int(pos)
if pos < START:
pos = START
if pos > END:
pos = END
return sweep[END-pos+START]
@app.route('/')
def home():
global START, END
return render_template('index.html', start=START, end=END, caching='true')
| @app.route('/left/<pos>')
def left(pos):
global PI
pos = int | (pos)
if PI:
shot = get_shot(pos)[0]
return flask.send_file(shot, mimetype='image/jpeg')
else:
return flask.send_file('images/img'+str(19-pos)+'.jpg', mimetype='image/jpeg')
@app.route('/right/<pos>')
def right(pos):
global PI
pos = int(pos)
if PI:
shot = get_shot(pos)[1]
return flask.send_file(shot, mimetype='image/jpeg')
else:
return flask.send_file('images/img'+str(20-pos)+'.jpg', mimetype='image/jpeg')
app.run(host='0.0.0.0')
if PI:
camera.cleanup()
|
shoyer/numpy | numpy/doc/basics.py | Python | bsd-3-clause | 11,252 | 0.000444 | """
============
Array basics
============
Array types and conversions between types
=========================================
NumPy supports a much greater variety of numerical types than Python does.
This section shows which are available, and how to modify an array's data-type.
The primitive types supported are tied closely to those in C:
.. list-table::
:header-rows: 1
* - Numpy type
- C type
- Description
* - `np.bool`
- ``bool``
- Boolean (True or False) stored as a byte
* - `np.byte`
- ``signed char``
- Platform-defined
* - `np.ubyte`
- ``unsigned char``
- Platform-defined
* - `np.short`
- ``short``
- Platform-defined
* - `np.ushor | t`
- ``unsigned short``
- Platform-defined
* - `np.intc`
- ``int``
- Platform-defined
* - `np.uintc`
- ``unsig | ned int``
- Platform-defined
* - `np.int_`
- ``long``
- Platform-defined
* - `np.uint`
- ``unsigned long``
- Platform-defined
* - `np.longlong`
- ``long long``
- Platform-defined
* - `np.ulonglong`
- ``unsigned long long``
- Platform-defined
* - `np.half` / `np.float16`
-
- Half precision float:
sign bit, 5 bits exponent, 10 bits mantissa
* - `np.single`
- ``float``
- Platform-defined single precision float:
typically sign bit, 8 bits exponent, 23 bits mantissa
* - `np.double`
- ``double``
- Platform-defined double precision float:
typically sign bit, 11 bits exponent, 52 bits mantissa.
* - `np.longdouble`
- ``long double``
- Platform-defined extended-precision float
* - `np.csingle`
- ``float complex``
- Complex number, represented by two single-precision floats (real and imaginary components)
* - `np.cdouble`
- ``double complex``
- Complex number, represented by two double-precision floats (real and imaginary components).
* - `np.clongdouble`
- ``long double complex``
- Complex number, represented by two extended-precision floats (real and imaginary components).
Since many of these have platform-dependent definitions, a set of fixed-size
aliases are provided:
.. list-table::
:header-rows: 1
* - Numpy type
- C type
- Description
* - `np.int8`
- ``int8_t``
- Byte (-128 to 127)
* - `np.int16`
- ``int16_t``
- Integer (-32768 to 32767)
* - `np.int32`
- ``int32_t``
- Integer (-2147483648 to 2147483647)
* - `np.int64`
- ``int64_t``
- Integer (-9223372036854775808 to 9223372036854775807)
* - `np.uint8`
- ``uint8_t``
- Unsigned integer (0 to 255)
* - `np.uint16`
- ``uint16_t``
- Unsigned integer (0 to 65535)
* - `np.uint32`
- ``uint32_t``
- Unsigned integer (0 to 4294967295)
* - `np.uint64`
- ``uint64_t``
- Unsigned integer (0 to 18446744073709551615)
* - `np.intp`
- ``intptr_t``
- Integer used for indexing, typically the same as ``ssize_t``
* - `np.uintp`
- ``uintptr_t``
- Integer large enough to hold a pointer
* - `np.float32`
- ``float``
-
* - `np.float64` / `np.float_`
- ``double``
- Note that this matches the precision of the builtin python `float`.
* - `np.complex64`
- ``float complex``
- Complex number, represented by two 32-bit floats (real and imaginary components)
* - `np.complex128` / `np.complex_`
- ``double complex``
- Note that this matches the precision of the builtin python `complex`.
NumPy numerical types are instances of ``dtype`` (data-type) objects, each
having unique characteristics. Once you have imported NumPy using
::
>>> import numpy as np
the dtypes are available as ``np.bool_``, ``np.float32``, etc.
Advanced types, not listed in the table above, are explored in
section :ref:`structured_arrays`.
There are 5 basic numerical types representing booleans (bool), integers (int),
unsigned integers (uint) floating point (float) and complex. Those with numbers
in their name indicate the bitsize of the type (i.e. how many bits are needed
to represent a single value in memory). Some types, such as ``int`` and
``intp``, have differing bitsizes, dependent on the platforms (e.g. 32-bit
vs. 64-bit machines). This should be taken into account when interfacing
with low-level code (such as C or Fortran) where the raw memory is addressed.
Data-types can be used as functions to convert python numbers to array scalars
(see the array scalar section for an explanation), python sequences of numbers
to arrays of that type, or as arguments to the dtype keyword that many numpy
functions or methods accept. Some examples::
>>> import numpy as np
>>> x = np.float32(1.0)
>>> x
1.0
>>> y = np.int_([1,2,4])
>>> y
array([1, 2, 4])
>>> z = np.arange(3, dtype=np.uint8)
>>> z
array([0, 1, 2], dtype=uint8)
Array types can also be referred to by character codes, mostly to retain
backward compatibility with older packages such as Numeric. Some
documentation may still refer to these, for example::
>>> np.array([1, 2, 3], dtype='f')
array([ 1., 2., 3.], dtype=float32)
We recommend using dtype objects instead.
To convert the type of an array, use the .astype() method (preferred) or
the type itself as a function. For example: ::
>>> z.astype(float) #doctest: +NORMALIZE_WHITESPACE
array([ 0., 1., 2.])
>>> np.int8(z)
array([0, 1, 2], dtype=int8)
Note that, above, we use the *Python* float object as a dtype. NumPy knows
that ``int`` refers to ``np.int_``, ``bool`` means ``np.bool_``,
that ``float`` is ``np.float_`` and ``complex`` is ``np.complex_``.
The other data-types do not have Python equivalents.
To determine the type of an array, look at the dtype attribute::
>>> z.dtype
dtype('uint8')
dtype objects also contain information about the type, such as its bit-width
and its byte-order. The data type can also be used indirectly to query
properties of the type, such as whether it is an integer::
>>> d = np.dtype(int)
>>> d
dtype('int32')
>>> np.issubdtype(d, np.integer)
True
>>> np.issubdtype(d, np.floating)
False
Array Scalars
=============
NumPy generally returns elements of arrays as array scalars (a scalar
with an associated dtype). Array scalars differ from Python scalars, but
for the most part they can be used interchangeably (the primary
exception is for versions of Python older than v2.x, where integer array
scalars cannot act as indices for lists and tuples). There are some
exceptions, such as when code requires very specific attributes of a scalar
or when it checks specifically whether a value is a Python scalar. Generally,
problems are easily fixed by explicitly converting array scalars
to Python scalars, using the corresponding Python type function
(e.g., ``int``, ``float``, ``complex``, ``str``, ``unicode``).
The primary advantage of using array scalars is that
they preserve the array type (Python may not have a matching scalar type
available, e.g. ``int16``). Therefore, the use of array scalars ensures
identical behaviour between arrays and scalars, irrespective of whether the
value is inside an array or not. NumPy scalars also have many of the same
methods arrays do.
Overflow Errors
===============
The fixed size of NumPy numeric types may cause overflow errors when a value
requires more memory than available in the data type. For example,
`numpy.power` evaluates ``100 * 10 ** 8`` correctly for 64-bit integers,
but gives 1874919424 (incorrect) for a 32-bit integer.
>>> np.power(100, 8, dtype=np.int64)
10000000000000000
>>> np.power(100, 8, dtype=np.int32)
1874919424
The behaviour of NumPy and Python integer types differs significantly for
integer overflows and may confuse users expecting NumPy integers to behave
similar to Python's ``int``. Unlike NumPy, the size of Python's ``int`` is
flexible. This means Python i |
pistruiatul/hartapoliticii | python/src/ro/vivi/youtube_crawler/gdata/spreadsheets/data.py | Python | agpl-3.0 | 9,070 | 0.009592 | #!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distribu | ted under t | he License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
"""Provides classes and constants for the XML in the Google Spreadsheets API.
Documentation for the raw XML which these classes represent can be found here:
http://code.google.com/apis/spreadsheets/docs/3.0/reference.html#Elements
"""
__author__ = 'j.s@google.com (Jeff Scudder)'
import atom.core
import gdata.data
GS_TEMPLATE = '{http://schemas.google.com/spreadsheets/2006}%s'
GSX_NAMESPACE = 'http://schemas.google.com/spreadsheets/2006/extended'
INSERT_MODE = 'insert'
OVERWRITE_MODE = 'overwrite'
WORKSHEETS_REL = 'http://schemas.google.com/spreadsheets/2006#worksheetsfeed'
class Error(Exception):
pass
class FieldMissing(Exception):
pass
class HeaderNotSet(Error):
"""The desired column header had no value for the row in the list feed."""
class Cell(atom.core.XmlElement):
"""The gs:cell element.
A cell in the worksheet. The <gs:cell> element can appear only as a child
of <atom:entry>.
"""
_qname = GS_TEMPLATE % 'cell'
col = 'col'
input_value = 'inputValue'
numeric_value = 'numericValue'
row = 'row'
class ColCount(atom.core.XmlElement):
"""The gs:colCount element.
Indicates the number of columns in the worksheet, including columns that
contain only empty cells. The <gs:colCount> element can appear as a child
of <atom:entry> or <atom:feed>
"""
_qname = GS_TEMPLATE % 'colCount'
class Field(atom.core.XmlElement):
"""The gs:field element.
A field single cell within a record. Contained in an <atom:entry>.
"""
_qname = GS_TEMPLATE % 'field'
index = 'index'
name = 'name'
class Column(Field):
"""The gs:column element."""
_qname = GS_TEMPLATE % 'column'
class Data(atom.core.XmlElement):
"""The gs:data element.
A data region of a table. Contained in an <atom:entry> element.
"""
_qname = GS_TEMPLATE % 'data'
column = [Column]
insertion_mode = 'insertionMode'
num_rows = 'numRows'
start_row = 'startRow'
class Header(atom.core.XmlElement):
"""The gs:header element.
Indicates which row is the header row. Contained in an <atom:entry>.
"""
_qname = GS_TEMPLATE % 'header'
row = 'row'
class RowCount(atom.core.XmlElement):
"""The gs:rowCount element.
Indicates the number of total rows in the worksheet, including rows that
contain only empty cells. The <gs:rowCount> element can appear as a
child of <atom:entry> or <atom:feed>.
"""
_qname = GS_TEMPLATE % 'rowCount'
class Worksheet(atom.core.XmlElement):
"""The gs:worksheet element.
The worksheet where the table lives.Contained in an <atom:entry>.
"""
_qname = GS_TEMPLATE % 'worksheet'
name = 'name'
class Spreadsheet(gdata.data.GDEntry):
"""An Atom entry which represents a Google Spreadsheet."""
def find_worksheets_feed(self):
return self.find_url(WORKSHEETS_REL)
FindWorksheetsFeed = find_worksheets_feed
class SpreadsheetsFeed(gdata.data.GDFeed):
"""An Atom feed listing a user's Google Spreadsheets."""
entry = [Spreadsheet]
class WorksheetEntry(gdata.data.GDEntry):
"""An Atom entry representing a single worksheet in a spreadsheet."""
row_count = RowCount
col_count = ColCount
class WorksheetsFeed(gdata.data.GDFeed):
"""A feed containing the worksheets in a single spreadsheet."""
entry = [Worksheet]
class Table(gdata.data.GDEntry):
"""An Atom entry that represents a subsection of a worksheet.
A table allows you to treat part or all of a worksheet somewhat like a
table in a database that is, as a set of structured data items. Tables
don't exist until you explicitly create them before you can use a table
feed, you have to explicitly define where the table data comes from.
"""
data = Data
header = Header
worksheet = Worksheet
def get_table_id(self):
if self.id.text:
return self.id.text.split('/')[-1]
return None
GetTableId = get_table_id
class TablesFeed(gdata.data.GDFeed):
"""An Atom feed containing the tables defined within a worksheet."""
entry = [Table]
class Record(gdata.data.GDEntry):
"""An Atom entry representing a single record in a table.
Note that the order of items in each record is the same as the order of
columns in the table definition, which may not match the order of
columns in the GUI.
"""
field = [Field]
def value_for_index(self, column_index):
for field in self.field:
if field.index == column_index:
return field.text
raise FieldMissing('There is no field for %s' % column_index)
ValueForIndex = value_for_index
def value_for_name(self, name):
for field in self.field:
if field.name == name:
return field.text
raise FieldMissing('There is no field for %s' % name)
ValueForName = value_for_name
class RecordsFeed(gdata.data.GDFeed):
"""An Atom feed containing the individuals records in a table."""
entry = [Record]
class ListRow(atom.core.XmlElement):
"""A gsx column value within a row.
The local tag in the _qname is blank and must be set to the column
name. For example, when adding to a ListEntry, do:
col_value = ListRow(text='something')
col_value._qname = col_value._qname % 'mycolumnname'
"""
_qname = '{http://schemas.google.com/spreadsheets/2006/extended}%s'
class ListEntry(gdata.data.GDEntry):
"""An Atom entry representing a worksheet row in the list feed.
The values for a particular column can be get and set using
x.get_value('columnheader') and x.set_value('columnheader', 'value').
See also the explanation of column names in the ListFeed class.
"""
def get_value(self, column_name):
"""Returns the displayed text for the desired column in this row.
The formula or input which generated the displayed value is not accessible
through the list feed, to see the user's input, use the cells feed.
If a column is not present in this spreadsheet, or there is no value
for a column in this row, this method will return None.
"""
values = self.get_elements(column_name, GSX_NAMESPACE)
if len(values) == 0:
return None
return values[0].text
def set_value(self, column_name, value):
"""Changes the value of cell in this row under the desired column name.
Warning: if the cell contained a formula, it will be wiped out by setting
the value using the list feed since the list feed only works with
displayed values.
No client side checking is performed on the column_name, you need to
ensure that the column_name is the local tag name in the gsx tag for the
column. For example, the column_name will not contain special characters,
spaces, uppercase letters, etc.
"""
# Try to find the column in this row to change an existing value.
values = self.get_elements(column_name, GSX_NAMESPACE)
if len(values) > 0:
values[0].text = value
else:
# There is no value in this row for the desired column, so add a new
# gsx:column_name element.
new_value = ListRow(text=value)
new_value._qname = new_value._qname % (column_name,)
self._other_elements.append(new_value)
class ListsFeed(gdata.data.GDFeed):
"""An Atom feed in which each entry represents a row in a worksheet.
The first row in the worksheet is used as the column names for the values
in each row. If a header cell is empty, then a unique column ID is used
for the gsx element name.
Spaces in a column name are removed from the name of the corresponding
gsx element.
Caution: The columnNames are case-insensitive. For example, if you see
a <g |
fogcitymarathoner/djfb | facebook_example/django_facebook/canvas.py | Python | bsd-3-clause | 728 | 0 | from django.http import QueryDict
from django_facebook import settings as facebook_settings
def generate_oauth_url(scope=facebook_settings.FACEBOOK_DEFAULT_SCOPE,
next=None, extra_data=None):
query_dict = QueryDict('', True)
canvas_page = (next if next is not None else
| facebook_se | ttings.FACEBOOK_CANVAS_PAGE)
query_dict.update(dict(client_id=facebook_settings.FACEBOOK_APP_ID,
redirect_uri=canvas_page,
scope=','.join(scope)))
if extra_data:
query_dict.update(extra_data)
auth_url = 'https://www.facebook.com/dialog/oauth?%s' % (
query_dict.urlencode(), )
return auth_url
|
mstriemer/amo-validator | validator/testcases/javascript/call_definitions.py | Python | bsd-3-clause | 12,988 | 0.000231 | import math
import re
import actions
import predefinedentities
from jstypes import JSArray, JSObject, JSWrapper
# Function prototypes should implement the following:
# wrapper : The JSWrapper instace that is being called
# arguments : A list of argument nodes; untraversed
# traverser : The current traverser object
def webbrowserpersist(wrapper, arguments, traverser):
"""
Most nsIWebBrowserPersist should no longer be used, in favor of the new
Downloads.jsm interfaces.
"""
traverser.err.warning(
err_id=('testcases_javascript_call_definititions',
'webbrowserpersist'),
warning='nsIWebBrowserPersist should no longer be used',
description=('Most nsIWebBrowserPersist methods have been '
'superseded by simpler methods in Downloads.jsm, namely '
'`Downloads.fetch` and `Downloads.createDownload`. See '
'http://mzl.la/downloads-jsm for more information.'),
filename=traverser.filename,
line=traverser.line,
column=traverser.position,
context=traverser.context,
tier=4)
def webbrowserpersist_saveuri(wrapper, arguments, traverser):
"""
nsIWebBrowserPersist.saveURI requires a valid privacy context as
of Firefox 19
"""
if len(arguments) >= 7:
load_context = traverser._traverse_node(arguments[6])
if load_context.get_literal_value() is None:
traverser.err.warning(
err_id=('testcases_javascript_call_definititions',
'webbrowserpersist_saveuri'),
warning=('saveURI should not be called with a null load '
'context'),
description=('While nsIWebBrowserPersist.saveURI accepts null '
'in place of a privacy context, this usage is '
'acceptable only when no appropriate load '
'context exists.'),
filename=traverser.filename,
line=traverser.line,
column=traverser.position,
context=traverser.context,
tier=4)
webbrowserpersist(wrapper, arguments, traverser)
def xpcom_constructor(method, extend=False, mutate=False, pretraversed=False):
"""Returns a function which wraps an XPCOM class instantiation function."""
def definition(wrapper, arguments, traverser):
"""Wraps an XPCOM class instantiation function."""
if not arguments:
return None
traverser._debug('(XPCOM Encountered)')
if not pretraversed:
arguments = [traverser._traverse_node(x) for x in arguments]
argz = arguments[0]
if not argz.is_global or 'xpcom_map' not in argz.value:
argz = JSWrapper(traverser=traverser)
argz.value = {'xpcom_map': lambda: {'value': {}}}
traverser._debug('(Building XPCOM...)')
inst = traverser._build_global(
method, argz.value['xpcom_map']())
inst.value['overwritable'] = True
if extend or mutate:
# FIXME: There should be a way to get this without
# traversing the call chain twice.
parent = actions.trace_member(traverser, wrapper['callee']['object'])
if mutate and not (parent.is_global and
isinstance(parent.value, dict) and
'value' in parent.value):
# Assume that the parent object is a first class
# wrapped native
parent.value = inst.value
# FIXME: Only objects marked as global are processed
# as XPCOM instances
parent.is_global = True
if isinstance(parent.value, dict):
if extend and mutate:
if callable(parent.value['value']):
parent.value['value'] = \
parent.value['value'](t=traverser)
parent.value['value'].update(inst.value['value'])
return parent
if extend:
inst.value['value'].update(parent.value['value'])
if mutate:
parent.value = inst.value
return inst
definition.__name__ = 'xpcom_%s' % str(method)
return definition
# Global object function definitions:
def string_global(wrapper, arguments, traverser):
if not arguments:
return JSWrapper('', traverser=traverser)
arg = traverser._traverse_node(arguments[0])
value = actions._get_as_str(arg.get_literal_value())
return JSWrapper(value, traverser=traverser)
def array_global(wrapper, arguments, traverser):
output = JSArray()
if arguments:
output.elements = [traverser._traverse_node(a) for a in arguments]
return JSWrapper(output, traverser=traverser)
def number_global(wrapper, arguments, traverser):
if not arguments:
return JSWrapper(0, traverser=traverser)
arg = traverser._traverse_node(arguments[0])
try:
value = float(arg.get_literal_value())
except (ValueError, TypeError):
return traverser._build_global(
name='NaN',
entity=predefinedentities.GLOBAL_ENTITIES[u'NaN'])
return JSWrapper(value, traverser=traverser)
def boolean_global(wrapper, arguments, traverser):
if not arguments:
return JSWrapper(False, traverser=traverser)
arg = traverser._traverse_node(arguments[0])
return JSWrapper(bool(arg.get_literal_value()), traverser=traverser)
def python_wrap(func, args, nargs=False):
"""
This is a helper function that wraps Python functions and exposes them to
the JS engine. The first parameter should be the Python function to wrap.
The second parameter should be a list of tuples. Each tuple should
contain:
1. The type of value to expect:
- "string"
- "num"
2. A default value.
"""
def _process_literal(type_, literal):
if type_ == 'string':
return actions._get_as_str(literal)
elif type_ == 'num':
return actions._get_as_num(literal)
return literal
def wrap(wrapper, argum | ents, traverser):
passed_args = [traverser._traverse_node(a) for a in arguments]
params = []
| if not nargs:
# Handle definite argument lists.
for type_, def_value in args:
if passed_args:
parg = passed_args[0]
passed_args = passed_args[1:]
passed_literal = parg.get_literal_value()
passed_literal = _process_literal(type_, passed_literal)
params.append(passed_literal)
else:
params.append(def_value)
else:
# Handle dynamic argument lists.
for arg in passed_args:
literal = arg.get_literal_value()
params.append(_process_literal(args[0], literal))
traverser._debug('Calling wrapped Python function with: (%s)' %
', '.join(map(str, params)))
try:
output = func(*params)
except (ValueError, TypeError, OverflowError):
# If we cannot compute output, just return nothing.
output = None
return JSWrapper(output, traverser=traverser)
return wrap
def math_log(wrapper, arguments, traverser):
"""Return a better value than the standard python log function."""
args = [traverser._traverse_node(a) for a in arguments]
if not args:
return JSWrapper(0, traverser=traverser)
arg = actions._get_as_num(args[0].get_literal_value())
if arg == 0:
return JSWrapper(float('-inf'), traverser=traverser)
if arg < 0:
return JSWrapper(traverser=traverser)
arg = math.log(arg)
return JSWrapper(arg, traverser=traverser)
def math_random(wrapper, arguments, traverser):
"""Return a "random" value for Math.random()."""
return JSWrapper(0.5, traverser=traverser)
def math_rou |
noslenfa/tdjangorest | uw/lib/python2.7/site-packages/paramiko/dsskey.py | Python | apache-2.0 | 6,726 | 0.002081 | # Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
L{DSSKey}
"""
from Crypto.PublicKey import DSA
from Crypto.Hash import SHA
from paramiko.common import *
from paramiko import util
from paramiko.ssh_exception import SSHException
from paramiko.message import Message
from paramiko.ber import BER, BERException
from paramiko.pkey import PKey
class DSSKey (PKey):
"""
Representation of a DSS key which can be used to sign an verify SSH2
data.
"""
def __init__(self, msg=None, data=None, filename=None, password=None, vals=None, file_obj=None):
self.p = None
self.q = None
self.g = None
self.y = None
self.x = None
if file_obj is not None:
self._from_private_key(file_obj, password)
return
if filename is not None:
self._from_private_key_file(filename, password)
return
if (msg is None) and (data is not None):
msg = Message(data)
if vals is not None:
self.p, self.q, self.g, self.y = vals
else:
if msg is None:
raise SSHException('Key object may not be empty')
if msg.get_string() != 'ssh-dss':
raise SSHException('Invalid key')
self.p = msg.get_mpint()
self.q = msg.get_mpint()
self.g = msg.get_mpint()
self.y = msg.get_mpint()
self.size = util.bit_length(self.p)
def __str__(self):
m = Message()
m.add_string('ssh-dss')
m.add_mpint(self.p)
m.add_mpint(self.q)
m.add_mpint(self.g)
m.add_mpint(self.y)
return str(m)
def __hash__(self):
h = hash(self.get_name())
h = h * 37 + hash(self.p)
h = h * 37 + hash(self.q)
h = h * 37 + hash(self.g)
h = h * 37 + hash(self.y)
# h might be a long by now...
return hash(h)
def get_name(self):
return 'ssh-dss'
def get_bits(self):
return self.size
def can_sign(self):
return self.x is not None
def sign_ssh_data(self, rng, data):
digest = SHA.new(data).digest()
dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q), long(self.x)))
# generate a suitable k
qsize = len(util.deflate_long(self.q, 0))
while True:
k = util.inflate_long(rng.read(qsize), 1)
if (k > 2) and (k < self.q):
break
r, s = dss.sign(util.inflate_long(digest, 1), k)
m = Message()
m.add_string('ssh-dss')
# apparently, in rare cases, r or s may be shorter than 20 bytes!
rstr = util.deflate_long(r, 0)
sstr = util.deflate_long(s, 0)
if len(rstr) < 20:
rstr = '\x00' * (20 - len(rstr)) + rstr
| if len(sstr) < 20:
sstr = '\x00' * (20 - len(sstr)) + sstr
m.add_string(rstr + sstr)
return m
def verify_ssh_sig(self, data, msg):
if len(str(msg)) == 40:
# spies.com bug: signature has no header
sig = str(msg)
else:
kind = msg.get_string()
if kind != 'ssh-dss':
return 0
sig = msg.g | et_string()
# pull out (r, s) which are NOT encoded as mpints
sigR = util.inflate_long(sig[:20], 1)
sigS = util.inflate_long(sig[20:], 1)
sigM = util.inflate_long(SHA.new(data).digest(), 1)
dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q)))
return dss.verify(sigM, (sigR, sigS))
def _encode_key(self):
if self.x is None:
raise SSHException('Not enough key information')
keylist = [ 0, self.p, self.q, self.g, self.y, self.x ]
try:
b = BER()
b.encode(keylist)
except BERException:
raise SSHException('Unable to create ber encoding of key')
return str(b)
def write_private_key_file(self, filename, password=None):
self._write_private_key_file('DSA', filename, self._encode_key(), password)
def write_private_key(self, file_obj, password=None):
self._write_private_key('DSA', file_obj, self._encode_key(), password)
def generate(bits=1024, progress_func=None):
"""
Generate a new private DSS key. This factory function can be used to
generate a new host key or authentication key.
@param bits: number of bits the generated key should be.
@type bits: int
@param progress_func: an optional function to call at key points in
key generation (used by C{pyCrypto.PublicKey}).
@type progress_func: function
@return: new private key
@rtype: L{DSSKey}
"""
dsa = DSA.generate(bits, rng.read, progress_func)
key = DSSKey(vals=(dsa.p, dsa.q, dsa.g, dsa.y))
key.x = dsa.x
return key
generate = staticmethod(generate)
### internals...
def _from_private_key_file(self, filename, password):
data = self._read_private_key_file('DSA', filename, password)
self._decode_key(data)
def _from_private_key(self, file_obj, password):
data = self._read_private_key('DSA', file_obj, password)
self._decode_key(data)
def _decode_key(self, data):
# private key file contains:
# DSAPrivateKey = { version = 0, p, q, g, y, x }
try:
keylist = BER(data).decode()
except BERException, x:
raise SSHException('Unable to parse key file: ' + str(x))
if (type(keylist) is not list) or (len(keylist) < 6) or (keylist[0] != 0):
raise SSHException('not a valid DSA private key file (bad ber encoding)')
self.p = keylist[1]
self.q = keylist[2]
self.g = keylist[3]
self.y = keylist[4]
self.x = keylist[5]
self.size = util.bit_length(self.p)
|
epeios-q37/epeios | other/exercises/basics/workshop/_/display.py | Python | agpl-3.0 | 1,654 | 0.011487 | # coding: utf-8
"""
MIT License
Copyright (c) 2019 Claude SIMON (https://q37.info/s/rmnmqd49)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, sub | ject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABI | LITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import workshop._._ as _
_OUTPUT = "output"
def _dom():
return _.dom()
def clear():
_dom().setLayout(_OUTPUT, "<span/>")
def display(text):
output = _.Atlas.createHTML()
output.putTagAndValue("h1", text)
_dom().appendLayout(_OUTPUT, output)
def clearAndDisplay(text):
output = _.Atlas.createHTML()
output.putTagAndValue("h1", text)
_dom().setLayout(_OUTPUT, output)
def alert(text):
_dom().alert(text)
def confirm(text):
return _dom().confirm(text)
|
hgrimelid/feincms | feincms/content/comments/models.py | Python | bsd-3-clause | 3,732 | 0.003215 | # ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
#
# Created by Martin J. Laubach on 08.01.10.
# skyl wuz here (11.05.10)
#
# ------------------------------------------------------------------------
"""
Embed a comment list and comment form anywhere. Uses the standard
``django.contrib.comments`` application.
"""
from django import forms
from django.contrib import comments
from django.contrib.comments.models import Comment
from django.db import models
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
# ------------------------------------------------------------------------
class CommentsContent(models.Model):
comments_enabled = models.BooleanField(_('enabled'), default=True, help_text=_('New comments may be added'))
class Meta:
abstract = True
verbose_name = _('comments')
verbose_name_plural = _('comments')
@classmethod
def initialize_type(cls):
from feincms.admin.editor import ItemEditorForm
class CommentContentAdminForm(ItemEditorForm):
def __init__(self, *args, **kwargs):
super(CommentContentAdminForm, self).__init__(*args, **kwargs)
parent = kwargs.get('instance', None)
if parent is not None:
f = self.fields['comments_enabled']
| r = f.help_text
r += u'<hr />'
for c in Comment.objects.for_model(parent.parent).order_by('-submit_date'):
r += | '<div class="form-row" style="margin-left: 60px"># %d <a href="/admin/comments/comment/%d/">%s</a> - %s</div>' % \
( c.id, c.id, c.comment[:80], c.is_public and _('public') or _('not public') )
f.help_text = r
cls.feincms_item_editor_form = CommentContentAdminForm
def process(self, request):
parent_type = self.parent.__class__.__name__.lower()
comment_page = self.parent
if hasattr(comment_page, 'original_translation') and comment_page.original_translation:
comment_page = comment_page.original_translation
f = None
if self.comments_enabled and request.POST:
# I guess the drawback is that this page can't handle any other types of posts
# just the comments for right now, but if we just post to the current path
# and handle it this way .. at least it works for now.
#extra = request._feincms_extra_context.get('page_extra_path', ())
#if len(extra) > 0 and extra[0] == u"post-comment":
from django.contrib.comments.views.comments import post_comment
r = post_comment(request, next=comment_page.get_absolute_url())
if isinstance(r, HttpResponseRedirect):
return r
f = comments.get_form()(comment_page, data=request.POST)
if f is None:
f = comments.get_form()(comment_page)
self.rendered_output = render_to_string([
'content/comments/%s.html' % parent_type,
'content/comments/default-site.html',
'content/comments/default.html',
], RequestContext(request, {
'content': self,
'feincms_page': self.parent,
'parent': comment_page,
'form': f,
}))
def render(self, **kwargs):
return getattr(self, 'rendered_output', u'')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.