| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, ⌀ = null) |
|---|---|---|---|---|
dotKom/onlineweb4
|
refs/heads/develop
|
apps/webshop/migrations/0004_auto_20151118_2242.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('webshop', '0003_auto_20151118_2208'),
]
operations = [
migrations.AlterField(
model_name='orderline',
name='datetime',
field=models.DateTimeField(null=True, blank=True),
),
]
|
deepakselvaraj/federated-horizon
|
refs/heads/master
|
horizon/browsers/__init__.py
|
11
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from horizon.browsers.base import ResourceBrowser # noqa
from horizon.browsers.views import ResourceBrowserView # noqa
assert ResourceBrowser
assert ResourceBrowserView
|
erc7as/cs3240-labdemo
|
refs/heads/master
|
hello.py
|
1
|
__author__ = 'erc7as'
from helper import greeting
from check import check
greeting('hello')
greeting('goodbye')
check('yo')
|
OpenLD/enigma2-wetek
|
refs/heads/master
|
lib/python/Components/DiskInfo.py
|
45
|
from GUIComponent import GUIComponent
from VariableText import VariableText
from os import statvfs
from enigma import eLabel
# TODO: Harddisk.py has similar functions, but only similar.
# Fix this to use the same code.
class DiskInfo(VariableText, GUIComponent):
FREE = 0
USED = 1
SIZE = 2
def __init__(self, path, type, update = True):
GUIComponent.__init__(self)
VariableText.__init__(self)
self.type = type
self.path = path
if update:
self.update()
def update(self):
try:
stat = statvfs(self.path)
except OSError:
return -1
if self.type == self.FREE:
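# Scale the free-space figure to kB, MB or GB so the displayed value stays short.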
try:
percent = '(' + str((100 * stat.f_bavail) // stat.f_blocks) + '%)'
free = stat.f_bfree * stat.f_bsize
if free < 10000000:
free = _("%d kB") % (free >> 10)
elif free < 10000000000:
free = _("%d MB") % (free >> 20)
else:
free = _("%d Gb") % (free >> 30)
self.setText(" ".join((free, percent, _("free diskspace"))))
except Exception:
# occurs when f_blocks is 0 or a similar error
self.setText("-?-")
GUI_WIDGET = eLabel
|
microcom/hr
|
refs/heads/8.0
|
hr_expense_invoice/models/__init__.py
|
23
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from . import hr_expense_expense
|
bazingasheldon/tv
|
refs/heads/master
|
raspberry/p2pstreams scraper/glob.py
|
2
|
import xbmc, xbmcgui, xbmcaddon
import urllib, urllib2
import os, stat
addon = xbmcaddon.Addon('plugin.video.streams')
def addon_log(string):
DEBUG = addon.getSetting('debug')
ADDON_VERSION = addon.getAddonInfo('version')
if DEBUG == 'true':
if isinstance(string, unicode):
string = string.encode('utf-8')
xbmc.log("[plugin.video.streams-%s]: %s" %(ADDON_VERSION, string))
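# Show a progress dialog while downloading url to dest; _pbhook below reports progress.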
def Downloader(url,dest,description,heading):
dp = xbmcgui.DialogProgress()
dp.create(heading,description,url)
dp.update(0)
urllib.urlretrieve(url,dest,lambda nb, bs, fs, url=url: _pbhook(nb,bs,fs,dp))
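# urlretrieve calls this hook with (block count, block size, total file size);
# it updates the progress dialog and closes it if the user cancels.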
def _pbhook(numblocks, blocksize, filesize, dp=None):
try:
percent = int((int(numblocks)*int(blocksize)*100)/int(filesize))
dp.update(percent)
except Exception:
percent = 100
dp.update(percent)
if dp.iscanceled():
#raise KeyboardInterrupt
dp.close()
def message(title, message):
dialog = xbmcgui.Dialog()
dialog.ok(title, message)
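# Despite its name, is_exe returns nothing: it makes sure an existing file
# has its executable bit set, adding it via chmod when missing.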
def is_exe(fpath):
if os.path.isfile(fpath):
if not os.access(fpath, os.X_OK):
st = os.stat(fpath)
os.chmod(fpath, st.st_mode | stat.S_IEXEC)
|
dorotapalicova/GoldDigger
|
refs/heads/Fixer.io
|
gold_digger/api_server/app.py
|
1
|
# -*- coding: utf-8 -*-
from .api_server import API
app = API()
|
yogesh2021/qds-sdk-py
|
refs/heads/master
|
qds_sdk/scheduler.py
|
8
|
import json
from qds_sdk.qubole import Qubole
from qds_sdk.resource import Resource
from argparse import ArgumentParser
from qds_sdk.commands import *
from qds_sdk.actions import *
class SchedulerCmdLine:
"""
qds_sdk.SchedulerCmdLine is the interface used by qds.py.
"""
@staticmethod
def parsers():
argparser = ArgumentParser(prog="qds.py scheduler",
description="Scheduler client for Qubole Data Service.")
subparsers = argparser.add_subparsers()
#Create
create = subparsers.add_parser("create",
help="Create a new schedule")
create.add_argument("--data", dest="data", required=True,
help="Path to a file that contains scheduler attributes as a json object")
create.set_defaults(func=SchedulerCmdLine.create)
#List
list = subparsers.add_parser("list",
help="List all schedulers")
list.add_argument("--fields", nargs="*", dest="fields",
help="List of fields to show")
list.add_argument("--per-page", dest="per_page",
help="Number of items per page")
list.add_argument("--page", dest="page",
help="Page Number")
list.set_defaults(func=SchedulerCmdLine.list)
#View
view = subparsers.add_parser("view",
help="View a specific schedule")
view.add_argument("id", help="Numeric id of the schedule")
view.add_argument("--fields", nargs="*", dest="fields",
help="List of fields to show")
view.set_defaults(func=SchedulerCmdLine.view)
#View by name
view_by_name = subparsers.add_parser("view_by_name",
help="View a specific schedule")
view_by_name.add_argument("name", help="Name of the schedule")
view_by_name.add_argument("--fields", nargs="*", dest="fields",
help="List of fields to show")
view_by_name.set_defaults(func=SchedulerCmdLine.view_by_name)
#Suspend
suspend = subparsers.add_parser("suspend",
help="Suspend a specific schedule")
suspend.add_argument("id", help="Numeric id or name of the schedule")
suspend.set_defaults(func=SchedulerCmdLine.suspend)
#Resume
resume = subparsers.add_parser("resume",
help="Resume a specific schedule")
resume.add_argument("id", help="Numeric id or name of the schedule")
resume.set_defaults(func=SchedulerCmdLine.resume)
#Kill
kill = subparsers.add_parser("kill",
help="Kill a specific schedule")
kill.add_argument("id", help="Numeric id or name of the schedule")
kill.set_defaults(func=SchedulerCmdLine.kill)
#List Actions
list_actions = subparsers.add_parser("list-actions",
help="List actions of a specific schedule")
list_actions.add_argument("id", help="Numeric id or name of the schedule")
list_actions.add_argument("--sequence_id", dest="sequence_id", help="Sequence id of the actions to list")
list_actions.add_argument("--fields", nargs="*", dest="fields",
help="List of fields to show")
list_actions.add_argument("--per-page", dest="per_page",
help="Number of items per page")
list_actions.add_argument("--page", dest="page",
help="Page Number")
list_actions.set_defaults(func=SchedulerCmdLine.list_actions)
#List Instances
list_instances = subparsers.add_parser("list-instances",
help="List instances of a specific schedule")
list_instances.add_argument("id", help="Numeric id or name of the schedule")
list_instances.add_argument("--fields", nargs="*", dest="fields",
help="List of fields to show")
list_instances.add_argument("--per-page", dest="per_page",
help="Number of items per page")
list_instances.add_argument("--page", dest="page",
help="Page Number")
list_instances.set_defaults(func=SchedulerCmdLine.list_instances)
rerun = subparsers.add_parser("rerun",
help="Rerun an instance of a schedule")
rerun.add_argument("id", help="Numeric id or name of the schedule")
rerun.add_argument("instance_id", help="Numeric id of the instance")
rerun.set_defaults(func=SchedulerCmdLine.rerun)
return argparser
@staticmethod
def run(args):
parser = SchedulerCmdLine.parsers()
parsed = parser.parse_args(args)
return parsed.func(parsed)
@staticmethod
def filter_fields(schedule, fields):
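# Return a copy of the schedule's attributes restricted to the requested fields.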
filtered = {}
for field in fields:
filtered[field] = schedule[field]
return filtered
@staticmethod
def create(args):
with open(args.data) as f:
spec = json.load(f)
schedule = Scheduler(spec)
return json.dumps(schedule.attributes, sort_keys=True, indent=4)
@staticmethod
def list(args):
schedlist = Scheduler.list(args.page, args.per_page)
if args.fields:
for s in schedlist:
s.attributes = SchedulerCmdLine.filter_fields(s.attributes, args.fields)
return json.dumps(schedlist, default=lambda o: o.attributes, sort_keys=True, indent=4)
@staticmethod
def view(args):
schedule = Scheduler.find(args.id)
if args.fields:
schedule.attributes = SchedulerCmdLine.filter_fields(schedule.attributes, args.fields)
return json.dumps(schedule.attributes, sort_keys=True, indent=4)
@staticmethod
def view_by_name(args):
schedule = Scheduler.find_by_name(args.name)
if schedule is None:
return "Schedule '%s' not found" % args.name
if args.fields:
schedule.attributes = SchedulerCmdLine.filter_fields(schedule.attributes, args.fields)
return json.dumps(schedule.attributes, sort_keys=True, indent=4)
@staticmethod
def suspend(args):
schedule = Scheduler.find(args.id)
return json.dumps(schedule.suspend(), sort_keys=True, indent=4)
@staticmethod
def resume(args):
schedule = Scheduler.find(args.id)
return json.dumps(schedule.resume(), sort_keys=True, indent=4)
@staticmethod
def kill(args):
schedule = Scheduler.find(args.id)
return json.dumps(schedule.kill(), sort_keys=True, indent=4)
@staticmethod
def list_actions(args):
schedule = Scheduler.find(args.id)
actlist = schedule.list_actions(args.sequence_id, args.page, args.per_page)
if args.fields:
for a in actlist:
a.attributes = ActionCmdLine.filter_fields(a.attributes, args.fields)
return json.dumps(actlist, default=lambda o: o.attributes,
sort_keys=True, indent=4)
@staticmethod
def list_instances(args):
schedule = Scheduler.find(args.id)
cmdlist = schedule.list_instances(args.page, args.per_page)
if args.fields:
for cmd in cmdlist:
cmd.attributes = SchedulerCmdLine.filter_fields(cmd.attributes, args.fields)
return json.dumps(cmdlist, default=lambda o: o.attributes,
sort_keys=True, indent=4)
@staticmethod
def rerun(args):
schedule = Scheduler.find(args.id)
return schedule.rerun(args.instance_id)
class Scheduler(Resource):
"""
qds_sdk.Schedule is the base Qubole Schedule class.
"""
""" all commands use the /scheduler endpoint"""
rest_entity_path = "scheduler"
@staticmethod
def list(page = None, per_page = None):
conn = Qubole.agent()
url_path = Scheduler.rest_entity_path
page_attr = []
if page is not None:
page_attr.append("page=%s" % page)
if per_page is not None:
page_attr.append("per_page=%s" % per_page)
if page_attr:
url_path = "%s?%s" % (Scheduler.rest_entity_path, "&".join(page_attr))
#Todo Page numbers are thrown away right now
schedjson = conn.get(url_path)
schedlist = []
for s in schedjson["schedules"]:
schedlist.append(Scheduler(s))
return schedlist
@staticmethod
def find_by_name(name):
conn = Qubole.agent()
if name is not None:
schedjson = conn.get(Scheduler.rest_entity_path, params={"name":name})
if schedjson["schedules"]:
return Scheduler(schedjson["schedules"][0])
return None
def suspend(self):
conn = Qubole.agent()
data = {"status": "suspend"}
return conn.put(self.element_path(self.id), data)
def resume(self):
conn = Qubole.agent()
data = {"status": "resume"}
return conn.put(self.element_path(self.id), data)
def kill(self):
conn = Qubole.agent()
data = {"status": "kill"}
return conn.put(self.element_path(self.id), data)
def list_actions(self, sequence_id = None, page=None, per_page=None):
conn = Qubole.agent()
url_path = self.element_path(self.id) + "/" + "actions"
if sequence_id is not None:
url_path = url_path + "/" + str(sequence_id)
params = {}
if page is not None:
params['page'] = page
if per_page is not None:
params['per_page'] = per_page
#Todo Page numbers are thrown away right now
actjson = conn.get(url_path, params)
actlist = []
for act in actjson["actions"]:
actlist.append(Action(act))
return actlist
def list_instances(self, page=None, per_page=None):
conn = Qubole.agent()
url_path = self.element_path(self.id) + "/" + "instances"
page_attr = []
if page is not None:
page_attr.append("page=%s" % page)
if per_page is not None:
page_attr.append("per_page=%s" % per_page)
if page_attr:
url_path = "%s?%s" % (url_path, "&".join(page_attr))
#Todo Page numbers are thrown away right now
cmdjson = conn.get(url_path)
cmdlist = []
for cmd in cmdjson["commands"]:
cmdclass = globals()[cmd["command_type"]]
onecmd = cmdclass(cmd)
cmdlist.append(onecmd)
return cmdlist
def rerun(self, instance_id):
conn = Qubole.agent()
url_path = self.element_path(self.id) + "/instances/" + str(instance_id) + "/rerun"
return conn.post(url_path)['status']
|
tiagoantao/bioinf-python
|
refs/heads/master
|
notebooks/08_Advanced/Spark.py
|
2
|
from __future__ import division, print_function
from pyspark import SparkContext
sc = SparkContext(appName='Processing PLINK frequencies')
plink_freqs = sc.wholeTextFiles('tsi-*-*.frq')
freqs = plink_freqs.mapValues(lambda content:
[float([tok for tok in l.split(' ')
if tok != ''][-2])
for l in content.split('\n')[1:-1]])
first = freqs.first()
print(first)
flat = freqs.flatMapValues(lambda my_freqs: my_freqs)
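# Fold each (file, frequency) element into a running (sum, count) accumulator
# so the mean frequency can be computed in a single pass.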
def bla(e, acc):
return (acc[0] + e[1], acc[1] + 1)
my_sum, cnt = flat.fold((0.0, 0), bla)
#my_sum, cnt = flat.fold((0.0, 0), lambda e, acc: (acc[0] + e[1], acc[1] + 1))
print(my_sum, cnt, my_sum / cnt)
|
gorel/dli-reports
|
refs/heads/master
|
dli_app/mod_wiki/models.py
|
1
|
"""Models for the wiki module
Author: Logan Gore
This file is responsible for defining models that belong in the wiki module.
"""
import datetime
from dli_app import db
from flask_login import current_user
class WikiPage(db.Model):
"""Model for a page on the wiki"""
__tablename__ = 'wiki_page'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), index=True, unique=True)
content = db.Column(db.Text)
modtime = db.Column(db.String(32))
editor = db.Column(db.String(64))
views = db.Column(db.Integer, index=True)
def __init__(self, name, content):
"""Initiialize a WikiPage model"""
self.name = name
self.content = content
self.modtime = datetime.datetime.now().strftime('%m/%d/%Y %I:%M %p')
if current_user:
self.editor = current_user.name
else:
self.editor = 'DLI'
self.views = 0
def __repr__(self):
"""Return a descriptive representation of a WikiPage"""
return '<WikiPage %r>' % self.name
def with_toc(self):
"""Return the page contents with a Table of Contents header"""
full_text = """
[TOC]
{content}
""".format(content=self.content)
return full_text
|
ampax/edx-platform-backup
|
refs/heads/live
|
common/djangoapps/student/tests/test_microsite.py
|
24
|
"""
Test for User Creation from Micro-Sites
"""
from django.test import TestCase
from student.models import UserSignupSource
import mock
import json
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
FAKE_MICROSITE = {
"SITE_NAME": "openedx.localhost",
"university": "fakeuniversity",
"course_org_filter": "fakeorg",
"REGISTRATION_EXTRA_FIELDS": {
"address1": "required",
"city": "required",
"state": "required",
"country": "required",
"company": "required",
"title": "required"
},
"extended_profile_fields": [
"address1", "state", "company", "title"
]
}
def fake_site_name(name, default=None):
"""
create a fake microsite site name
"""
if name == 'SITE_NAME':
return 'openedx.localhost'
else:
return default
def fake_microsite_get_value(name, default=None):
"""
return a fake microsite configuration value
"""
return FAKE_MICROSITE.get(name, default)
class TestMicrosite(TestCase):
"""Test for Account Creation from a white labeled Micro-Sites"""
def setUp(self):
self.username = "test_user"
self.url = reverse("create_account")
self.params = {
"username": self.username,
"email": "test@example.org",
"password": "testpass",
"name": "Test User",
"honor_code": "true",
"terms_of_service": "true",
}
self.extended_params = dict(self.params.items() + {
"address1": "foo",
"city": "foo",
"state": "foo",
"country": "foo",
"company": "foo",
"title": "foo"
}.items())
@mock.patch("microsite_configuration.microsite.get_value", fake_site_name)
def test_user_signup_source(self):
"""
test that creating a user from the microsite saves a record
in the UserSignupSource table
"""
response = self.client.post(self.url, self.params)
self.assertEqual(response.status_code, 200)
self.assertGreater(len(UserSignupSource.objects.filter(site='openedx.localhost')), 0)
def test_user_signup_from_non_micro_site(self):
"""
test that creating a user from a non-microsite does not save a record
in the UserSignupSource table
"""
response = self.client.post(self.url, self.params)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(UserSignupSource.objects.filter(site='openedx.localhost')), 0)
@mock.patch("microsite_configuration.microsite.get_value", fake_microsite_get_value)
def test_user_signup_missing_enhanced_profile(self):
"""
test creating a user from the microsite without providing any of the
microsite-specific profile information
"""
response = self.client.post(self.url, self.params)
self.assertEqual(response.status_code, 400)
@mock.patch("microsite_configuration.microsite.get_value", fake_microsite_get_value)
def test_user_signup_including_enhanced_profile(self):
"""
test creating a user from the microsite while providing all of the
microsite-specific profile information
"""
response = self.client.post(self.url, self.extended_params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(username=self.username)
meta = json.loads(user.profile.meta)
self.assertEqual(meta['address1'], 'foo')
self.assertEqual(meta['state'], 'foo')
self.assertEqual(meta['company'], 'foo')
self.assertEqual(meta['title'], 'foo')
|
laosiaudi/tensorflow
|
refs/heads/master
|
tensorflow/python/training/adagrad_test.py
|
12
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for aggregate operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class AdagradOptimizerTest(tf.test.TestCase):
def doTestBasic(self, use_locking=False):
for dtype in [tf.half, tf.float32, tf.float64]:
with self.test_session():
var0 = tf.Variable([1.0, 2.0], dtype=dtype)
var1 = tf.Variable([3.0, 4.0], dtype=dtype)
grads0 = tf.constant([0.1, 0.1], dtype=dtype)
grads1 = tf.constant([0.01, 0.01], dtype=dtype)
ada_opt = tf.train.AdagradOptimizer(3.0,
initial_accumulator_value=0.1,
use_locking=use_locking)
ada_update = ada_opt.apply_gradients(zip(
[grads0, grads1], [var0, var1]))
tf.global_variables_initializer().run()
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
# Run 3 steps of adagrad
for _ in range(3):
ada_update.run()
# Validate updated params
self.assertAllCloseAccordingToType(
np.array([-1.6026098728179932, -0.6026098728179932]), var0.eval())
self.assertAllCloseAccordingToType(
np.array([2.715679168701172, 3.715679168701172]), var1.eval())
def testBasic(self):
self.doTestBasic(use_locking=False)
def testBasicLocked(self):
self.doTestBasic(use_locking=True)
def testTensorLearningRate(self):
for dtype in [tf.half, tf.float32, tf.float64]:
with self.test_session():
var0 = tf.Variable([1.0, 2.0], dtype=dtype)
var1 = tf.Variable([3.0, 4.0], dtype=dtype)
grads0 = tf.constant([0.1, 0.1], dtype=dtype)
grads1 = tf.constant([0.01, 0.01], dtype=dtype)
ada_opt = tf.train.AdagradOptimizer(
tf.constant(3.0),
initial_accumulator_value=0.1)
ada_update = ada_opt.apply_gradients(zip(
[grads0, grads1], [var0, var1]))
tf.global_variables_initializer().run()
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
# Run 3 steps of adagrad
for _ in range(3):
ada_update.run()
# Validate updated params
self.assertAllCloseAccordingToType(
np.array([-1.6026098728179932, -0.6026098728179932]), var0.eval())
self.assertAllCloseAccordingToType(
np.array([2.715679168701172, 3.715679168701172]), var1.eval())
def testSparseBasic(self):
for dtype in [tf.half, tf.float32, tf.float64]:
with self.test_session():
var0 = tf.Variable([[1.0], [2.0]], dtype=dtype)
var1 = tf.Variable([[3.0], [4.0]], dtype=dtype)
grads0 = tf.IndexedSlices(
tf.constant([0.1], shape=[1, 1], dtype=dtype),
tf.constant([0]),
tf.constant([2, 1]))
grads1 = tf.IndexedSlices(
tf.constant([0.01], shape=[1, 1], dtype=dtype),
tf.constant([1]),
tf.constant([2, 1]))
ada_opt = tf.train.AdagradOptimizer(3.0, initial_accumulator_value=0.1)
ada_update = ada_opt.apply_gradients(zip(
[grads0, grads1], [var0, var1]))
tf.global_variables_initializer().run()
# Fetch params to validate initial values
self.assertAllClose([[1.0], [2.0]], var0.eval())
self.assertAllClose([[3.0], [4.0]], var1.eval())
# Run 3 steps of adagrad
for _ in range(3):
ada_update.run()
# Validate updated params
self.assertAllCloseAccordingToType(
np.array([[-1.6026098728179932], [2.0]]), var0.eval())
self.assertAllCloseAccordingToType(
np.array([[3.0], [3.715679168701172]]), var1.eval())
def testSparseStability(self):
for dtype in [tf.half, tf.float32, tf.float64]:
with self.test_session():
shape = [1, 6]
var0 = tf.Variable(
[[0.00872496, -0.106952, 0.110467, 0.226505, -0.0147257,
-0.0105945]],
dtype=dtype)
grads0 = tf.IndexedSlices(
tf.constant(
[[-5.91278e-05, 5.31673e-05, -2.5779e-06, 4.29153e-05,
-8.4877e-05, -9.48906e-05]],
shape=shape,
dtype=dtype),
tf.constant([0]),
tf.constant(shape))
ada_opt = tf.train.AdagradOptimizer(1.0, initial_accumulator_value=0.1)
ada_update = ada_opt.apply_gradients(zip([grads0], [var0]))
self.assertEqual(["accumulator"], ada_opt.get_slot_names())
slot0 = ada_opt.get_slot(var0, "accumulator")
init = tf.global_variables_initializer()
for _ in range(100):
init.run()
ada_update.run()
self.assertAllCloseAccordingToType(
np.array([[0.1, 0.1, 0.1, 0.1, 0.1, 0.1]]), slot0.eval())
self.assertAllCloseAccordingToType(
np.array([[0.00891194, -0.10712013, 0.11047515, 0.22636929, -
0.0144573, -0.01029443]]), var0.eval())
def testSharing(self):
for dtype in [tf.half, tf.float32, tf.float64]:
with self.test_session():
var0 = tf.Variable([1.0, 2.0], dtype=dtype)
var1 = tf.Variable([3.0, 4.0], dtype=dtype)
grads0 = tf.constant([0.1, 0.1], dtype=dtype)
grads1 = tf.constant([0.01, 0.01], dtype=dtype)
ada_opt = tf.train.AdagradOptimizer(3.0)
# Apply the optimizer twice. Both applications will use
# the same accums.
ada_update1 = ada_opt.apply_gradients(zip(
[grads0, grads1], [var0, var1]))
ada_update2 = ada_opt.apply_gradients(zip(
[grads0, grads1], [var0, var1]))
self.assertEqual(["accumulator"], ada_opt.get_slot_names())
slot0 = ada_opt.get_slot(var0, "accumulator")
self.assertEqual(slot0.get_shape(), var0.get_shape())
slot1 = ada_opt.get_slot(var1, "accumulator")
self.assertEqual(slot1.get_shape(), var1.get_shape())
tf.global_variables_initializer().run()
# Fetch params to validate initial values.
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
# Mix the first and the second adagrad for 3 steps.
ada_update1.run()
ada_update2.run()
ada_update1.run()
# Validate updated params (the same as with only 1 Adagrad).
self.assertAllCloseAccordingToType(
np.array([-1.6026098728179932, -0.6026098728179932]), var0.eval())
self.assertAllCloseAccordingToType(
np.array([2.715679168701172, 3.715679168701172]), var1.eval())
if __name__ == "__main__":
tf.test.main()
|
mastizada/pontoon
|
refs/heads/master
|
pontoon/base/migrations/0096_add_rejected_option_to_translation.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-08-01 15:32
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('base', '0095_pontoon_intro_permalink_prefix'),
]
operations = [
migrations.AddField(
model_name='translation',
name='rejected',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='translation',
name='rejected_date',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='translation',
name='rejected_user',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='rejected_translations', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='translation',
name='unrejected_date',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='translation',
name='unrejected_user',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='unrejected_translations', to=settings.AUTH_USER_MODEL),
),
]
|
garbled1/ansible
|
refs/heads/devel
|
test/units/parsing/utils/test_jsonify.py
|
119
|
# -*- coding: utf-8 -*-
# (c) 2016, James Cammarata <jimi@sngx.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.parsing.utils.jsonify import jsonify
class TestJsonify(unittest.TestCase):
def test_jsonify_simple(self):
self.assertEqual(jsonify(dict(a=1, b=2, c=3)), '{"a": 1, "b": 2, "c": 3}')
def test_jsonify_simple_format(self):
res = jsonify(dict(a=1, b=2, c=3), format=True)
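# Strip indentation and newlines so only the JSON content, not the
# pretty-printed whitespace, is compared.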
cleaned = "".join([x.strip() for x in res.splitlines()])
self.assertEqual(cleaned, '{"a": 1,"b": 2,"c": 3}')
def test_jsonify_unicode(self):
self.assertEqual(jsonify(dict(toshio=u'くらとみ')), u'{"toshio": "くらとみ"}')
def test_jsonify_empty(self):
self.assertEqual(jsonify(None), '{}')
|
augustozuniga/arisgames
|
refs/heads/master
|
zxing-master/cpp/scons/scons-local-2.0.0.final.0/SCons/compat/_scons_collections.py
|
34
|
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__doc__ = """
collections compatibility module for older (pre-2.4) Python versions
This does not not NOT (repeat, *NOT*) provide complete collections
functionality. It only wraps the portions of collections functionality
used by SCons, in an interface that looks enough like collections for
our purposes.
"""
__revision__ = "src/engine/SCons/compat/_scons_collections.py 5023 2010/06/14 22:05:46 scons"
# Use exec to hide old names from fixers.
exec("""if True:
from UserDict import UserDict
from UserList import UserList
from UserString import UserString""")
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
richardotis/scipy
|
refs/heads/master
|
scipy/optimize/tests/test_optimize.py
|
18
|
"""
Unit tests for optimization routines from optimize.py
Authors:
Ed Schofield, Nov 2005
Andrew Straw, April 2008
To run it in its simplest form::
nosetests test_optimize.py
"""
from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from numpy.testing import (assert_raises, assert_allclose, assert_equal,
assert_, TestCase, run_module_suite, dec,
assert_almost_equal)
from scipy._lib._testutils import suppressed_stdout
from scipy import optimize
def test_check_grad():
# Verify if check_grad is able to estimate the derivative of the
# logistic function.
def logit(x):
return 1 / (1 + np.exp(-x))
def der_logit(x):
return np.exp(-x) / (1 + np.exp(-x))**2
x0 = np.array([1.5])
r = optimize.check_grad(logit, der_logit, x0)
assert_almost_equal(r, 0)
r = optimize.check_grad(logit, der_logit, x0, epsilon=1e-6)
assert_almost_equal(r, 0)
# Check if the epsilon parameter is being considered.
r = abs(optimize.check_grad(logit, der_logit, x0, epsilon=1e-1) - 0)
assert_(r > 1e-7)
class CheckOptimize(object):
""" Base test case for a simple constrained entropy maximization problem
(the machine translation example of Berger et al in
Computational Linguistics, vol 22, num 1, pp 39--72, 1996.)
"""
def setUp(self):
self.F = np.array([[1,1,1],[1,1,0],[1,0,1],[1,0,0],[1,0,0]])
self.K = np.array([1., 0.3, 0.5])
self.startparams = np.zeros(3, np.float64)
self.solution = np.array([0., -0.524869316, 0.487525860])
self.maxiter = 1000
self.funccalls = 0
self.gradcalls = 0
self.trace = []
def func(self, x):
self.funccalls += 1
if self.funccalls > 6000:
raise RuntimeError("too many iterations in optimization routine")
log_pdot = np.dot(self.F, x)
logZ = np.log(sum(np.exp(log_pdot)))
f = logZ - np.dot(self.K, x)
self.trace.append(x)
return f
def grad(self, x):
self.gradcalls += 1
log_pdot = np.dot(self.F, x)
logZ = np.log(sum(np.exp(log_pdot)))
p = np.exp(log_pdot - logZ)
return np.dot(self.F.transpose(), p) - self.K
def hess(self, x):
log_pdot = np.dot(self.F, x)
logZ = np.log(sum(np.exp(log_pdot)))
p = np.exp(log_pdot - logZ)
return np.dot(self.F.T,
np.dot(np.diag(p), self.F - np.dot(self.F.T, p)))
def hessp(self, x, p):
return np.dot(self.hess(x), p)
class CheckOptimizeParameterized(CheckOptimize):
@suppressed_stdout
def test_cg(self):
# conjugate gradient optimization routine
if self.use_wrapper:
opts = {'maxiter': self.maxiter, 'disp': self.disp,
'return_all': False}
res = optimize.minimize(self.func, self.startparams, args=(),
method='CG', jac=self.grad,
options=opts)
params, fopt, func_calls, grad_calls, warnflag = \
res['x'], res['fun'], res['nfev'], res['njev'], res['status']
else:
retval = optimize.fmin_cg(self.func, self.startparams,
self.grad, (), maxiter=self.maxiter,
full_output=True, disp=self.disp,
retall=False)
(params, fopt, func_calls, grad_calls, warnflag) = retval
assert_allclose(self.func(params), self.func(self.solution),
atol=1e-6)
# Ensure that function call counts are 'known good'; these are from
# Scipy 0.7.0. Don't allow them to increase.
assert_(self.funccalls == 9, self.funccalls)
assert_(self.gradcalls == 7, self.gradcalls)
# Ensure that the function behaves the same; this is from Scipy 0.7.0
assert_allclose(self.trace[2:4],
[[0, -0.5, 0.5],
[0, -5.05700028e-01, 4.95985862e-01]],
atol=1e-14, rtol=1e-7)
@suppressed_stdout
def test_bfgs(self):
# Broyden-Fletcher-Goldfarb-Shanno optimization routine
if self.use_wrapper:
opts = {'maxiter': self.maxiter, 'disp': self.disp,
'return_all': False}
res = optimize.minimize(self.func, self.startparams,
jac=self.grad, method='BFGS', args=(),
options=opts)
params, fopt, gopt, Hopt, func_calls, grad_calls, warnflag = (
res['x'], res['fun'], res['jac'], res['hess_inv'],
res['nfev'], res['njev'], res['status'])
else:
retval = optimize.fmin_bfgs(self.func, self.startparams, self.grad,
args=(), maxiter=self.maxiter,
full_output=True, disp=self.disp,
retall=False)
(params, fopt, gopt, Hopt, func_calls, grad_calls, warnflag) = retval
assert_allclose(self.func(params), self.func(self.solution),
atol=1e-6)
# Ensure that function call counts are 'known good'; these are from
# Scipy 0.7.0. Don't allow them to increase.
assert_(self.funccalls == 10, self.funccalls)
assert_(self.gradcalls == 8, self.gradcalls)
# Ensure that the function behaves the same; this is from Scipy 0.7.0
assert_allclose(self.trace[6:8],
[[0, -5.25060743e-01, 4.87748473e-01],
[0, -5.24885582e-01, 4.87530347e-01]],
atol=1e-14, rtol=1e-7)
@suppressed_stdout
def test_bfgs_infinite(self):
# Test corner case where -Inf is the minimum. See gh-2019.
func = lambda x: -np.e**-x
fprime = lambda x: -func(x)
x0 = [0]
olderr = np.seterr(over='ignore')
try:
if self.use_wrapper:
opts = {'disp': self.disp}
x = optimize.minimize(func, x0, jac=fprime, method='BFGS',
args=(), options=opts)['x']
else:
x = optimize.fmin_bfgs(func, x0, fprime, disp=self.disp)
assert_(not np.isfinite(func(x)))
finally:
np.seterr(**olderr)
@suppressed_stdout
def test_powell(self):
# Powell (direction set) optimization routine
if self.use_wrapper:
opts = {'maxiter': self.maxiter, 'disp': self.disp,
'return_all': False}
res = optimize.minimize(self.func, self.startparams, args=(),
method='Powell', options=opts)
params, fopt, direc, numiter, func_calls, warnflag = (
res['x'], res['fun'], res['direc'], res['nit'],
res['nfev'], res['status'])
else:
retval = optimize.fmin_powell(self.func, self.startparams,
args=(), maxiter=self.maxiter,
full_output=True, disp=self.disp,
retall=False)
(params, fopt, direc, numiter, func_calls, warnflag) = retval
assert_allclose(self.func(params), self.func(self.solution),
atol=1e-6)
# Ensure that function call counts are 'known good'; these are from
# Scipy 0.7.0. Don't allow them to increase.
#
# However, some leeway must be added: the exact evaluation
# count is sensitive to numerical error, and floating-point
# computations are not bit-for-bit reproducible across
# machines, and when using e.g. MKL, data alignment
# etc. affect the rounding error.
#
assert_(self.funccalls <= 116 + 20, self.funccalls)
assert_(self.gradcalls == 0, self.gradcalls)
# Ensure that the function behaves the same; this is from Scipy 0.7.0
assert_allclose(self.trace[34:39],
[[0.72949016, -0.44156936, 0.47100962],
[0.72949016, -0.44156936, 0.48052496],
[1.45898031, -0.88313872, 0.95153458],
[0.72949016, -0.44156936, 0.47576729],
[1.72949016, -0.44156936, 0.47576729]],
atol=1e-14, rtol=1e-7)
@suppressed_stdout
def test_neldermead(self):
# Nelder-Mead simplex algorithm
if self.use_wrapper:
opts = {'maxiter': self.maxiter, 'disp': self.disp,
'return_all': False}
res = optimize.minimize(self.func, self.startparams, args=(),
method='Nelder-mead', options=opts)
params, fopt, numiter, func_calls, warnflag = (
res['x'], res['fun'], res['nit'], res['nfev'],
res['status'])
else:
retval = optimize.fmin(self.func, self.startparams,
args=(), maxiter=self.maxiter,
full_output=True, disp=self.disp,
retall=False)
(params, fopt, numiter, func_calls, warnflag) = retval
assert_allclose(self.func(params), self.func(self.solution),
atol=1e-6)
# Ensure that function call counts are 'known good'; these are from
# Scipy 0.7.0. Don't allow them to increase.
assert_(self.funccalls == 167, self.funccalls)
assert_(self.gradcalls == 0, self.gradcalls)
# Ensure that the function behaves the same; this is from Scipy 0.7.0
assert_allclose(self.trace[76:78],
[[0.1928968, -0.62780447, 0.35166118],
[0.19572515, -0.63648426, 0.35838135]],
atol=1e-14, rtol=1e-7)
@suppressed_stdout
def test_ncg(self):
# line-search Newton conjugate gradient optimization routine
if self.use_wrapper:
opts = {'maxiter': self.maxiter, 'disp': self.disp,
'return_all': False}
retval = optimize.minimize(self.func, self.startparams,
method='Newton-CG', jac=self.grad,
args=(), options=opts)['x']
else:
retval = optimize.fmin_ncg(self.func, self.startparams, self.grad,
args=(), maxiter=self.maxiter,
full_output=False, disp=self.disp,
retall=False)
params = retval
assert_allclose(self.func(params), self.func(self.solution),
atol=1e-6)
# Ensure that function call counts are 'known good'; these are from
# Scipy 0.7.0. Don't allow them to increase.
assert_(self.funccalls == 7, self.funccalls)
assert_(self.gradcalls <= 22, self.gradcalls) # 0.13.0
#assert_(self.gradcalls <= 18, self.gradcalls) # 0.9.0
#assert_(self.gradcalls == 18, self.gradcalls) # 0.8.0
#assert_(self.gradcalls == 22, self.gradcalls) # 0.7.0
# Ensure that the function behaves the same; this is from Scipy 0.7.0
assert_allclose(self.trace[3:5],
[[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01],
[-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]],
atol=1e-6, rtol=1e-7)
@suppressed_stdout
def test_ncg_hess(self):
# Newton conjugate gradient with Hessian
if self.use_wrapper:
opts = {'maxiter': self.maxiter, 'disp': self.disp,
'return_all': False}
retval = optimize.minimize(self.func, self.startparams,
method='Newton-CG', jac=self.grad,
hess=self.hess,
args=(), options=opts)['x']
else:
retval = optimize.fmin_ncg(self.func, self.startparams, self.grad,
fhess=self.hess,
args=(), maxiter=self.maxiter,
full_output=False, disp=self.disp,
retall=False)
params = retval
assert_allclose(self.func(params), self.func(self.solution),
atol=1e-6)
# Ensure that function call counts are 'known good'; these are from
# Scipy 0.7.0. Don't allow them to increase.
assert_(self.funccalls == 7, self.funccalls)
assert_(self.gradcalls <= 18, self.gradcalls) # 0.9.0
# assert_(self.gradcalls == 18, self.gradcalls) # 0.8.0
# assert_(self.gradcalls == 22, self.gradcalls) # 0.7.0
# Ensure that the function behaves the same; this is from Scipy 0.7.0
assert_allclose(self.trace[3:5],
[[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01],
[-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]],
atol=1e-6, rtol=1e-7)
@suppressed_stdout
def test_ncg_hessp(self):
# Newton conjugate gradient with Hessian times a vector p.
if self.use_wrapper:
opts = {'maxiter': self.maxiter, 'disp': self.disp,
'return_all': False}
retval = optimize.minimize(self.func, self.startparams,
method='Newton-CG', jac=self.grad,
hessp=self.hessp,
args=(), options=opts)['x']
else:
retval = optimize.fmin_ncg(self.func, self.startparams, self.grad,
fhess_p=self.hessp,
args=(), maxiter=self.maxiter,
full_output=False, disp=self.disp,
retall=False)
params = retval
assert_allclose(self.func(params), self.func(self.solution),
atol=1e-6)
# Ensure that function call counts are 'known good'; these are from
# Scipy 0.7.0. Don't allow them to increase.
assert_(self.funccalls == 7, self.funccalls)
assert_(self.gradcalls <= 18, self.gradcalls) # 0.9.0
# assert_(self.gradcalls == 18, self.gradcalls) # 0.8.0
# assert_(self.gradcalls == 22, self.gradcalls) # 0.7.0
# Ensure that the function behaves the same; this is from Scipy 0.7.0
assert_allclose(self.trace[3:5],
[[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01],
[-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]],
atol=1e-6, rtol=1e-7)
class TestOptimizeWrapperDisp(CheckOptimizeParameterized):
use_wrapper = True
disp = True
class TestOptimizeWrapperNoDisp(CheckOptimizeParameterized):
use_wrapper = True
disp = False
class TestOptimizeNoWrapperDisp(CheckOptimizeParameterized):
use_wrapper = False
disp = True
class TestOptimizeNoWrapperNoDisp(CheckOptimizeParameterized):
use_wrapper = False
disp = False
class TestOptimizeSimple(CheckOptimize):
def test_bfgs_nan(self):
# Test corner case where nan is fed to optimizer. See gh-2067.
func = lambda x: x
fprime = lambda x: np.ones_like(x)
x0 = [np.nan]
with np.errstate(over='ignore', invalid='ignore'):
x = optimize.fmin_bfgs(func, x0, fprime, disp=False)
assert_(np.isnan(func(x)))
def test_bfgs_numerical_jacobian(self):
# BFGS with numerical jacobian and a vector epsilon parameter.
# define the epsilon parameter using a random vector
epsilon = np.sqrt(np.finfo(float).eps) * np.random.rand(len(self.solution))
params = optimize.fmin_bfgs(self.func, self.startparams,
epsilon=epsilon, args=(),
maxiter=self.maxiter, disp=False)
assert_allclose(self.func(params), self.func(self.solution),
atol=1e-6)
def test_bfgs_gh_2169(self):
def f(x):
if x < 0:
return 1.79769313e+308
else:
return x + 1./x
xs = optimize.fmin_bfgs(f, [10.], disp=False)
assert_allclose(xs, 1.0, rtol=1e-4, atol=1e-4)
def test_l_bfgs_b(self):
# limited-memory bound-constrained BFGS algorithm
retval = optimize.fmin_l_bfgs_b(self.func, self.startparams,
self.grad, args=(),
maxiter=self.maxiter)
(params, fopt, d) = retval
assert_allclose(self.func(params), self.func(self.solution),
atol=1e-6)
# Ensure that function call counts are 'known good'; these are from
# Scipy 0.7.0. Don't allow them to increase.
assert_(self.funccalls == 7, self.funccalls)
assert_(self.gradcalls == 5, self.gradcalls)
# Ensure that the function behaves the same; this is from Scipy 0.7.0
assert_allclose(self.trace[3:5],
[[0., -0.52489628, 0.48753042],
[0., -0.52489628, 0.48753042]],
atol=1e-14, rtol=1e-7)
def test_l_bfgs_b_numjac(self):
# L-BFGS-B with numerical jacobian
retval = optimize.fmin_l_bfgs_b(self.func, self.startparams,
approx_grad=True,
maxiter=self.maxiter)
(params, fopt, d) = retval
assert_allclose(self.func(params), self.func(self.solution),
atol=1e-6)
def test_l_bfgs_b_funjac(self):
# L-BFGS-B with combined objective function and jacobian
def fun(x):
return self.func(x), self.grad(x)
retval = optimize.fmin_l_bfgs_b(fun, self.startparams,
maxiter=self.maxiter)
(params, fopt, d) = retval
assert_allclose(self.func(params), self.func(self.solution),
atol=1e-6)
def test_minimize_l_bfgs_b(self):
# Minimize with L-BFGS-B method
opts = {'disp': False, 'maxiter': self.maxiter}
r = optimize.minimize(self.func, self.startparams,
method='L-BFGS-B', jac=self.grad,
options=opts)
assert_allclose(self.func(r.x), self.func(self.solution),
atol=1e-6)
# approximate jacobian
ra = optimize.minimize(self.func, self.startparams,
method='L-BFGS-B', options=opts)
assert_allclose(self.func(ra.x), self.func(self.solution),
atol=1e-6)
# check that function evaluations in approximate jacobian are counted
assert_(ra.nfev > r.nfev)
def test_minimize_l_bfgs_b_ftol(self):
# Check that the `ftol` parameter in l_bfgs_b works as expected
v0 = None
for tol in [1e-1, 1e-4, 1e-7, 1e-10]:
opts = {'disp': False, 'maxiter': self.maxiter, 'ftol': tol}
sol = optimize.minimize(self.func, self.startparams,
method='L-BFGS-B', jac=self.grad,
options=opts)
v = self.func(sol.x)
if v0 is None:
v0 = v
else:
assert_(v < v0)
assert_allclose(v, self.func(self.solution), rtol=tol)
def test_custom(self):
# This function comes from the documentation example.
def custmin(fun, x0, args=(), maxfev=None, stepsize=0.1,
maxiter=100, callback=None, **options):
bestx = x0
besty = fun(x0)
funcalls = 1
niter = 0
improved = True
stop = False
while improved and not stop and niter < maxiter:
improved = False
niter += 1
for dim in range(np.size(x0)):
for s in [bestx[dim] - stepsize, bestx[dim] + stepsize]:
testx = np.copy(bestx)
testx[dim] = s
testy = fun(testx, *args)
funcalls += 1
if testy < besty:
besty = testy
bestx = testx
improved = True
if callback is not None:
callback(bestx)
if maxfev is not None and funcalls >= maxfev:
stop = True
break
return optimize.OptimizeResult(fun=besty, x=bestx, nit=niter,
nfev=funcalls, success=(niter > 1))
x0 = [1.35, 0.9, 0.8, 1.1, 1.2]
res = optimize.minimize(optimize.rosen, x0, method=custmin,
options=dict(stepsize=0.05))
assert_allclose(res.x, 1.0, rtol=1e-4, atol=1e-4)
def test_minimize_tol_parameter(self):
# Check that the minimize() tol= argument does something
def func(z):
x, y = z
return x**2*y**2 + x**4 + 1
def dfunc(z):
x, y = z
return np.array([2*x*y**2 + 4*x**3, 2*x**2*y])
for method in ['nelder-mead', 'powell', 'cg', 'bfgs',
'newton-cg', 'l-bfgs-b', 'tnc',
'cobyla', 'slsqp']:
if method in ('nelder-mead', 'powell', 'cobyla'):
jac = None
else:
jac = dfunc
sol1 = optimize.minimize(func, [1, 1], jac=jac, tol=1e-10,
method=method)
sol2 = optimize.minimize(func, [1, 1], jac=jac, tol=1.0,
method=method)
assert_(func(sol1.x) < func(sol2.x),
"%s: %s vs. %s" % (method, func(sol1.x), func(sol2.x)))
def test_no_increase(self):
# Check that the solver doesn't return a value worse than the
# initial point.
def func(x):
return (x - 1)**2
def bad_grad(x):
# purposefully invalid gradient function, simulates a case
# where line searches start failing
return 2*(x - 1) * (-1) - 2
def check(method):
x0 = np.array([2.0])
f0 = func(x0)
jac = bad_grad
if method in ['nelder-mead', 'powell', 'cobyla']:
jac = None
sol = optimize.minimize(func, x0, jac=jac, method=method,
options=dict(maxiter=20))
assert_equal(func(sol.x), sol.fun)
dec.knownfailureif(method == 'slsqp', "SLSQP returns slightly worse")(lambda: None)()
assert_(func(sol.x) <= f0)
for method in ['nelder-mead', 'powell', 'cg', 'bfgs',
'newton-cg', 'l-bfgs-b', 'tnc',
'cobyla', 'slsqp']:
yield check, method
def test_slsqp_respect_bounds(self):
# Regression test for gh-3108
def f(x):
return sum((x - np.array([1., 2., 3., 4.]))**2)
def cons(x):
a = np.array([[-1, -1, -1, -1], [-3, -3, -2, -1]])
return np.concatenate([np.dot(a, x) + np.array([5, 10]), x])
x0 = np.array([0.5, 1., 1.5, 2.])
res = optimize.minimize(f, x0, method='slsqp',
constraints={'type': 'ineq', 'fun': cons})
assert_allclose(res.x, np.array([0., 2, 5, 8])/3, atol=1e-12)
def test_minimize_automethod(self):
def f(x):
return x**2
def cons(x):
return x - 2
x0 = np.array([10.])
sol_0 = optimize.minimize(f, x0)
sol_1 = optimize.minimize(f, x0, constraints=[{'type': 'ineq', 'fun': cons}])
sol_2 = optimize.minimize(f, x0, bounds=[(5, 10)])
sol_3 = optimize.minimize(f, x0, constraints=[{'type': 'ineq', 'fun': cons}], bounds=[(5, 10)])
sol_4 = optimize.minimize(f, x0, constraints=[{'type': 'ineq', 'fun': cons}], bounds=[(1, 10)])
for sol in [sol_0, sol_1, sol_2, sol_3, sol_4]:
assert_(sol.success)
assert_allclose(sol_0.x, 0, atol=1e-8)
assert_allclose(sol_1.x, 2, atol=1e-8)
assert_allclose(sol_2.x, 5, atol=1e-8)
assert_allclose(sol_3.x, 5, atol=1e-8)
assert_allclose(sol_4.x, 2, atol=1e-8)
def test_minimize_coerce_args_param(self):
# Regression test for gh-3503
def Y(x, c):
return np.sum((x-c)**2)
def dY_dx(x, c=None):
return 2*(x-c)
c = np.array([3, 1, 4, 1, 5, 9, 2, 6, 5, 3, 5])
xinit = np.random.randn(len(c))
optimize.minimize(Y, xinit, jac=dY_dx, args=(c), method="BFGS")
class TestLBFGSBBounds(TestCase):
def setUp(self):
self.bounds = ((1, None), (None, None))
self.solution = (1, 0)
def fun(self, x, p=2.0):
return 1.0 / p * (x[0]**p + x[1]**p)
def jac(self, x, p=2.0):
return x**(p - 1)
def fj(self, x, p=2.0):
return self.fun(x, p), self.jac(x, p)
def test_l_bfgs_b_bounds(self):
x, f, d = optimize.fmin_l_bfgs_b(self.fun, [0, -1],
fprime=self.jac,
bounds=self.bounds)
assert_(d['warnflag'] == 0, d['task'])
assert_allclose(x, self.solution, atol=1e-6)
def test_l_bfgs_b_funjac(self):
# L-BFGS-B with fun and jac combined and extra arguments
x, f, d = optimize.fmin_l_bfgs_b(self.fj, [0, -1], args=(2.0, ),
bounds=self.bounds)
assert_(d['warnflag'] == 0, d['task'])
assert_allclose(x, self.solution, atol=1e-6)
def test_minimize_l_bfgs_b_bounds(self):
# Minimize with method='L-BFGS-B' with bounds
res = optimize.minimize(self.fun, [0, -1], method='L-BFGS-B',
jac=self.jac, bounds=self.bounds)
assert_(res['success'], res['message'])
assert_allclose(res.x, self.solution, atol=1e-6)
class TestOptimizeScalar(TestCase):
def setUp(self):
self.solution = 1.5
def fun(self, x, a=1.5):
"""Objective function"""
return (x - a)**2 - 0.8
def test_brent(self):
x = optimize.brent(self.fun)
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.brent(self.fun, brack=(-3, -2))
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.brent(self.fun, full_output=True)
assert_allclose(x[0], self.solution, atol=1e-6)
x = optimize.brent(self.fun, brack=(-15, -1, 15))
assert_allclose(x, self.solution, atol=1e-6)
def test_golden(self):
x = optimize.golden(self.fun)
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.golden(self.fun, brack=(-3, -2))
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.golden(self.fun, full_output=True)
assert_allclose(x[0], self.solution, atol=1e-6)
x = optimize.golden(self.fun, brack=(-15, -1, 15))
assert_allclose(x, self.solution, atol=1e-6)
def test_fminbound(self):
x = optimize.fminbound(self.fun, 0, 1)
assert_allclose(x, 1, atol=1e-4)
x = optimize.fminbound(self.fun, 1, 5)
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.fminbound(self.fun, np.array([1]), np.array([5]))
assert_allclose(x, self.solution, atol=1e-6)
assert_raises(ValueError, optimize.fminbound, self.fun, 5, 1)
def test_fminbound_scalar(self):
try:
optimize.fminbound(self.fun, np.zeros((1, 2)), 1)
self.fail("exception not raised")
except ValueError as e:
assert_('must be scalar' in str(e))
x = optimize.fminbound(self.fun, 1, np.array(5))
assert_allclose(x, self.solution, atol=1e-6)
def test_minimize_scalar(self):
# combine all tests above for the minimize_scalar wrapper
x = optimize.minimize_scalar(self.fun).x
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.minimize_scalar(self.fun, method='Brent')
assert_(x.success)
x = optimize.minimize_scalar(self.fun, method='Brent',
options=dict(maxiter=3))
assert_(not x.success)
x = optimize.minimize_scalar(self.fun, bracket=(-3, -2),
args=(1.5, ), method='Brent').x
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.minimize_scalar(self.fun, method='Brent',
args=(1.5,)).x
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.minimize_scalar(self.fun, bracket=(-15, -1, 15),
args=(1.5, ), method='Brent').x
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.minimize_scalar(self.fun, bracket=(-3, -2),
args=(1.5, ), method='golden').x
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.minimize_scalar(self.fun, method='golden',
args=(1.5,)).x
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.minimize_scalar(self.fun, bracket=(-15, -1, 15),
args=(1.5, ), method='golden').x
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.minimize_scalar(self.fun, bounds=(0, 1), args=(1.5,),
method='Bounded').x
assert_allclose(x, 1, atol=1e-4)
x = optimize.minimize_scalar(self.fun, bounds=(1, 5), args=(1.5, ),
method='bounded').x
assert_allclose(x, self.solution, atol=1e-6)
x = optimize.minimize_scalar(self.fun, bounds=(np.array([1]),
np.array([5])),
args=(np.array([1.5]), ),
method='bounded').x
assert_allclose(x, self.solution, atol=1e-6)
assert_raises(ValueError, optimize.minimize_scalar, self.fun,
bounds=(5, 1), method='bounded', args=(1.5, ))
assert_raises(ValueError, optimize.minimize_scalar, self.fun,
bounds=(np.zeros(2), 1), method='bounded', args=(1.5, ))
x = optimize.minimize_scalar(self.fun, bounds=(1, np.array(5)),
method='bounded').x
assert_allclose(x, self.solution, atol=1e-6)
def test_minimize_scalar_custom(self):
# This function comes from the documentation example.
def custmin(fun, bracket, args=(), maxfev=None, stepsize=0.1,
maxiter=100, callback=None, **options):
bestx = (bracket[1] + bracket[0]) / 2.0
besty = fun(bestx)
funcalls = 1
niter = 0
improved = True
stop = False
while improved and not stop and niter < maxiter:
improved = False
niter += 1
for testx in [bestx - stepsize, bestx + stepsize]:
testy = fun(testx, *args)
funcalls += 1
if testy < besty:
besty = testy
bestx = testx
improved = True
if callback is not None:
callback(bestx)
if maxfev is not None and funcalls >= maxfev:
stop = True
break
return optimize.OptimizeResult(fun=besty, x=bestx, nit=niter,
nfev=funcalls, success=(niter > 1))
res = optimize.minimize_scalar(self.fun, bracket=(0, 4), method=custmin,
options=dict(stepsize=0.05))
assert_allclose(res.x, self.solution, atol=1e-6)
def test_minimize_scalar_coerce_args_param(self):
# Regression test for gh-3503
optimize.minimize_scalar(self.fun, args=1.5)
def test_brent_negative_tolerance():
assert_raises(ValueError, optimize.brent, np.cos, tol=-.01)
class TestNewtonCg(object):
def test_rosenbrock(self):
x0 = np.array([-1.2, 1.0])
sol = optimize.minimize(optimize.rosen, x0,
jac=optimize.rosen_der,
hess=optimize.rosen_hess,
tol=1e-5,
method='Newton-CG')
assert_(sol.success, sol.message)
assert_allclose(sol.x, np.array([1, 1]), rtol=1e-4)
def test_himmelblau(self):
x0 = np.array(himmelblau_x0)
sol = optimize.minimize(himmelblau,
x0,
jac=himmelblau_grad,
hess=himmelblau_hess,
method='Newton-CG',
tol=1e-6)
assert_(sol.success, sol.message)
assert_allclose(sol.x, himmelblau_xopt, rtol=1e-4)
assert_allclose(sol.fun, himmelblau_min, atol=1e-4)
class TestRosen(TestCase):
def test_hess(self):
# Compare rosen_hess(x) times p with rosen_hess_prod(x,p). See gh-1775
x = np.array([3, 4, 5])
p = np.array([2, 2, 2])
hp = optimize.rosen_hess_prod(x, p)
dothp = np.dot(optimize.rosen_hess(x), p)
assert_equal(hp, dothp)
def himmelblau(p):
"""
R^2 -> R^1 test function for optimization. The function has four local
minima where himmelblau(xopt) == 0.
"""
x, y = p
a = x*x + y - 11
b = x + y*y - 7
return a*a + b*b
def himmelblau_grad(p):
x, y = p
return np.array([4*x**3 + 4*x*y - 42*x + 2*y**2 - 14,
2*x**2 + 4*x*y + 4*y**3 - 26*y - 22])
def himmelblau_hess(p):
x, y = p
return np.array([[12*x**2 + 4*y - 42, 4*x + 4*y],
[4*x + 4*y, 4*x + 12*y**2 - 26]])
himmelblau_x0 = [-0.27, -0.9]
himmelblau_xopt = [3, 2]
himmelblau_min = 0.0
def test_minimize_multiple_constraints():
# Regression test for gh-4240.
def func(x):
return np.array([25 - 0.2 * x[0] - 0.4 * x[1] - 0.33 * x[2]])
def func1(x):
return np.array([x[1]])
def func2(x):
return np.array([x[2]])
cons = ({'type': 'ineq', 'fun': func},
{'type': 'ineq', 'fun': func1},
{'type': 'ineq', 'fun': func2})
f = lambda x: -1 * (x[0] + x[1] + x[2])
res = optimize.minimize(f, [0, 0, 0], method='SLSQP', constraints=cons)
assert_allclose(res.x, [125, 0, 0], atol=1e-10)
class TestOptimizeResultAttributes(TestCase):
# Test that all minimizers return an OptimizeResult containing
# all the OptimizeResult attributes
def setUp(self):
self.x0 = [5, 5]
self.func = optimize.rosen
self.jac = optimize.rosen_der
self.hess = optimize.rosen_hess
self.hessp = optimize.rosen_hess_prod
self.bounds = [(0., 10.), (0., 10.)]
def test_attributes_present(self):
methods = ['Nelder-Mead', 'Powell', 'CG', 'BFGS', 'Newton-CG',
'L-BFGS-B', 'TNC', 'COBYLA', 'SLSQP', 'dogleg',
'trust-ncg']
attributes = ['nit', 'nfev', 'x', 'success', 'status', 'fun',
'message']
skip = {'COBYLA': ['nit']}
for method in methods:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
res = optimize.minimize(self.func, self.x0, method=method,
jac=self.jac, hess=self.hess,
hessp=self.hessp)
for attribute in attributes:
if method in skip and attribute in skip[method]:
continue
assert_(hasattr(res, attribute))
class TestBrute:
# Test the "brute force" method
def setUp(self):
self.params = (2, 3, 7, 8, 9, 10, 44, -1, 2, 26, 1, -2, 0.5)
self.rranges = (slice(-4, 4, 0.25), slice(-4, 4, 0.25))
self.solution = np.array([-1.05665192, 1.80834843])
def f1(self, z, *params):
x, y = z
a, b, c, d, e, f, g, h, i, j, k, l, scale = params
return (a * x**2 + b * x * y + c * y**2 + d*x + e*y + f)
def f2(self, z, *params):
x, y = z
a, b, c, d, e, f, g, h, i, j, k, l, scale = params
return (-g*np.exp(-((x-h)**2 + (y-i)**2) / scale))
def f3(self, z, *params):
x, y = z
a, b, c, d, e, f, g, h, i, j, k, l, scale = params
return (-j*np.exp(-((x-k)**2 + (y-l)**2) / scale))
def func(self, z, *params):
return self.f1(z, *params) + self.f2(z, *params) + self.f3(z, *params)
@suppressed_stdout
def test_brute(self):
# test fmin
resbrute = optimize.brute(self.func, self.rranges, args=self.params,
full_output=True, finish=optimize.fmin)
assert_allclose(resbrute[0], self.solution, atol=1e-3)
assert_allclose(resbrute[1], self.func(self.solution, *self.params),
atol=1e-3)
# test minimize
resbrute = optimize.brute(self.func, self.rranges, args=self.params,
full_output=True,
finish=optimize.minimize)
assert_allclose(resbrute[0], self.solution, atol=1e-3)
assert_allclose(resbrute[1], self.func(self.solution, *self.params),
atol=1e-3)
if __name__ == "__main__":
run_module_suite()
|
nicko96/Chrome-Infra
|
refs/heads/master
|
appengine/findit/waterfall/test/extract_signal_pipeline_test.py
|
1
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from testing_utils import testing
from model.wf_step import WfStep
from pipeline_wrapper import pipeline_handlers
from waterfall import buildbot
from waterfall import extractors
from waterfall.extract_signal_pipeline import ExtractSignalPipeline
class ExtractSignalPipelineTest(testing.AppengineTestCase):
app_module = pipeline_handlers._APP
ABC_TEST_FAILURE_LOG = """
...
../../content/common/gpu/media/v4l2_video_encode_accelerator.cc:306:12:
...
"""
FAILURE_SIGNALS = {
"abc_test": {
"files": {
"content/common/gpu/media/v4l2_video_encode_accelerator.cc": [306]
},
"keywords": {},
"tests": []
}
}
FAILURE_INFO = {
'master_name': 'm',
'builder_name': 'b',
'build_number': 123,
'failed': True,
'chromium_revision': 'a_git_hash',
'failed_steps': {
'abc_test': {
'last_pass': 122,
'current_failure': 123,
'first_failure': 123,
}
}
}
def testExtractStorablePortionOfLogWithSmallLogData(self):
self.mock(ExtractSignalPipeline, 'LOG_DATA_BYTE_LIMIT', 500)
lines = [str(i) * 99 for i in range(3)]
log_data = '\n'.join(lines)
expected_result = log_data
result = ExtractSignalPipeline._ExtractStorablePortionOfLog(log_data)
self.assertEqual(expected_result, result)
def testExtractStorablePortionOfLogWithBigLogData(self):
self.mock(ExtractSignalPipeline, 'LOG_DATA_BYTE_LIMIT', 500)
lines = [str(9 - i) * 99 for i in range(9)]
log_data = '\n'.join(lines)
expected_result = '\n'.join(lines[-5:])
result = ExtractSignalPipeline._ExtractStorablePortionOfLog(log_data)
self.assertEqual(expected_result, result)
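  # Taken together, the two tests above pin down the behaviour of
  # _ExtractStorablePortionOfLog: it keeps the *tail* of the log, trimmed to
  # whole lines that fit within LOG_DATA_BYTE_LIMIT.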
def testWfStepStdioLogAlreadyDownloaded(self):
master_name = 'm'
builder_name = 'b'
build_number = 123
step_name = 'abc_test'
step = WfStep.Create(master_name, builder_name, build_number, step_name)
step.log_data = self.ABC_TEST_FAILURE_LOG
step.put()
step_log_url = buildbot.CreateStdioLogUrl(
master_name, builder_name, build_number, step_name)
with self.mock_urlfetch() as urlfetch:
urlfetch.register_handler(step_log_url, 'If used, test should fail!')
pipeline = ExtractSignalPipeline(self.FAILURE_INFO)
signals = pipeline.run(self.FAILURE_INFO)
self.assertEqual(self.FAILURE_SIGNALS, signals)
def MockGetStdiolog(self, master_name, builder_name, build_number, step_name):
step_log_url = buildbot.CreateStdioLogUrl(
master_name, builder_name, build_number, step_name)
with self.mock_urlfetch() as urlfetch:
urlfetch.register_handler(step_log_url, self.ABC_TEST_FAILURE_LOG)
def testWfStepStdioLogNotDownloadedYet(self):
master_name = 'm'
builder_name = 'b'
build_number = 123
step_name = 'abc_test'
self.MockGetStdiolog(master_name, builder_name, build_number, step_name)
pipeline = ExtractSignalPipeline(self.FAILURE_INFO)
pipeline.start()
self.execute_queued_tasks()
step = WfStep.Create(master_name, builder_name, build_number, step_name)
self.assertIsNotNone(step)
def _GetGtestResultLog(self,
master_name, builder_name, build_number, step_name):
file_name = os.path.join(
os.path.dirname(__file__), 'data',
'%s_%s_%d_%s.json' % (master_name,
builder_name, build_number, step_name))
with open(file_name, 'r') as f:
return f.read()
def testGetTestLevelFailures(self):
master_name = 'm'
builder_name = 'b'
build_number = 123
step_name = 'abc_test'
expected_failure_log = ('ERROR:x_test.cc:1234\na/b/u2s1.cc:567: Failure\n'
'[2]: 2594735000 bogo-microseconds\n'
'ERROR:x_test.cc:1234\na/b/u2s1.cc:567: Failure\n'
'ERROR:x_test.cc:1234\na/b/u2s1.cc:567: Failure\n'
'ERROR:x_test.cc:1234\na/b/u2s1.cc:567: Failure\n'
'a/b/u3s2.cc:110: Failure\n'
'a/b/u3s2.cc:110: Failure\n'
'a/b/u3s2.cc:110: Failure\n'
'a/b/u3s2.cc:110: Failure\n'
)
step_log = self._GetGtestResultLog(master_name,
builder_name, build_number, step_name)
failed_test_log = ExtractSignalPipeline._GetReliableTestFailureLog(step_log)
self.assertEqual(expected_failure_log, failed_test_log)
def testGetTestLevelFailuresFlaky(self):
master_name = 'm'
builder_name = 'b'
build_number = 124
step_name = 'abc_test'
expected_failure_log = 'flaky'
step_log = self._GetGtestResultLog(master_name,
builder_name, build_number, step_name)
failed_test_log = ExtractSignalPipeline._GetReliableTestFailureLog(step_log)
self.assertEqual(expected_failure_log, failed_test_log)
def testGetTestLevelFailuresInvalid(self):
master_name = 'm'
builder_name = 'b'
build_number = 125
step_name = 'abc_test'
expected_failure_log = 'invalid'
step_log = self._GetGtestResultLog(master_name,
builder_name, build_number, step_name)
failed_test_log = ExtractSignalPipeline._GetReliableTestFailureLog(step_log)
self.assertEqual(expected_failure_log, failed_test_log)
def MockGetGtestJsonResult(self):
    self.mock(buildbot, 'GetGtestResultLog', self._GetGtestResultLog)
def testGetSignalFromStepLog(self):
master_name = 'm'
builder_name = 'b'
build_number = 123
step_name = 'abc_test'
# Mock both stdiolog and gtest json results to test whether Findit will
# go to step log first when both logs exist.
self.MockGetStdiolog(master_name, builder_name, build_number, step_name)
self.MockGetGtestJsonResult()
pipeline = ExtractSignalPipeline(self.FAILURE_INFO)
signals = pipeline.run(self.FAILURE_INFO)
step = WfStep.Get(master_name, builder_name, build_number, step_name)
expected_files = {
'a/b/u2s1.cc': [567],
'a/b/u3s2.cc': [110]
}
self.assertIsNotNone(step)
self.assertIsNotNone(step.log_data)
self.assertEqual(expected_files, signals['abc_test']['files'])
def testGetSignalFromStepLogFlaky(self):
master_name = 'm'
builder_name = 'b'
build_number = 124
step_name = 'abc_test'
failure_info = {
'master_name': 'm',
'builder_name': 'b',
'build_number': 124,
'failed': True,
'chromium_revision': 'a_git_hash',
'failed_steps': {
'abc_test': {
'last_pass': 123,
'current_failure': 124,
'first_failure': 124,
}
}
}
self.MockGetStdiolog(master_name, builder_name, build_number, step_name)
self.MockGetGtestJsonResult()
pipeline = ExtractSignalPipeline()
signals = pipeline.run(failure_info)
step = WfStep.Get(master_name, builder_name, build_number, step_name)
self.assertIsNotNone(step)
self.assertIsNotNone(step.log_data)
self.assertEqual('flaky', step.log_data)
self.assertEqual({}, signals['abc_test']['files'])
def testGetSignalFromStepLogInvalid(self):
master_name = 'm'
builder_name = 'b'
build_number = 125
step_name = 'abc_test'
failure_info = {
'master_name': 'm',
'builder_name': 'b',
'build_number': 125,
'failed': True,
'chromium_revision': 'a_git_hash',
'failed_steps': {
'abc_test': {
'last_pass': 124,
'current_failure': 125,
'first_failure': 125,
}
}
}
self.MockGetStdiolog(master_name, builder_name, build_number, step_name)
self.MockGetGtestJsonResult()
pipeline = ExtractSignalPipeline()
signals = pipeline.run(failure_info)
step = WfStep.Get(master_name, builder_name, build_number, step_name)
expected_files = {
'content/common/gpu/media/v4l2_video_encode_accelerator.cc': [306]
}
self.assertIsNotNone(step)
self.assertIsNotNone(step.log_data)
self.assertEqual(expected_files, signals['abc_test']['files'])
def testBailOutIfNotAFailedBuild(self):
failure_info = {
'failed': False,
}
expected_signals = {}
pipeline = ExtractSignalPipeline()
signals = pipeline.run(failure_info)
self.assertEqual(expected_signals, signals)
def testBailOutIfNoValidChromiumRevision(self):
failure_info = {
'failed': True,
'chromium_revision': None,
}
expected_signals = {}
pipeline = ExtractSignalPipeline()
signals = pipeline.run(failure_info)
self.assertEqual(expected_signals, signals)
|
calamityman/ansible-modules-extras
|
refs/heads/devel
|
cloud/webfaction/webfaction_mailbox.py
|
102
|
#!/usr/bin/python
#
# Create webfaction mailbox using Ansible and the Webfaction API
#
# ------------------------------------------
# (c) Quentin Stafford-Fraser and Andy Baker 2015
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: webfaction_mailbox
short_description: Add or remove mailboxes on Webfaction
description:
- Add or remove mailboxes on a Webfaction account. Further documentation at http://github.com/quentinsf/ansible-webfaction.
author: Quentin Stafford-Fraser (@quentinsf)
version_added: "2.0"
notes:
- "You can run playbooks that use this on a local machine, or on a Webfaction host, or elsewhere, since the scripts use the remote webfaction API - the location is not important. However, running them on multiple hosts I(simultaneously) is best avoided. If you don't specify I(localhost) as your host, you may want to add C(serial: 1) to the plays."
- See `the webfaction API <http://docs.webfaction.com/xmlrpc-api/>`_ for more info.
options:
mailbox_name:
description:
- The name of the mailbox
required: true
mailbox_password:
description:
- The password for the mailbox
        required: true
state:
description:
- Whether the mailbox should exist
required: false
choices: ['present', 'absent']
default: "present"
login_name:
description:
- The webfaction account to use
required: true
login_password:
description:
- The webfaction password to use
required: true
'''
EXAMPLES = '''
- name: Create a mailbox
webfaction_mailbox:
mailbox_name="mybox"
mailbox_password="myboxpw"
state=present
login_name={{webfaction_user}}
login_password={{webfaction_passwd}}
'''
import socket
import xmlrpclib
webfaction = xmlrpclib.ServerProxy('https://api.webfaction.com/')
def main():
module = AnsibleModule(
argument_spec=dict(
mailbox_name=dict(required=True),
mailbox_password=dict(required=True),
state=dict(required=False, choices=['present', 'absent'], default='present'),
login_name=dict(required=True),
login_password=dict(required=True),
),
supports_check_mode=True
)
mailbox_name = module.params['mailbox_name']
site_state = module.params['state']
session_id, account = webfaction.login(
module.params['login_name'],
module.params['login_password']
)
mailbox_list = [x['mailbox'] for x in webfaction.list_mailboxes(session_id)]
existing_mailbox = mailbox_name in mailbox_list
result = {}
# Here's where the real stuff happens
if site_state == 'present':
# Does a mailbox with this name already exist?
if existing_mailbox:
            module.exit_json(changed=False)
positional_args = [session_id, mailbox_name]
if not module.check_mode:
# If this isn't a dry run, create the mailbox
result.update(webfaction.create_mailbox(*positional_args))
elif site_state == 'absent':
# If the mailbox is already not there, nothing changed.
if not existing_mailbox:
module.exit_json(changed=False)
if not module.check_mode:
# If this isn't a dry run, delete the mailbox
result.update(webfaction.delete_mailbox(session_id, mailbox_name))
else:
module.fail_json(msg="Unknown state specified: {}".format(site_state))
module.exit_json(changed=True, result=result)
from ansible.module_utils.basic import *
main()
|
ehenneken/adsrex
|
refs/heads/master
|
v1_0/api/myads.py
|
1
|
from ..user_roles import anonymous_user, authenticated_user
def test_anonymous_user():
for x in ['/vault/configuration',
'/vault/user-data',
'/vault/query/sfsfs-sfsdfsdf-sfsdf-sfsdf']:
r = anonymous_user.get(x)
assert r.status_code == 401
    # unlike the endpoints above, query2svg should be accessible anonymously
r = anonymous_user.get('/vault/query2svg/113dc6ef2e612ffe1a0de9a16e7f494e')
assert r.status_code == 200
def test_authenticated_user():
# bumblebee config
r = authenticated_user.get('/vault/configuration')
assert r.status_code == 200
assert isinstance(r.json(), dict)
assert 'link_servers' in r.json()
r = authenticated_user.get('/vault/configuration/link_servers')
assert r.status_code == 200
assert isinstance(r.json(), list)
# server side user storage
r = authenticated_user.post('/vault/user-data', json={'link_server': 'foo'})
assert r.status_code == 200
assert r.json()['link_server'] == 'foo'
r = authenticated_user.get('/vault/user-data')
assert r.status_code == 200
assert isinstance(r.json(), dict)
assert r.json()['link_server'] == 'foo'
    # I'm using my own access token; once we switch to a dedicated account
    # made only for testing, the qid will change too.
r = authenticated_user.post('/vault/query', json={'q': '*:*'})
assert r.status_code == 200
assert isinstance(r.json(), dict)
qid = r.json()['qid']
r = authenticated_user.get('/vault/query/%s' % qid)
assert r.status_code == 200
assert 'numfound' in r.json()
r = authenticated_user.get('/vault/execute_query/%s' % qid)
assert r.status_code == 200
assert r.json()['responseHeader']['params']['q'] == '*:*'
assert r.json()['responseHeader']['params']['fl'] == 'id'
assert r.json()['response']
r = authenticated_user.get('/vault/execute_query/%s?fl=recid' % qid)
assert r.status_code == 200
assert r.json()['responseHeader']['params']['q'] == '*:*'
assert r.json()['responseHeader']['params']['fl'] == 'recid'
assert r.json()['response']
# 113dc6ef2e612ffe1a0de9a16e7f494e
r = authenticated_user.get('/vault/query2svg/%s' % qid)
assert 'svg' in r.text
assert r.headers.get('Content-Type') == 'image/svg+xml'
|
Mortezaipo/cmd-test
|
refs/heads/master
|
src/lib.py
|
1
|
"""Extra library."""
import os
import socket
import re
import json
import const
def is_file_exists(path: str):
"""Check file existance.
:param path: file address
:type path: str
:return: Checking file existance
:rtype: bool
"""
if os.path.exists(path):
return True
return False
def has_file_read_permission(path: str):
"""Check file read permission.
:param path: file address
:type path: str
    :return: whether the file is readable
:rtype: bool
"""
if os.access(path, os.R_OK):
return True
return False
def is_rpc_addr_establish(addr: str):
"""Check RPC addr connection.
:param addr: Connection address
:type addr: str
    :return: whether a TCP connection could be established
:rtype: bool
"""
check_port = addr.rfind(":")
if check_port:
addr_url = addr[:check_port]
if not addr[check_port+1:].isdigit():
return False
addr_port = int(addr[check_port+1:])
else:
addr_url = addr
addr_port = 80
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((addr_url, addr_port))
s.close()
return True
except (ConnectionRefusedError, socket.gaierror):
return False
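# Illustrative usage (hypothetical addresses, assuming something is listening):
#   is_rpc_addr_establish("127.0.0.1:8545")  # True if port 8545 accepts TCP
#   is_rpc_addr_establish("example.com")     # no port given, falls back to 80
#   is_rpc_addr_establish("host:abc")        # False, port is not numeric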
def load_config_file(path: str):
"""Check config file content.
:param path: file address
:type path: str
    :return: parsed config content, or "" if the file is not valid JSON
    :rtype: dict or str
"""
    try:
        # json.load expects a file object, not a path string
        with open(path) as config_file:
            return json.load(config_file)
    except json.JSONDecodeError:
        return ""
def validate_config_structure(config: dict):
"""Validate config content.
:param config: config body
:type config: dict
    :return: an error code from const, or True if the config is valid
    :rtype: int or bool
"""
# 'action' key/value validation
if not config.get("actions"):
return const.ACTION_KEY_NOT_FOUND
if type(config["actions"]) is not list:
return const.ACTION_VALUE_INVALID_TYPE
if len(config["actions"]) == 0:
return const.ACTION_VALUE_IS_EMPTY
for item in config["actions"]:
# 'command' key/value validation
if item.get("command") is None:
return const.COMMAND_KEY_NOT_FOUND
if type(item["command"]) is str:
return const.COMMAND_VALUE_INVALID_TYPE
if len(item["command"]) == 0:
return const.COMMAND_VALUE_IS_EMPTY
# 'process' key/value validation
if item.get("process") is None:
return const.PROCESS_KEY_NOT_FOUND
if type(item["process"]) is not int:
return const.PROCESS_VALUE_INVALID_TYPE
if item["process"] <= 0:
return const.PROCESS_VALUE_INVALID_VALUE
# 'thread' key/value validation
if item.get("thread") is None:
return const.THREAD_KEY_NOT_FOUND
if type(item["thread"]) is not int:
return const.THREAD_VALUE_INVALID_TYPE
if item["thread"] <= 0:
return const.THREAD_VALUE_INVALID_VALUE
# 'io' key/value validation
if item.get("io") is None:
return const.IO_KEY_NOT_FOUND
if type(item["io"]) is not list:
return const.IO_VALUE_INVALID_TYPE
if len(item["io"]) == 0:
return const.IO_VALUE_INVALID_VALUE
return True
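# A minimal config that passes validate_config_structure, given the checks
# above (values are made up for illustration):
#   {"actions": [{"command": "echo hi", "process": 1, "thread": 1,
#                 "io": ["read"]}]}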
|
realfake/kubernetes
|
refs/heads/master
|
cluster/juju/layers/kubernetes-worker/lib/charms/kubernetes/common.py
|
366
|
#!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import subprocess
def get_version(bin_name):
"""Get the version of an installed Kubernetes binary.
:param str bin_name: Name of binary
:return: 3-tuple version (maj, min, patch)
Example::
    >>> get_version('kubelet')
(1, 6, 0)
"""
cmd = '{} --version'.format(bin_name).split()
version_string = subprocess.check_output(cmd).decode('utf-8')
return tuple(int(q) for q in re.findall("[0-9]+", version_string)[:3])
|
jaden/2014-hour-of-code
|
refs/heads/master
|
python/guess2.py
|
1
|
from random import randint
answer = randint(1,100)
guess = 0
tries = 0
while guess != answer:
guess = int(raw_input("Guess a number from 1 to 100: "))
tries += 1
if guess < answer:
print "Too low"
elif guess > answer:
print "Too high"
print "Correct, you guessed it in %d tries" % tries
|
mgr0dzicki/python-neo
|
refs/heads/master
|
neo/test/iotest/test_brainwaresrcio.py
|
6
|
# -*- coding: utf-8 -*-
"""
Tests of neo.io.brainwaresrcio
"""
# needed for python 3 compatibility
from __future__ import absolute_import, division, print_function
import logging
import os.path
import sys
try:
import unittest2 as unittest
except ImportError:
import unittest
import numpy as np
import quantities as pq
from neo.core import (Block, Event,
ChannelIndex, Segment, SpikeTrain, Unit)
from neo.io import BrainwareSrcIO, brainwaresrcio
from neo.test.iotest.common_io_test import BaseTestIO
from neo.test.tools import (assert_same_sub_schema,
assert_neo_object_is_compliant)
from neo.test.iotest.tools import create_generic_reader
PY_VER = sys.version_info[0]
FILES_TO_TEST = ['block_300ms_4rep_1clust_part_ch1.src',
'block_500ms_5rep_empty_fullclust_ch1.src',
'block_500ms_5rep_empty_partclust_ch1.src',
'interleaved_500ms_5rep_ch2.src',
'interleaved_500ms_5rep_nospikes_ch1.src',
'interleaved_500ms_7rep_noclust_ch1.src',
'long_170s_1rep_1clust_ch2.src',
'multi_500ms_mulitrep_ch1.src',
'random_500ms_12rep_noclust_part_ch2.src',
'sequence_500ms_5rep_ch2.src']
FILES_TO_COMPARE = ['block_300ms_4rep_1clust_part_ch1',
'block_500ms_5rep_empty_fullclust_ch1',
'block_500ms_5rep_empty_partclust_ch1',
'interleaved_500ms_5rep_ch2',
'interleaved_500ms_5rep_nospikes_ch1',
'interleaved_500ms_7rep_noclust_ch1',
'',
'multi_500ms_mulitrep_ch1',
'random_500ms_12rep_noclust_part_ch2',
'sequence_500ms_5rep_ch2']
def proc_src(filename):
'''Load an src file that has already been processed by the official matlab
file converter. That matlab data is saved to an m-file, which is then
converted to a numpy '.npz' file. This numpy file is the file actually
loaded. This function converts it to a neo block and returns the block.
This block can be compared to the block produced by BrainwareSrcIO to
make sure BrainwareSrcIO is working properly
block = proc_src(filename)
filename: The file name of the numpy file to load. It should end with
'*_src_py?.npz'. This will be converted to a neo 'file_origin' property
with the value '*.src', so the filename to compare should fit that pattern.
'py?' should be 'py2' for the python 2 version of the numpy file or 'py3'
for the python 3 version of the numpy file.
example: filename = 'file1_src_py2.npz'
src file name = 'file1.src'
'''
with np.load(filename) as srcobj:
srcfile = srcobj.items()[0][1]
filename = os.path.basename(filename[:-12]+'.src')
block = Block(file_origin=filename)
NChannels = srcfile['NChannels'][0, 0][0, 0]
side = str(srcfile['side'][0, 0][0])
ADperiod = srcfile['ADperiod'][0, 0][0, 0]
comm_seg = proc_src_comments(srcfile, filename)
block.segments.append(comm_seg)
chx = proc_src_units(srcfile, filename)
chan_nums = np.arange(NChannels, dtype='int')
chan_names = ['Chan{}'.format(i) for i in range(NChannels)]
chx.index = chan_nums
chx.channel_names = np.array(chan_names, dtype='string_')
block.channel_indexes.append(chx)
for rep in srcfile['sets'][0, 0].flatten():
proc_src_condition(rep, filename, ADperiod, side, block)
block.create_many_to_one_relationship()
return block
def proc_src_comments(srcfile, filename):
    '''Get the comments in an src file that has been processed by the official
    matlab function. See proc_src for details'''
comm_seg = Segment(name='Comments', file_origin=filename)
commentarray = srcfile['comments'].flatten()[0]
senders = [res[0] for res in commentarray['sender'].flatten()]
texts = [res[0] for res in commentarray['text'].flatten()]
timeStamps = [res[0, 0] for res in commentarray['timeStamp'].flatten()]
timeStamps = np.array(timeStamps, dtype=np.float32)
t_start = timeStamps.min()
timeStamps = pq.Quantity(timeStamps-t_start, units=pq.d).rescale(pq.s)
texts = np.array(texts, dtype='S')
senders = np.array(senders, dtype='S')
t_start = brainwaresrcio.convert_brainwaresrc_timestamp(t_start.tolist())
comments = Event(times=timeStamps, labels=texts, senders=senders)
comm_seg.events = [comments]
comm_seg.rec_datetime = t_start
return comm_seg
def proc_src_units(srcfile, filename):
'''Get the units in an src file that has been processed by the official
matlab function. See proc_src for details'''
chx = ChannelIndex(file_origin=filename,
index=np.array([], dtype=int))
un_unit = Unit(name='UnassignedSpikes', file_origin=filename,
elliptic=[], boundaries=[], timestamp=[], max_valid=[])
chx.units.append(un_unit)
sortInfo = srcfile['sortInfo'][0, 0]
timeslice = sortInfo['timeslice'][0, 0]
maxValid = timeslice['maxValid'][0, 0]
cluster = timeslice['cluster'][0, 0]
if len(cluster):
maxValid = maxValid[0, 0]
elliptic = [res.flatten() for res in cluster['elliptic'].flatten()]
boundaries = [res.flatten() for res in cluster['boundaries'].flatten()]
fullclust = zip(elliptic, boundaries)
for ielliptic, iboundaries in fullclust:
unit = Unit(file_origin=filename,
boundaries=[iboundaries],
elliptic=[ielliptic], timeStamp=[],
max_valid=[maxValid])
chx.units.append(unit)
return chx
def proc_src_condition(rep, filename, ADperiod, side, block):
'''Get the condition in a src file that has been processed by the official
matlab function. See proc_src for details'''
chx = block.channel_indexes[0]
stim = rep['stim'].flatten()
params = [str(res[0]) for res in stim['paramName'][0].flatten()]
values = [res for res in stim['paramVal'][0].flatten()]
stim = dict(zip(params, values))
sweepLen = rep['sweepLen'][0, 0]
if not len(rep):
return
unassignedSpikes = rep['unassignedSpikes'].flatten()
if len(unassignedSpikes):
damaIndexes = [res[0, 0] for res in unassignedSpikes['damaIndex']]
timeStamps = [res[0, 0] for res in unassignedSpikes['timeStamp']]
spikeunit = [res.flatten() for res in unassignedSpikes['spikes']]
respWin = np.array([], dtype=np.int32)
trains = proc_src_condition_unit(spikeunit, sweepLen, side, ADperiod,
respWin, damaIndexes, timeStamps,
filename)
chx.units[0].spiketrains.extend(trains)
atrains = [trains]
else:
damaIndexes = []
timeStamps = []
atrains = []
clusters = rep['clusters'].flatten()
if len(clusters):
IdStrings = [res[0] for res in clusters['IdString']]
sweepLens = [res[0, 0] for res in clusters['sweepLen']]
respWins = [res.flatten() for res in clusters['respWin']]
spikeunits = []
for cluster in clusters['sweeps']:
if len(cluster):
spikes = [res.flatten() for res in
cluster['spikes'].flatten()]
else:
spikes = []
spikeunits.append(spikes)
else:
IdStrings = []
sweepLens = []
respWins = []
spikeunits = []
for unit, IdString in zip(chx.units[1:], IdStrings):
unit.name = str(IdString)
fullunit = zip(spikeunits, chx.units[1:], sweepLens, respWins)
for spikeunit, unit, sweepLen, respWin in fullunit:
trains = proc_src_condition_unit(spikeunit, sweepLen, side, ADperiod,
respWin, damaIndexes, timeStamps,
filename)
atrains.append(trains)
unit.spiketrains.extend(trains)
atrains = zip(*atrains)
for trains in atrains:
segment = Segment(file_origin=filename, feature_type=-1,
go_by_closest_unit_center=False,
include_unit_bounds=False, **stim)
block.segments.append(segment)
segment.spiketrains = trains
def proc_src_condition_unit(spikeunit, sweepLen, side, ADperiod, respWin,
damaIndexes, timeStamps, filename):
'''Get the unit in a condition in a src file that has been processed by
the official matlab function. See proc_src for details'''
if not damaIndexes:
damaIndexes = [0]*len(spikeunit)
timeStamps = [0]*len(spikeunit)
trains = []
for sweep, damaIndex, timeStamp in zip(spikeunit, damaIndexes,
timeStamps):
timeStamp = brainwaresrcio.convert_brainwaresrc_timestamp(timeStamp)
train = proc_src_condition_unit_repetition(sweep, damaIndex,
timeStamp, sweepLen,
side, ADperiod, respWin,
filename)
trains.append(train)
return trains
def proc_src_condition_unit_repetition(sweep, damaIndex, timeStamp, sweepLen,
side, ADperiod, respWin, filename):
'''Get the repetion for a unit in a condition in a src file that has been
processed by the official matlab function. See proc_src for details'''
damaIndex = damaIndex.astype('int32')
if len(sweep):
times = np.array([res[0, 0] for res in sweep['time']])
shapes = np.concatenate([res.flatten()[np.newaxis][np.newaxis] for res
in sweep['shape']], axis=0)
trig2 = np.array([res[0, 0] for res in sweep['trig2']])
else:
times = np.array([])
shapes = np.array([[[]]])
trig2 = np.array([])
times = pq.Quantity(times, units=pq.ms, dtype=np.float32)
t_start = pq.Quantity(0, units=pq.ms, dtype=np.float32)
t_stop = pq.Quantity(sweepLen, units=pq.ms, dtype=np.float32)
trig2 = pq.Quantity(trig2, units=pq.ms, dtype=np.uint8)
waveforms = pq.Quantity(shapes, dtype=np.int8, units=pq.mV)
sampling_period = pq.Quantity(ADperiod, units=pq.us)
train = SpikeTrain(times=times, t_start=t_start, t_stop=t_stop,
trig2=trig2, dtype=np.float32, timestamp=timeStamp,
dama_index=damaIndex, side=side, copy=True,
respwin=respWin, waveforms=waveforms,
file_origin=filename)
train.annotations['side'] = side
train.sampling_period = sampling_period
return train
class BrainwareSrcIOTestCase(BaseTestIO, unittest.TestCase):
'''
Unit test testcase for neo.io.BrainwareSrcIO
'''
ioclass = BrainwareSrcIO
read_and_write_is_bijective = False
# These are the files it tries to read and test for compliance
files_to_test = FILES_TO_TEST
# these are reference files to compare to
files_to_compare = FILES_TO_COMPARE
# add the appropriate suffix depending on the python version
for i, fname in enumerate(files_to_compare):
if fname:
files_to_compare[i] += '_src_py%s.npz' % PY_VER
# Will fetch from g-node if they don't already exist locally
# How does it know to do this before any of the other tests?
files_to_download = files_to_test + files_to_compare
def setUp(self):
super(BrainwareSrcIOTestCase, self).setUp()
def test_reading_same(self):
for ioobj, path in self.iter_io_objects(return_path=True):
obj_reader_all = create_generic_reader(ioobj, readall=True)
obj_reader_base = create_generic_reader(ioobj, target=False)
obj_reader_next = create_generic_reader(ioobj, target='next_block')
obj_reader_single = create_generic_reader(ioobj)
obj_all = obj_reader_all()
obj_base = obj_reader_base()
obj_single = obj_reader_single()
obj_next = [obj_reader_next()]
while ioobj._isopen:
obj_next.append(obj_reader_next())
try:
assert_same_sub_schema(obj_all[0], obj_base)
assert_same_sub_schema(obj_all[0], obj_single)
assert_same_sub_schema(obj_all, obj_next)
except BaseException as exc:
exc.args += ('from ' + os.path.basename(path),)
raise
self.assertEqual(len(obj_all), len(obj_next))
def test_against_reference(self):
for filename, refname in zip(self.files_to_test,
self.files_to_compare):
if not refname:
continue
obj = self.read_file(filename=filename, readall=True)[0]
refobj = proc_src(self.get_filename_path(refname))
try:
assert_neo_object_is_compliant(obj)
assert_neo_object_is_compliant(refobj)
assert_same_sub_schema(obj, refobj)
except BaseException as exc:
exc.args += ('from ' + filename,)
raise
if __name__ == '__main__':
logger = logging.getLogger(BrainwareSrcIO.__module__ +
'.' +
BrainwareSrcIO.__name__)
logger.setLevel(100)
unittest.main()
|
CaoShuFeng/kubernetes
|
refs/heads/master
|
cluster/juju/layers/kubernetes-e2e/reactive/kubernetes_e2e.py
|
169
|
#!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charms import layer
from charms.layer import snap
from charms.reactive import hook
from charms.reactive import is_state
from charms.reactive import set_state
from charms.reactive import when
from charms.reactive import when_not
from charms.reactive.helpers import data_changed
from charmhelpers.core import hookenv, unitdata
from shlex import split
from subprocess import check_call
from subprocess import check_output
db = unitdata.kv()
USER = 'system:e2e'
@hook('upgrade-charm')
def reset_delivery_states():
''' Remove the state set when resources are unpacked. '''
install_snaps()
@when('kubernetes-e2e.installed')
def report_status():
''' Report the status of the charm. '''
messaging()
def messaging():
''' Probe our relations to determine the proper messaging to the
end user '''
missing_services = []
if not is_state('kubernetes-master.available'):
missing_services.append('kubernetes-master:http')
if not is_state('certificates.available'):
missing_services.append('certificates')
if not is_state('kubeconfig.ready'):
missing_services.append('kubernetes-master:kube-control')
if missing_services:
if len(missing_services) > 1:
subject = 'relations'
else:
subject = 'relation'
services = ','.join(missing_services)
message = 'Missing {0}: {1}'.format(subject, services)
hookenv.status_set('blocked', message)
return
hookenv.status_set('active', 'Ready to test.')
@when('config.changed.channel')
def channel_changed():
install_snaps()
def install_snaps():
''' Deliver the e2e and kubectl components from the binary resource stream
packages declared in the charm '''
channel = hookenv.config('channel')
hookenv.status_set('maintenance', 'Installing kubectl snap')
snap.install('kubectl', channel=channel, classic=True)
hookenv.status_set('maintenance', 'Installing kubernetes-test snap')
snap.install('kubernetes-test', channel=channel, classic=True)
set_state('kubernetes-e2e.installed')
@when('tls_client.ca.saved', 'tls_client.client.certificate.saved',
'tls_client.client.key.saved', 'kubernetes-master.available',
'kubernetes-e2e.installed', 'e2e.auth.bootstrapped')
@when_not('kubeconfig.ready')
def prepare_kubeconfig_certificates(master):
''' Prepare the data to feed to create the kubeconfig file. '''
layer_options = layer.options('tls-client')
# Get all the paths to the tls information required for kubeconfig.
ca = layer_options.get('ca_certificate_path')
creds = db.get('credentials')
data_changed('kube-control.creds', creds)
servers = get_kube_api_servers(master)
    # default kubeconfig location for the ubuntu user
kubeconfig_path = '/home/ubuntu/.kube/config'
# Create kubernetes configuration in the default location for ubuntu.
create_kubeconfig('/root/.kube/config', servers[0], ca,
token=creds['client_token'], user='root')
create_kubeconfig(kubeconfig_path, servers[0], ca,
token=creds['client_token'], user='ubuntu')
# Set permissions on the ubuntu users kubeconfig to ensure a consistent UX
cmd = ['chown', 'ubuntu:ubuntu', kubeconfig_path]
check_call(cmd)
messaging()
set_state('kubeconfig.ready')
@when('kube-control.connected')
def request_credentials(kube_control):
""" Request authorization creds."""
# Ask for a user, although we will be using the 'client_token'
kube_control.set_auth_request(USER)
@when('kube-control.auth.available')
def catch_change_in_creds(kube_control):
"""Request a service restart in case credential updates were detected."""
creds = kube_control.get_auth_credentials(USER)
if creds \
and data_changed('kube-control.creds', creds) \
and creds['user'] == USER:
# We need to cache the credentials here because if the
# master changes (master leader dies and replaced by a new one)
# the new master will have no recollection of our certs.
db.set('credentials', creds)
set_state('e2e.auth.bootstrapped')
@when('kubernetes-e2e.installed', 'kubeconfig.ready')
def set_app_version():
''' Declare the application version to juju '''
cmd = ['kubectl', 'version', '--client']
from subprocess import CalledProcessError
try:
version = check_output(cmd).decode('utf-8')
except CalledProcessError:
message = "Missing kubeconfig causes errors. Skipping version set."
hookenv.log(message)
return
git_version = version.split('GitVersion:"v')[-1]
version_from = git_version.split('",')[0]
hookenv.application_version_set(version_from.rstrip())
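# For reference, `kubectl version --client` prints a line of the form
#   Client Version: version.Info{Major:"1", Minor:"6", GitVersion:"v1.6.2", ...}
# so the two splits in set_app_version extract "1.6.2" (rstrip drops the
# trailing newline).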
def create_kubeconfig(kubeconfig, server, ca, key=None, certificate=None,
user='ubuntu', context='juju-context',
cluster='juju-cluster', password=None, token=None):
'''Create a configuration for Kubernetes based on path using the supplied
arguments for values of the Kubernetes server, CA, key, certificate, user
context and cluster.'''
if not key and not certificate and not password and not token:
raise ValueError('Missing authentication mechanism.')
# token and password are mutually exclusive. Error early if both are
# present. The developer has requested an impossible situation.
# see: kubectl config set-credentials --help
if token and password:
raise ValueError('Token and Password are mutually exclusive.')
# Create the config file with the address of the master server.
cmd = 'kubectl config --kubeconfig={0} set-cluster {1} ' \
'--server={2} --certificate-authority={3} --embed-certs=true'
check_call(split(cmd.format(kubeconfig, cluster, server, ca)))
# Delete old users
cmd = 'kubectl config --kubeconfig={0} unset users'
check_call(split(cmd.format(kubeconfig)))
# Create the credentials using the client flags.
cmd = 'kubectl config --kubeconfig={0} ' \
'set-credentials {1} '.format(kubeconfig, user)
if key and certificate:
cmd = '{0} --client-key={1} --client-certificate={2} '\
'--embed-certs=true'.format(cmd, key, certificate)
if password:
cmd = "{0} --username={1} --password={2}".format(cmd, user, password)
# This is mutually exclusive from password. They will not work together.
if token:
cmd = "{0} --token={1}".format(cmd, token)
check_call(split(cmd))
# Create a default context with the cluster.
cmd = 'kubectl config --kubeconfig={0} set-context {1} ' \
'--cluster={2} --user={3}'
check_call(split(cmd.format(kubeconfig, context, cluster, user)))
# Make the config use this new context.
cmd = 'kubectl config --kubeconfig={0} use-context {1}'
check_call(split(cmd.format(kubeconfig, context)))
def get_kube_api_servers(master):
'''Return the kubernetes api server address and port for this
relationship.'''
hosts = []
# Iterate over every service from the relation object.
for service in master.services():
for unit in service['hosts']:
hosts.append('https://{0}:{1}'.format(unit['hostname'],
unit['port']))
return hosts
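# The list returned looks like ['https://<hostname>:<port>', ...], one entry
# per master unit on the relation; prepare_kubeconfig_certificates above only
# uses servers[0].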
def determine_arch():
''' dpkg wrapper to surface the architecture we are tied to'''
cmd = ['dpkg', '--print-architecture']
output = check_output(cmd).decode('utf-8')
return output.rstrip()
|
bakhtout/odoo-educ
|
refs/heads/8.0
|
addons/website_sale_delivery/controllers/main.py
|
214
|
# -*- coding: utf-8 -*-
import openerp
from openerp import http
from openerp.http import request
import openerp.addons.website_sale.controllers.main
class website_sale(openerp.addons.website_sale.controllers.main.website_sale):
@http.route(['/shop/payment'], type='http', auth="public", website=True)
def payment(self, **post):
cr, uid, context = request.cr, request.uid, request.context
order = request.website.sale_get_order(context=context)
carrier_id = post.get('carrier_id')
if carrier_id:
carrier_id = int(carrier_id)
if order:
request.registry['sale.order']._check_carrier_quotation(cr, uid, order, force_carrier_id=carrier_id, context=context)
if carrier_id:
return request.redirect("/shop/payment")
res = super(website_sale, self).payment(**post)
return res
def order_lines_2_google_api(self, order_lines):
""" Transforms a list of order lines into a dict for google analytics """
order_lines_not_delivery = [line for line in order_lines if not line.is_delivery]
return super(website_sale, self).order_lines_2_google_api(order_lines_not_delivery)
|
pierrebaque/DeepOcclusion
|
refs/heads/master
|
VGG/BGsubstract.py
|
2
|
# Compute, for each pixel in a tensor, the probability of being background or foreground
import os
import pickle
import numpy as np
from PIL import Image, ImageDraw
import theano
import theano.tensor as T
from theano.tensor.nnet.conv import conv2d
from theano.tensor.shared_randomstreams import RandomStreams
class BGsubstract(object):
#Input :
# - x : input tensor, should be activation volume coming out of AlexNet CNN
# - y : desired segmentation if we want to do training
def __init__(self, x_activ, pretrained = False):
        # predefined number of activations (should match x_activ.shape[1])
        nb_activations = 4227
        # reshape our tensor to a 2D matrix with samples in the first dim and channel activations in the second dim
        x_activ_flat = x_activ.dimshuffle(0,2,3,1).reshape((x_activ.shape[0]*x_activ.shape[2]*x_activ.shape[3],x_activ.shape[1]))
        # we will build a standard NN with two hidden layers; set sizes here
nb_hiddens = 50
nb_hiddens2 = 50
# define model: neural network
def floatX(x):
return np.asarray(x, dtype=theano.config.floatX)
def init_weights(shape):
return theano.shared(floatX(np.random.randn(*shape) * 1e-3))
def dropout(x, p=0.0):
if p > 0:
retain_prob = 1 - p
                # x has samples in the first dim and features in the second dim; we want to keep only some features => binomial over the second dim
srng = RandomStreams()
x *= srng.binomial(n=1, size=(x.shape[1],), p=retain_prob, dtype=theano.config.floatX).dimshuffle('x',0)
x /= retain_prob
return x
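        # Note: this is "inverted" dropout - activations kept at train time
        # are scaled up by 1/retain_prob, so at test time (p=0) x passes
        # through unchanged and no rescaling is needed.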
def model(x, w_h, b_h, w_h2, b_h2, w_o, b_o, p=0.0):
#h = T.maximum(0, T.dot(x, w_h) + b_h)
h = T.nnet.sigmoid(T.dot(x, w_h) + b_h)
#h2 = T.maximum(0, T.dot(h, w_h2) + b_h2)
h2 = T.nnet.sigmoid(T.dot(h, w_h2) + b_h2)
h2_d = dropout(h2, p)
p_fb = T.nnet.softmax(T.dot(h2_d, w_o) + b_o)
return p_fb
if pretrained == False :
w_h = init_weights((nb_activations, nb_hiddens))
b_h = init_weights((nb_hiddens,))
w_h2 = init_weights((nb_hiddens, nb_hiddens2))
b_h2 = init_weights((nb_hiddens2,))
w_o = init_weights((nb_hiddens2, 2))
b_o = init_weights((2,))
else:
if len(os.path.dirname(__file__)) >0:
paramsValues = pickle.load(open(os.path.dirname(__file__)+"/models/paramsBG.pickle","rb"))
else:
paramsValues = pickle.load(open("./models/paramsBG.pickle","rb"))
w_h = theano.shared(paramsValues[0])
b_h = theano.shared(paramsValues[1])
w_h2 = theano.shared(paramsValues[2])
b_h2 = theano.shared(paramsValues[3])
w_o = theano.shared(paramsValues[4])
b_o = theano.shared(paramsValues[5])
self.params = [w_h, b_h, w_h2, b_h2, w_o, b_o]
#get foreground background prob flatten
self.p_fb_flat_test = model(x_activ_flat, *self.params, p=0.0)
self.p_fb_flat_train = model(x_activ_flat, *self.params, p=0.0)
#get foreground background proba reshaped as tensor, that s all we want for inference
self.p_fb = self.p_fb_flat_test.reshape((x_activ.shape[0],x_activ.shape[2],x_activ.shape[3],2)).dimshuffle(0,3,1,2)
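        # The flat (N*H*W, 2) softmax output is reshaped to (N, H, W, 2) and
        # dimshuffled to (N, 2, H, W) so p_fb is channel-aligned with the
        # input activation volume.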
def getParams(self):
params_values = []
for p in range(len(self.params)):
params_values.append(self.params[p].get_value())
return params_values
def setParams(self, params_values):
for p in range(len(params_values)):
self.params[p].set_value(params_values[p])
|
cloudviz/agentless-system-crawler
|
refs/heads/master
|
tests/functional/test_functional_cos_emitter.py
|
1
|
import unittest
import docker
import os
import subprocess
class CrawlerCosEmitterTests(unittest.TestCase):
def setUp(self):
self.docker = docker.Client(
base_url='unix://var/run/docker.sock', version='auto')
        os.mkdir('/etc/cos-secrets', 0755)
f=open("/etc/cos-secrets/access_key", "w+")
f.write("test")
f.close()
f=open("/etc/cos-secrets/secret_key", "w+")
f.write("testforall")
f.close()
f=open("/etc/cos-secrets/location", "w+")
f.write("test")
f.close()
self.start_minio_container()
self.start_crawled_container()
def tearDown(self):
containers = self.docker.containers()
for container in containers:
self.docker.stop(container=container['Id'])
self.docker.remove_container(container=container['Id'])
def start_minio_container(self):
self.docker.pull(repository='shri4u/minio2', tag='latest')
self.minio_container = self.docker.create_container(
image='shri4u/minio2', ports=[9000],
host_config=self.docker.create_host_config(port_bindings={
9000: 9000
}),
environment={'MINIO_ACCESS_KEY': 'test',
'MINIO_SECRET_KEY': 'testforall'},
command="server /data")
self.docker.start(container=self.minio_container['Id'])
def start_crawled_container(self):
# start a container to be crawled
self.docker.pull(repository='alpine', tag='latest')
self.container = self.docker.create_container(
image='alpine:latest', command='/bin/sleep 60')
self.docker.start(container=self.container['Id'])
def testFuntionalCosEmitter(self):
env = os.environ.copy()
mypath = os.path.dirname(os.path.realpath(__file__))
# crawler itself needs to be root
process = subprocess.Popen(
[
'/usr/bin/python', mypath + '/../../crawler/crawler.py',
'--url', 'cos://127.0.0.1:9000/test',
'--features', 'cpu,memory',
'--crawlContainers', self.container['Id'],
'--crawlmode', 'OUTCONTAINER',
'--numprocesses', '1'
],
env=env)
stdout, stderr = process.communicate()
assert process.returncode == 0
print stderr
print stdout
if __name__ == '__main__':
unittest.main()
|
Stranger6667/django-hstore
|
refs/heads/master
|
runtests.py
|
3
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, "tests")
if __name__ == "__main__":
from django.core.management import execute_from_command_line
args = sys.argv
args.insert(1, "test")
execute_from_command_line(args)
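# e.g. `python runtests.py some_app.SomeTest` (hypothetical label) behaves
# like `django-admin test some_app.SomeTest` with the settings module and
# tests path configured above.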
|
lz1988/company-site
|
refs/heads/master
|
tests/modeltests/properties/tests.py
|
126
|
from __future__ import absolute_import
from django.test import TestCase
from .models import Person
class PropertyTests(TestCase):
def setUp(self):
self.a = Person(first_name='John', last_name='Lennon')
self.a.save()
def test_getter(self):
self.assertEqual(self.a.full_name, 'John Lennon')
def test_setter(self):
# The "full_name" property hasn't provided a "set" method.
self.assertRaises(AttributeError, setattr, self.a, 'full_name', 'Paul McCartney')
# But "full_name_2" has, and it can be used to initialise the class.
a2 = Person(full_name_2 = 'Paul McCartney')
a2.save()
self.assertEqual(a2.first_name, 'Paul')
|
hanlind/nova
|
refs/heads/master
|
nova/tests/unit/scheduler/test_rpcapi.py
|
16
|
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for nova.scheduler.rpcapi
"""
import mock
from oslo_config import cfg
from nova import context
from nova import objects
from nova.scheduler import rpcapi as scheduler_rpcapi
from nova import test
CONF = cfg.CONF
class SchedulerRpcAPITestCase(test.NoDBTestCase):
def _test_scheduler_api(self, method, rpc_method, expected_args=None,
**kwargs):
ctxt = context.RequestContext('fake_user', 'fake_project')
rpcapi = scheduler_rpcapi.SchedulerAPI()
self.assertIsNotNone(rpcapi.client)
self.assertEqual(rpcapi.client.target.topic, CONF.scheduler_topic)
expected_retval = 'foo' if rpc_method == 'call' else None
expected_version = kwargs.pop('version', None)
expected_fanout = kwargs.pop('fanout', None)
expected_kwargs = kwargs.copy()
if expected_args:
expected_kwargs = expected_args
prepare_kwargs = {}
if expected_fanout:
prepare_kwargs['fanout'] = True
if expected_version:
prepare_kwargs['version'] = expected_version
# NOTE(sbauza): We need to persist the method before mocking it
orig_prepare = rpcapi.client.prepare
def fake_can_send_version(version=None):
return orig_prepare(version=version).can_send_version()
@mock.patch.object(rpcapi.client, rpc_method,
return_value=expected_retval)
@mock.patch.object(rpcapi.client, 'prepare',
return_value=rpcapi.client)
@mock.patch.object(rpcapi.client, 'can_send_version',
side_effect=fake_can_send_version)
def do_test(mock_csv, mock_prepare, mock_rpc_method):
retval = getattr(rpcapi, method)(ctxt, **kwargs)
self.assertEqual(retval, expected_retval)
mock_prepare.assert_called_once_with(**prepare_kwargs)
mock_rpc_method.assert_called_once_with(ctxt, method,
**expected_kwargs)
do_test()
def test_select_destinations(self):
fake_spec = objects.RequestSpec()
self._test_scheduler_api('select_destinations', rpc_method='call',
spec_obj=fake_spec,
version='4.3')
@mock.patch.object(objects.RequestSpec, 'to_legacy_filter_properties_dict')
@mock.patch.object(objects.RequestSpec, 'to_legacy_request_spec_dict')
def test_select_destinations_with_old_manager(self, to_spec, to_props):
self.flags(scheduler='4.0', group='upgrade_levels')
to_spec.return_value = 'fake_request_spec'
to_props.return_value = 'fake_prop'
fake_spec = objects.RequestSpec()
self._test_scheduler_api('select_destinations', rpc_method='call',
expected_args={'request_spec': 'fake_request_spec',
'filter_properties': 'fake_prop'},
spec_obj=fake_spec,
version='4.0')
def test_update_aggregates(self):
self._test_scheduler_api('update_aggregates', rpc_method='cast',
aggregates='aggregates',
version='4.1',
fanout=True)
def test_delete_aggregate(self):
self._test_scheduler_api('delete_aggregate', rpc_method='cast',
aggregate='aggregate',
version='4.1',
fanout=True)
def test_update_instance_info(self):
self._test_scheduler_api('update_instance_info', rpc_method='cast',
host_name='fake_host',
instance_info='fake_instance',
fanout=True,
version='4.2')
def test_delete_instance_info(self):
self._test_scheduler_api('delete_instance_info', rpc_method='cast',
host_name='fake_host',
instance_uuid='fake_uuid',
fanout=True,
version='4.2')
def test_sync_instance_info(self):
self._test_scheduler_api('sync_instance_info', rpc_method='cast',
host_name='fake_host',
instance_uuids=['fake1', 'fake2'],
fanout=True,
version='4.2')
|
manabu/linebot1
|
refs/heads/master
|
app.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask, request, abort
from flask_apscheduler import APScheduler
import pymongo
import json
import datetime
import time
import os
import sys
import re
# Line bot
from linebot import (
LineBotApi, WebhookHandler
)
from linebot.exceptions import (
InvalidSignatureError
)
from linebot.models import (
MessageEvent, TextMessage, TextSendMessage,
SourceUser, SourceGroup, SourceRoom,
TemplateSendMessage, ConfirmTemplate, MessageTemplateAction,
ButtonsTemplate, URITemplateAction, PostbackTemplateAction,
CarouselTemplate, CarouselColumn, PostbackEvent,
StickerMessage, StickerSendMessage, LocationMessage, LocationSendMessage,
ImageMessage, VideoMessage, AudioMessage,
UnfollowEvent, FollowEvent, JoinEvent, LeaveEvent, BeaconEvent
)
# mongodb
mongodbhost = os.getenv('MONGODB_HOST', 'localhost')
# environment values come back as strings; MongoClient needs an int port
mongodbport = int(os.getenv('MONGODB_PORT', 27017))
mongoclient = pymongo.MongoClient(mongodbhost, mongodbport)
linedb=mongoclient.linebot
botplace = linedb.place
botlog = linedb.log
bottime = linedb.bottime
# check environment value
# get channel_secret and channel_access_token from your environment variable
channel_secret = os.getenv('LINE_CHANNEL_SECRET', None)
channel_access_token = os.getenv('LINE_CHANNEL_ACCESS_TOKEN', None)
if channel_secret is None:
print('Specify LINE_CHANNEL_SECRET as environment variable.')
sys.exit(1)
if channel_access_token is None:
print('Specify LINE_CHANNEL_ACCESS_TOKEN as environment variable.')
sys.exit(1)
# job scheduler
class Config(object):
JOBS = [
{
'id': 'job1',
'func': '__main__:job1',
'args': (1, 2),
'trigger': 'interval',
'seconds': 60
}
]
SCHEDULER_API_ENABLED = True
def send(sendid,msg):
print "push message"
line_bot_api.push_message(sendid,TextSendMessage(msg))
def job1(a, b):
#print(str(a) + ' ' + str(b))
now = datetime.datetime.now()
#if now.hour==7 and now.minute==45:
# send(u'おはようございます。朝の薬のみました?のませました?薬の写真おくってみませんか?')
#if now.hour==12 and now.minute==45:
# send(u'こんにちは。昼の薬のみました?のませました?近況はいかがでしょう?')
#if now.hour==19 and now.minute==45:
# send(u'こんばんは。夜の薬のみました?のませました?お話してみませんか?')
print now
items = bottime.find({"hour":now.hour,"minute":now.minute})
if not isinstance(items,type(None)):
print "Timer set"
for item in items:
print item
send(item["id"],u"薬のみましたか?のませましたか?\n"+str(now.hour)+u"時"+str(now.minute)+u"分です。"+u"設定された時間です\n"+u"近況はいかがですか?\n")
app = Flask(__name__)
app.config.from_object(Config())
scheduler = APScheduler()
# it is also possible to enable the API directly
# scheduler.api_enabled = True
scheduler.init_app(app)
scheduler.start()
# Line bot
line_bot_api = LineBotApi(channel_access_token)
handler = WebhookHandler(channel_secret)
# help message
helpmessage="""エコーと、打ってから後ろに何か続けると、うった単語を返します
「時間追加8時5分」、指定した時間をセットします
「時間消去8時10分」、指定した時間の設定を削除します
「時間確認」、現在設定されている時間を確認します
「時間全部消去」、設定されている時間をすべて消去します
「またね」、ボットが退出します
"""
@app.route("/")
def hello():
return "Hello World!"
@app.route("/callback", methods=['POST'])
def callback():
# get X-Line-Signature header value
signature = request.headers['X-Line-Signature']
# get request body as text
body = request.get_data(as_text=True)
app.logger.info("Request body: " + body)
# handle webhook body
try:
handler.handle(body, signature)
except InvalidSignatureError:
abort(400)
return 'OK'
@handler.add(MessageEvent, message=TextMessage)
def handle_message(event):
#line_bot_api.reply_message(
# event.reply_token,
# TextSendMessage(text=event.message.text))
source = event.source
print dir(source)
print source.as_json_string()
print source.type
id = ""
if source.type == "user":
id = source.user_id
elif source.type == "group":
id = source.group_id
elif source.type == "room":
id = source.room_id
print id
text = event.message.text
if re.compile(u"^エコー").search(text):
line_bot_api.reply_message(event.reply_token, TextSendMessage(text=u'あなたは['+event.message.text+u']といいました'))
source = event.source
print dir(source)
#print source.type
#print source.user_id
#print("sleep 30sec")
#time.sleep(30)
print("send push to ["+source.user_id+"]")
line_bot_api.push_message(source.user_id,TextSendMessage(text=u'あなたは30秒前に、['+event.message.text+u']といいました'))
print("finish send push")
elif re.compile(u"^(おしえて|教えて|ヘルプ|help)$").search(text):
#line_bot_api.reply_message(event.reply_token, TextSendMessage(text=u'エコーと、打ってから後ろに何か続けると、うった単語を返します'))
line_bot_api.reply_message(event.reply_token, TextSendMessage(text=helpmessage))
elif re.compile(u"^(またね)$").search(text):
if source.type == "room":
line_bot_api.reply_message(event.reply_token, TextSendMessage(text=u'またいつでも呼んでください。ありがとうございました。'))
line_bot_api.leave_room(id)
place = botplace.find_one({"id":id})
print place
if isinstance(place,type(None)):
print "leave channel but insert id"
print botplace.insert_one({"id":id,"type":source.type}).inserted_id
botplace.update({"id":id},{'$set':{"join":False}})
else:
line_bot_api.reply_message(event.reply_token, TextSendMessage(text=u'ここからは退出できないようです'))
elif re.compile(u"^(時間追加|時間設定)[\s]?([0-9]*)(?:時|じ|:|:)([0-9]*)(?:分|ふん)?$").search(text):
print "Time add"
print text
m = re.compile(u"^(時間追加|時間設定)[\s]?([0-9]*)(?:時|じ|:|:)([0-9]*)(?:分|ふん)?$").search(text)
bottime.insert_one({"id":id,"hour":int(m.group(2)),"minute":int(m.group(3))})
alltime=""
for item in bottime.find({"id":id}):
print item
alltime=alltime+ str(item["hour"])
alltime=alltime+ u"時"
alltime=alltime+ str(item["minute"])
alltime=alltime+ u"分\n"
line_bot_api.reply_message(event.reply_token, TextSendMessage(text=u'時間追加しました\nいま設定されている時間は以下のようになります\n'+alltime))
elif re.compile(u"^(時間削除|時間消去|時刻削除|時刻消去)[\s]?([0-9]*)(?:時|じ|:|:)([0-9]*)(?:分|ふん)?$").search(text):
print "Time delete"
print text
m = re.compile(u"^(時間削除|時間消去|時刻削除|時刻消去)[\s]?([0-9]*)(?:時|じ|:|:)([0-9]*)(?:分|ふん)?$").search(text)
bottime.delete_many({"id":id,"hour":int(m.group(2)),"minute":int(m.group(3))})
alltime=""
for item in bottime.find({"id":id}):
print item
alltime=alltime+ str(item["hour"])
alltime=alltime+ u"時"
alltime=alltime+ str(item["minute"])
alltime=alltime+ u"分\n"
line_bot_api.reply_message(event.reply_token, TextSendMessage(text=u'時間を削除しました\nいま設定されている時間は以下のようになります\n'+alltime))
elif re.compile(u"^(時間全部消去)$").search(text):
bottime.delete_many({"id":id})
line_bot_api.reply_message(event.reply_token, TextSendMessage(text=u'設定されている時間を全部消しました'))
elif re.compile(u"^(時間確認)$").search(text):
alltime=""
items=bottime.find({"id":id})
if not isinstance(items,type(None)):
for item in items:
print item
alltime=alltime+ str(item["hour"])
alltime=alltime+ u"時"
alltime=alltime+ str(item["minute"])
alltime=alltime+ u"分\n"
line_bot_api.reply_message(event.reply_token, TextSendMessage(text=u'いま設定されている時間は以下のようになります\n'+alltime))
else:
line_bot_api.reply_message(event.reply_token, TextSendMessage(text=u'現在設定されている時間はございません\n'+alltime))
print "message json"
print event.message.as_json_string()
print "event json"
print event.as_json_string()
print "insert to log"
print linedb.name
#print botlog.insert_one({"x":10}).inserted_id
print botlog.insert_one(json.loads(event.as_json_string())).inserted_id
# join
@handler.add(JoinEvent)
def handle_join(event):
source = event.source
print "join"
id = ""
if source.type == "user":
id = source.user_id
elif source.type == "group":
id = source.group_id
elif source.type == "room":
id = source.room_id
print id
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text='Joined this ' + event.source.type))
print "insert to log raw message"
print botlog.insert_one(json.loads(event.as_json_string())).inserted_id
print "check id"
place = botplace.find_one({"id":id})
print place
print type(place)
if isinstance(place,type(None)):
print "join channel create insert id"
print botplace.insert_one({"id":id,"type":source.type}).inserted_id
else:
print "already created"
print dir(place)
botplace.update({"id":id},{'$set':{"join":True}})
@handler.add(LeaveEvent)
def handle_leave():
app.logger.info("Got leave event")
@handler.add(PostbackEvent)
def handle_postback(event):
if event.postback.data == 'ping':
line_bot_api.reply_message(
event.reply_token, TextSendMessage(text='pong'))
@handler.add(BeaconEvent)
def handle_beacon(event):
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text='Got beacon event. hwid=' + event.beacon.hwid))
if __name__ == "__main__":
app.run(
host='0.0.0.0',
port='9000'
)
|
slumtrimpet/ctypesgen
|
refs/heads/master
|
ctypesgencore/printer_python/preamble.py
|
15
|
import ctypes, os, sys
from ctypes import *
_int_types = (c_int16, c_int32)
if hasattr(ctypes, 'c_int64'):
# Some builds of ctypes apparently do not have c_int64
# defined; it's a pretty good bet that these builds do not
# have 64-bit pointers.
_int_types += (c_int64,)
for t in _int_types:
if sizeof(t) == sizeof(c_size_t):
c_ptrdiff_t = t
del t
del _int_types
class c_void(Structure):
# c_void_p is a buggy return type, converting to int, so
# POINTER(None) == c_void_p is actually written as
# POINTER(c_void), so it can be treated as a real pointer.
_fields_ = [('dummy', c_int)]
def POINTER(obj):
p = ctypes.POINTER(obj)
# Convert None to a real NULL pointer to work around bugs
# in how ctypes handles None on 64-bit platforms
if not isinstance(p.from_param, classmethod):
def from_param(cls, x):
if x is None:
return cls()
else:
return x
p.from_param = classmethod(from_param)
return p
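# Illustrative usage sketch for the wrapped POINTER (commented out so
# nothing runs on import; behavior as intended by the code above):
#   LP_c_int = POINTER(c_int)
#   null_p = LP_c_int.from_param(None)               # None becomes a real NULL
#   real_p = LP_c_int.from_param(pointer(c_int(7)))  # non-None passes through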
class UserString:
def __init__(self, seq):
if isinstance(seq, basestring):
self.data = seq
elif isinstance(seq, UserString):
self.data = seq.data[:]
else:
self.data = str(seq)
def __str__(self): return str(self.data)
def __repr__(self): return repr(self.data)
def __int__(self): return int(self.data)
def __long__(self): return long(self.data)
def __float__(self): return float(self.data)
def __complex__(self): return complex(self.data)
def __hash__(self): return hash(self.data)
def __cmp__(self, string):
if isinstance(string, UserString):
return cmp(self.data, string.data)
else:
return cmp(self.data, string)
def __contains__(self, char):
return char in self.data
def __len__(self): return len(self.data)
def __getitem__(self, index): return self.__class__(self.data[index])
def __getslice__(self, start, end):
start = max(start, 0); end = max(end, 0)
return self.__class__(self.data[start:end])
def __add__(self, other):
if isinstance(other, UserString):
return self.__class__(self.data + other.data)
elif isinstance(other, basestring):
return self.__class__(self.data + other)
else:
return self.__class__(self.data + str(other))
def __radd__(self, other):
if isinstance(other, basestring):
return self.__class__(other + self.data)
else:
return self.__class__(str(other) + self.data)
def __mul__(self, n):
return self.__class__(self.data*n)
__rmul__ = __mul__
def __mod__(self, args):
return self.__class__(self.data % args)
# the following methods are defined in alphabetical order:
def capitalize(self): return self.__class__(self.data.capitalize())
def center(self, width, *args):
return self.__class__(self.data.center(width, *args))
def count(self, sub, start=0, end=sys.maxint):
return self.data.count(sub, start, end)
def decode(self, encoding=None, errors=None): # XXX improve this?
if encoding:
if errors:
return self.__class__(self.data.decode(encoding, errors))
else:
return self.__class__(self.data.decode(encoding))
else:
return self.__class__(self.data.decode())
def encode(self, encoding=None, errors=None): # XXX improve this?
if encoding:
if errors:
return self.__class__(self.data.encode(encoding, errors))
else:
return self.__class__(self.data.encode(encoding))
else:
return self.__class__(self.data.encode())
def endswith(self, suffix, start=0, end=sys.maxint):
return self.data.endswith(suffix, start, end)
def expandtabs(self, tabsize=8):
return self.__class__(self.data.expandtabs(tabsize))
def find(self, sub, start=0, end=sys.maxint):
return self.data.find(sub, start, end)
def index(self, sub, start=0, end=sys.maxint):
return self.data.index(sub, start, end)
def isalpha(self): return self.data.isalpha()
def isalnum(self): return self.data.isalnum()
def isdecimal(self): return self.data.isdecimal()
def isdigit(self): return self.data.isdigit()
def islower(self): return self.data.islower()
def isnumeric(self): return self.data.isnumeric()
def isspace(self): return self.data.isspace()
def istitle(self): return self.data.istitle()
def isupper(self): return self.data.isupper()
def join(self, seq): return self.data.join(seq)
def ljust(self, width, *args):
return self.__class__(self.data.ljust(width, *args))
def lower(self): return self.__class__(self.data.lower())
def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
def partition(self, sep):
return self.data.partition(sep)
def replace(self, old, new, maxsplit=-1):
return self.__class__(self.data.replace(old, new, maxsplit))
def rfind(self, sub, start=0, end=sys.maxint):
return self.data.rfind(sub, start, end)
def rindex(self, sub, start=0, end=sys.maxint):
return self.data.rindex(sub, start, end)
def rjust(self, width, *args):
return self.__class__(self.data.rjust(width, *args))
def rpartition(self, sep):
return self.data.rpartition(sep)
def rstrip(self, chars=None): return self.__class__(self.data.rstrip(chars))
def split(self, sep=None, maxsplit=-1):
return self.data.split(sep, maxsplit)
def rsplit(self, sep=None, maxsplit=-1):
return self.data.rsplit(sep, maxsplit)
def splitlines(self, keepends=0): return self.data.splitlines(keepends)
def startswith(self, prefix, start=0, end=sys.maxint):
return self.data.startswith(prefix, start, end)
def strip(self, chars=None): return self.__class__(self.data.strip(chars))
def swapcase(self): return self.__class__(self.data.swapcase())
def title(self): return self.__class__(self.data.title())
def translate(self, *args):
return self.__class__(self.data.translate(*args))
def upper(self): return self.__class__(self.data.upper())
def zfill(self, width): return self.__class__(self.data.zfill(width))
class MutableString(UserString):
"""mutable string objects
Python strings are immutable objects. This has the advantage, that
strings may be used as dictionary keys. If this property isn't needed
and you insist on changing string values in place instead, you may cheat
and use MutableString.
But the purpose of this class is an educational one: to prevent
people from inventing their own mutable string class derived
from UserString and then forgetting to remove (override) the
__hash__ method inherited from UserString. This would lead to
errors that would be very hard to track down.
A faster and better solution is to rewrite your program using lists."""
def __init__(self, string=""):
self.data = string
def __hash__(self):
raise TypeError("unhashable type (it is mutable)")
def __setitem__(self, index, sub):
if index < 0:
index += len(self.data)
if index < 0 or index >= len(self.data): raise IndexError
self.data = self.data[:index] + sub + self.data[index+1:]
def __delitem__(self, index):
if index < 0:
index += len(self.data)
if index < 0 or index >= len(self.data): raise IndexError
self.data = self.data[:index] + self.data[index+1:]
def __setslice__(self, start, end, sub):
start = max(start, 0); end = max(end, 0)
if isinstance(sub, UserString):
self.data = self.data[:start]+sub.data+self.data[end:]
elif isinstance(sub, basestring):
self.data = self.data[:start]+sub+self.data[end:]
else:
self.data = self.data[:start]+str(sub)+self.data[end:]
def __delslice__(self, start, end):
start = max(start, 0); end = max(end, 0)
self.data = self.data[:start] + self.data[end:]
def immutable(self):
return UserString(self.data)
def __iadd__(self, other):
if isinstance(other, UserString):
self.data += other.data
elif isinstance(other, basestring):
self.data += other
else:
self.data += str(other)
return self
def __imul__(self, n):
self.data *= n
return self
class String(MutableString, Union):
_fields_ = [('raw', POINTER(c_char)),
('data', c_char_p)]
def __init__(self, obj=""):
if isinstance(obj, (str, unicode, UserString)):
self.data = str(obj)
else:
self.raw = obj
def __len__(self):
return self.data and len(self.data) or 0
def from_param(cls, obj):
# Convert None or 0
if obj is None or obj == 0:
return cls(POINTER(c_char)())
# Convert from String
elif isinstance(obj, String):
return obj
# Convert from str
elif isinstance(obj, str):
return cls(obj)
# Convert from c_char_p
elif isinstance(obj, c_char_p):
return obj
# Convert from POINTER(c_char)
elif isinstance(obj, POINTER(c_char)):
return obj
# Convert from raw pointer
elif isinstance(obj, int):
return cls(cast(obj, POINTER(c_char)))
# Convert from object
else:
return String.from_param(obj._as_parameter_)
from_param = classmethod(from_param)
def ReturnString(obj, func=None, arguments=None):
return String.from_param(obj)
# As of ctypes 1.0, ctypes does not support custom error-checking
# functions on callbacks, nor does it support custom datatypes on
# callbacks, so we must ensure that all callbacks return
# primitive datatypes.
#
# Non-primitive return values wrapped with UNCHECKED won't be
# typechecked, and will be converted to c_void_p.
def UNCHECKED(type):
if (hasattr(type, "_type_") and isinstance(type._type_, str)
and type._type_ != "P"):
return type
else:
return c_void_p
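# Illustrative sketch of UNCHECKED on callback return types (the
# CFUNCTYPE signatures below are hypothetical, not executed):
#   CFUNCTYPE(UNCHECKED(c_int), c_void_p)          # stays c_int
#   CFUNCTYPE(UNCHECKED(POINTER(c_char)), c_int)   # coerced to c_void_p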
# ctypes doesn't have direct support for variadic functions, so we have to write
# our own wrapper class
class _variadic_function(object):
def __init__(self,func,restype,argtypes):
self.func=func
self.func.restype=restype
self.argtypes=argtypes
def _as_parameter_(self):
# So we can pass this variadic function as a function pointer
return self.func
def __call__(self,*args):
fixed_args=[]
i=0
for argtype in self.argtypes:
# Typecheck what we can
fixed_args.append(argtype.from_param(args[i]))
i+=1
return self.func(*fixed_args+list(args[i:]))
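# Illustrative usage sketch (hypothetical libc binding, not executed):
#   _libc = ctypes.CDLL(None)
#   printf = _variadic_function(_libc.printf, c_int, [String])
#   printf("%s %d\n", "answer:", 42)
# The fixed arguments are run through from_param for type checking;
# the variadic tail is handed to ctypes unchanged.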
|
benspaulding/django
|
refs/heads/master
|
tests/regressiontests/urlpatterns_reverse/included_namespace_urls.py
|
92
|
from __future__ import absolute_import
from django.conf.urls import patterns, url, include
from .namespace_urls import URLObject
from .views import view_class_instance
testobj3 = URLObject('testapp', 'test-ns3')
urlpatterns = patterns('regressiontests.urlpatterns_reverse.views',
url(r'^normal/$', 'empty_view', name='inc-normal-view'),
url(r'^normal/(?P<arg1>\d+)/(?P<arg2>\d+)/$', 'empty_view', name='inc-normal-view'),
url(r'^\+\\\$\*/$', 'empty_view', name='inc-special-view'),
url(r'^mixed_args/(\d+)/(?P<arg2>\d+)/$', 'empty_view', name='inc-mixed-args'),
url(r'^no_kwargs/(\d+)/(\d+)/$', 'empty_view', name='inc-no-kwargs'),
url(r'^view_class/(?P<arg1>\d+)/(?P<arg2>\d+)/$', view_class_instance, name='inc-view-class'),
(r'^test3/', include(testobj3.urls)),
(r'^ns-included3/', include('regressiontests.urlpatterns_reverse.included_urls', namespace='inc-ns3')),
(r'^ns-included4/', include('regressiontests.urlpatterns_reverse.namespace_urls', namespace='inc-ns4')),
)
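# Usage sketch (hedged: assumes this urlconf is itself included
# elsewhere under a hypothetical namespace 'inc-ns'):
#   from django.core.urlresolvers import reverse
#   reverse('inc-ns:inc-normal-view', kwargs={'arg1': 37, 'arg2': 42})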
|
drakipovic/deep-learning
|
refs/heads/master
|
lab2/train.py
|
1
|
import time
import numpy as np
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import nn
import layers
DATA_DIR = '/home/dinek/datasets/MNIST/'
SAVE_DIR = "/home/dinek/source/fer/out/"
config = {}
config['max_epochs'] = 8
config['batch_size'] = 50
config['save_dir'] = SAVE_DIR
config['lr_policy'] = {1:{'lr':1e-1}, 3:{'lr':1e-2}, 5:{'lr':1e-3}, 7:{'lr':1e-4}}
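# lr_policy maps an epoch number to the hyperparameters taking effect
# from that epoch on; e.g. epochs 3-4 presumably run with lr=1e-2 and
# epochs 5-6 with lr=1e-3 (the exact switching logic lives in nn.train,
# which is not shown here).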
#np.random.seed(100)
np.random.seed(int(time.time() * 1e6) % 2**31)
dataset = input_data.read_data_sets(DATA_DIR, one_hot=True)
train_x = dataset.train.images
train_x = train_x.reshape([-1, 1, 28, 28])
train_y = dataset.train.labels
valid_x = dataset.validation.images
valid_x = valid_x.reshape([-1, 1, 28, 28])
valid_y = dataset.validation.labels
test_x = dataset.test.images
test_x = test_x.reshape([-1, 1, 28, 28])
test_y = dataset.test.labels
train_mean = train_x.mean()
train_x -= train_mean
valid_x -= train_mean
test_x -= train_mean
net = []
inputs = np.random.randn(config['batch_size'], 1, 28, 28)
net += [layers.Convolution(inputs, 16, 5, "conv1")]
net += [layers.MaxPooling(net[-1], "pool1")]
net += [layers.ReLU(net[-1], "relu1")]
net += [layers.Convolution(net[-1], 32, 5, "conv2")]
net += [layers.MaxPooling(net[-1], "pool2")]
net += [layers.ReLU(net[-1], "relu2")]
# out = 7x7
net += [layers.Flatten(net[-1], "flatten3")]
net += [layers.FC(net[-1], 512, "fc3")]
net += [layers.ReLU(net[-1], "relu3")]
net += [layers.FC(net[-1], 10, "logits")]
loss = layers.SoftmaxCrossEntropyWithLogits()
nn.train(train_x, train_y, valid_x, valid_y, net, loss, config)
nn.evaluate("Test", test_x, test_y, net, loss, config)
|
GbalsaC/bitnamiP
|
refs/heads/master
|
venv/lib/python2.7/site-packages/PIL/TiffImagePlugin.py
|
18
|
#
# The Python Imaging Library.
# $Id$
#
# TIFF file handling
#
# TIFF is a flexible, if somewhat aged, image file format originally
# defined by Aldus. Although TIFF supports a wide variety of pixel
# layouts and compression methods, the name doesn't really stand for
# "thousands of incompatible file formats," it just feels that way.
#
# To read TIFF data from a stream, the stream must be seekable. For
# progressive decoding, make sure to use TIFF files where the tag
# directory is placed first in the file.
#
# History:
# 1995-09-01 fl Created
# 1996-05-04 fl Handle JPEGTABLES tag
# 1996-05-18 fl Fixed COLORMAP support
# 1997-01-05 fl Fixed PREDICTOR support
# 1997-08-27 fl Added support for rational tags (from Perry Stoll)
# 1998-01-10 fl Fixed seek/tell (from Jan Blom)
# 1998-07-15 fl Use private names for internal variables
# 1999-06-13 fl Rewritten for PIL 1.0 (1.0)
# 2000-10-11 fl Additional fixes for Python 2.0 (1.1)
# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2)
# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3)
# 2001-12-18 fl Added workaround for broken Matrox library
# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart)
# 2003-05-19 fl Check FILLORDER tag
# 2003-09-26 fl Added RGBa support
# 2004-02-24 fl Added DPI support; fixed rational write support
# 2005-02-07 fl Added workaround for broken Corel Draw 10 files
# 2006-01-09 fl Added support for float/double tags (from Russell Nelson)
#
# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved.
# Copyright (c) 1995-1997 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from __future__ import print_function
__version__ = "1.3.5"
from PIL import Image, ImageFile
from PIL import ImagePalette
from PIL import _binary
from PIL._util import isStringType
import warnings
import array
import sys
import collections
import itertools
import os
import io
# Set these to true to force use of libtiff for reading or writing.
READ_LIBTIFF = False
WRITE_LIBTIFF = False
II = b"II" # little-endian (Intel style)
MM = b"MM" # big-endian (Motorola style)
i8 = _binary.i8
o8 = _binary.o8
if sys.byteorder == "little":
native_prefix = II
else:
native_prefix = MM
#
# --------------------------------------------------------------------
# Read TIFF files
il16 = _binary.i16le
il32 = _binary.i32le
ol16 = _binary.o16le
ol32 = _binary.o32le
ib16 = _binary.i16be
ib32 = _binary.i32be
ob16 = _binary.o16be
ob32 = _binary.o32be
# a few tag names, just to make the code below a bit more readable
IMAGEWIDTH = 256
IMAGELENGTH = 257
BITSPERSAMPLE = 258
COMPRESSION = 259
PHOTOMETRIC_INTERPRETATION = 262
FILLORDER = 266
IMAGEDESCRIPTION = 270
STRIPOFFSETS = 273
SAMPLESPERPIXEL = 277
ROWSPERSTRIP = 278
STRIPBYTECOUNTS = 279
X_RESOLUTION = 282
Y_RESOLUTION = 283
PLANAR_CONFIGURATION = 284
RESOLUTION_UNIT = 296
SOFTWARE = 305
DATE_TIME = 306
ARTIST = 315
PREDICTOR = 317
COLORMAP = 320
TILEOFFSETS = 324
EXTRASAMPLES = 338
SAMPLEFORMAT = 339
JPEGTABLES = 347
COPYRIGHT = 33432
IPTC_NAA_CHUNK = 33723 # newsphoto properties
PHOTOSHOP_CHUNK = 34377 # photoshop properties
ICCPROFILE = 34675
EXIFIFD = 34665
XMP = 700
# https://github.com/fiji/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java
IMAGEJ_META_DATA_BYTE_COUNTS = 50838
IMAGEJ_META_DATA = 50839
COMPRESSION_INFO = {
# Compression => pil compression name
1: "raw",
2: "tiff_ccitt",
3: "group3",
4: "group4",
5: "tiff_lzw",
6: "tiff_jpeg", # obsolete
7: "jpeg",
8: "tiff_adobe_deflate",
32771: "tiff_raw_16", # 16-bit padding
32773: "packbits",
32809: "tiff_thunderscan",
32946: "tiff_deflate",
34676: "tiff_sgilog",
34677: "tiff_sgilog24",
}
COMPRESSION_INFO_REV = dict([(v, k) for (k, v) in COMPRESSION_INFO.items()])
OPEN_INFO = {
# (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample,
# ExtraSamples) => mode, rawmode
(II, 0, 1, 1, (1,), ()): ("1", "1;I"),
(II, 0, 1, 2, (1,), ()): ("1", "1;IR"),
(II, 0, 1, 1, (8,), ()): ("L", "L;I"),
(II, 0, 1, 2, (8,), ()): ("L", "L;IR"),
(II, 0, 3, 1, (32,), ()): ("F", "F;32F"),
(II, 1, 1, 1, (1,), ()): ("1", "1"),
(II, 1, 1, 1, (4,), ()): ("L", "L;4"),
(II, 1, 1, 2, (1,), ()): ("1", "1;R"),
(II, 1, 1, 1, (8,), ()): ("L", "L"),
(II, 1, 1, 1, (8, 8), (2,)): ("LA", "LA"),
(II, 1, 1, 2, (8,), ()): ("L", "L;R"),
(II, 1, 1, 1, (12,), ()): ("I;16", "I;12"),
(II, 1, 1, 1, (16,), ()): ("I;16", "I;16"),
(II, 1, 2, 1, (16,), ()): ("I;16S", "I;16S"),
(II, 1, 1, 1, (32,), ()): ("I", "I;32N"),
(II, 1, 2, 1, (32,), ()): ("I", "I;32S"),
(II, 1, 3, 1, (32,), ()): ("F", "F;32F"),
(II, 2, 1, 1, (8, 8, 8), ()): ("RGB", "RGB"),
(II, 2, 1, 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
(II, 2, 1, 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples
(II, 2, 1, 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"),
(II, 2, 1, 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
(II, 2, 1, 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
(II, 2, 1, 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10
(II, 3, 1, 1, (1,), ()): ("P", "P;1"),
(II, 3, 1, 2, (1,), ()): ("P", "P;1R"),
(II, 3, 1, 1, (2,), ()): ("P", "P;2"),
(II, 3, 1, 2, (2,), ()): ("P", "P;2R"),
(II, 3, 1, 1, (4,), ()): ("P", "P;4"),
(II, 3, 1, 2, (4,), ()): ("P", "P;4R"),
(II, 3, 1, 1, (8,), ()): ("P", "P"),
(II, 3, 1, 1, (8, 8), (2,)): ("PA", "PA"),
(II, 3, 1, 2, (8,), ()): ("P", "P;R"),
(II, 5, 1, 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
(II, 6, 1, 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"),
(II, 8, 1, 1, (8, 8, 8), ()): ("LAB", "LAB"),
(MM, 0, 1, 1, (1,), ()): ("1", "1;I"),
(MM, 0, 1, 2, (1,), ()): ("1", "1;IR"),
(MM, 0, 1, 1, (8,), ()): ("L", "L;I"),
(MM, 0, 1, 2, (8,), ()): ("L", "L;IR"),
(MM, 1, 1, 1, (1,), ()): ("1", "1"),
(MM, 1, 1, 2, (1,), ()): ("1", "1;R"),
(MM, 1, 1, 1, (8,), ()): ("L", "L"),
(MM, 1, 1, 1, (8, 8), (2,)): ("LA", "LA"),
(MM, 1, 1, 2, (8,), ()): ("L", "L;R"),
(MM, 1, 1, 1, (16,), ()): ("I;16B", "I;16B"),
(MM, 1, 2, 1, (16,), ()): ("I;16BS", "I;16BS"),
(MM, 1, 2, 1, (32,), ()): ("I;32BS", "I;32BS"),
(MM, 1, 3, 1, (32,), ()): ("F", "F;32BF"),
(MM, 2, 1, 1, (8, 8, 8), ()): ("RGB", "RGB"),
(MM, 2, 1, 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
(MM, 2, 1, 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"),
(MM, 2, 1, 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
(MM, 2, 1, 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
(MM, 2, 1, 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10
(MM, 3, 1, 1, (1,), ()): ("P", "P;1"),
(MM, 3, 1, 2, (1,), ()): ("P", "P;1R"),
(MM, 3, 1, 1, (2,), ()): ("P", "P;2"),
(MM, 3, 1, 2, (2,), ()): ("P", "P;2R"),
(MM, 3, 1, 1, (4,), ()): ("P", "P;4"),
(MM, 3, 1, 2, (4,), ()): ("P", "P;4R"),
(MM, 3, 1, 1, (8,), ()): ("P", "P"),
(MM, 3, 1, 1, (8, 8), (2,)): ("PA", "PA"),
(MM, 3, 1, 2, (8,), ()): ("P", "P;R"),
(MM, 5, 1, 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
(MM, 6, 1, 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"),
(MM, 8, 1, 1, (8, 8, 8), ()): ("LAB", "LAB"),
}
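# Illustrative lookup sketch (not executed; the key fields follow the
# comment at the top of OPEN_INFO):
#   key = (II, 2, 1, 1, (8, 8, 8), ())   # little-endian plain RGB
#   mode, rawmode = OPEN_INFO[key]       # -> ("RGB", "RGB")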
PREFIXES = [b"MM\000\052", b"II\052\000", b"II\xBC\000"]
def _accept(prefix):
return prefix[:4] in PREFIXES
##
# Wrapper for TIFF IFDs.
class ImageFileDirectory(collections.MutableMapping):
""" This class represents a TIFF tag directory. To speed things
up, we don't decode tags unless they're asked for.
Exposes a dictionary interface of the tags in the directory
ImageFileDirectory[key] = value
value = ImageFileDirectory[key]
Also contains a dictionary of tag types as read from the tiff
image file, 'ImageFileDirectory.tagtype'
Data Structures:
'public'
* self.tagtype = {} Key: numerical tiff tag number
Value: integer corresponding to the data type from
`TiffTags.TYPES`
'internal'
* self.tags = {} Key: numerical tiff tag number
Value: Decoded data, Generally a tuple.
    * If set from __setitem__ -- always a tuple
* Numeric types -- always a tuple
* String type -- not a tuple, returned as string
* Undefined data -- not a tuple, returned as bytes
* Byte -- not a tuple, returned as byte.
* self.tagdata = {} Key: numerical tiff tag number
Value: undecoded byte string from file
Tags will be found in either self.tags or self.tagdata, but
not both. The union of the two should contain all the tags
from the Tiff image file. External classes shouldn't
reference these unless they're really sure what they're doing.
"""
def __init__(self, prefix=II):
"""
:prefix: 'II'|'MM' tiff endianness
"""
self.prefix = prefix[:2]
if self.prefix == MM:
self.i16, self.i32 = ib16, ib32
self.o16, self.o32 = ob16, ob32
elif self.prefix == II:
self.i16, self.i32 = il16, il32
self.o16, self.o32 = ol16, ol32
else:
raise SyntaxError("not a TIFF IFD")
self.reset()
def reset(self):
#: Tags is an incomplete dictionary of the tags of the image.
#: For a complete dictionary, use the as_dict method.
self.tags = {}
self.tagdata = {}
self.tagtype = {} # added 2008-06-05 by Florian Hoech
self.next = None
self.offset = None
def __str__(self):
return str(self.as_dict())
def as_dict(self):
"""Return a dictionary of the image's tags."""
return dict(self.items())
def named(self):
"""
        Returns the complete tag dictionary, with named tags where possible.
"""
from PIL import TiffTags
result = {}
for tag_code, value in self.items():
tag_name = TiffTags.TAGS.get(tag_code, tag_code)
result[tag_name] = value
return result
# dictionary API
def __len__(self):
return len(self.tagdata) + len(self.tags)
def __getitem__(self, tag):
try:
return self.tags[tag]
except KeyError:
data = self.tagdata[tag] # unpack on the fly
type = self.tagtype[tag]
size, handler = self.load_dispatch[type]
self.tags[tag] = data = handler(self, data)
del self.tagdata[tag]
return data
def getscalar(self, tag, default=None):
try:
value = self[tag]
if len(value) != 1:
if tag == SAMPLEFORMAT:
# work around broken (?) matrox library
# (from Ted Wright, via Bob Klimek)
raise KeyError # use default
raise ValueError("not a scalar")
return value[0]
except KeyError:
if default is None:
raise
return default
def __contains__(self, tag):
return tag in self.tags or tag in self.tagdata
if bytes is str:
def has_key(self, tag):
return tag in self
def __setitem__(self, tag, value):
# tags are tuples for integers
# tags are not tuples for byte, string, and undefined data.
# see load_*
if not isinstance(value, tuple):
value = (value,)
self.tags[tag] = value
def __delitem__(self, tag):
self.tags.pop(tag, self.tagdata.pop(tag, None))
def __iter__(self):
return itertools.chain(self.tags.__iter__(), self.tagdata.__iter__())
def items(self):
keys = list(self.__iter__())
values = [self[key] for key in keys]
return zip(keys, values)
# load primitives
load_dispatch = {}
def load_byte(self, data):
return data
load_dispatch[1] = (1, load_byte)
def load_string(self, data):
if data[-1:] == b'\0':
data = data[:-1]
return data.decode('latin-1', 'replace')
load_dispatch[2] = (1, load_string)
def load_short(self, data):
l = []
for i in range(0, len(data), 2):
l.append(self.i16(data, i))
return tuple(l)
load_dispatch[3] = (2, load_short)
def load_long(self, data):
l = []
for i in range(0, len(data), 4):
l.append(self.i32(data, i))
return tuple(l)
load_dispatch[4] = (4, load_long)
def load_rational(self, data):
l = []
for i in range(0, len(data), 8):
l.append((self.i32(data, i), self.i32(data, i+4)))
return tuple(l)
load_dispatch[5] = (8, load_rational)
def load_float(self, data):
a = array.array("f", data)
if self.prefix != native_prefix:
a.byteswap()
return tuple(a)
load_dispatch[11] = (4, load_float)
def load_double(self, data):
a = array.array("d", data)
if self.prefix != native_prefix:
a.byteswap()
return tuple(a)
load_dispatch[12] = (8, load_double)
def load_undefined(self, data):
# Untyped data
return data
load_dispatch[7] = (1, load_undefined)
def load(self, fp):
# load tag dictionary
self.reset()
self.offset = fp.tell()
i16 = self.i16
i32 = self.i32
for i in range(i16(fp.read(2))):
ifd = fp.read(12)
tag, typ = i16(ifd), i16(ifd, 2)
if Image.DEBUG:
from PIL import TiffTags
tagname = TiffTags.TAGS.get(tag, "unknown")
typname = TiffTags.TYPES.get(typ, "unknown")
print("tag: %s (%d)" % (tagname, tag), end=' ')
print("- type: %s (%d)" % (typname, typ), end=' ')
try:
dispatch = self.load_dispatch[typ]
except KeyError:
if Image.DEBUG:
print("- unsupported type", typ)
continue # ignore unsupported type
size, handler = dispatch
size = size * i32(ifd, 4)
# Get and expand tag value
if size > 4:
here = fp.tell()
if Image.DEBUG:
print("Tag Location: %s" % here)
fp.seek(i32(ifd, 8))
if Image.DEBUG:
print("Data Location: %s" % fp.tell())
data = ImageFile._safe_read(fp, size)
fp.seek(here)
else:
data = ifd[8:8+size]
if len(data) != size:
warnings.warn("Possibly corrupt EXIF data. "
"Expecting to read %d bytes but only got %d. "
"Skipping tag %s" % (size, len(data), tag))
continue
self.tagdata[tag] = data
self.tagtype[tag] = typ
if Image.DEBUG:
if tag in (COLORMAP, IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK,
ICCPROFILE, XMP):
print("- value: <table: %d bytes>" % size)
else:
print("- value:", self[tag])
self.next = i32(fp.read(4))
# save primitives
def save(self, fp):
o16 = self.o16
o32 = self.o32
fp.write(o16(len(self.tags)))
# always write in ascending tag order
tags = sorted(self.tags.items())
directory = []
append = directory.append
offset = fp.tell() + len(self.tags) * 12 + 4
stripoffsets = None
# pass 1: convert tags to binary format
for tag, value in tags:
typ = None
if tag in self.tagtype:
typ = self.tagtype[tag]
if Image.DEBUG:
print ("Tag %s, Type: %s, Value: %s" % (tag, typ, value))
if typ == 1:
# byte data
if isinstance(value, tuple):
data = value = value[-1]
else:
data = value
elif typ == 7:
# untyped data
data = value = b"".join(value)
elif isStringType(value[0]):
# string data
if isinstance(value, tuple):
value = value[-1]
typ = 2
                # was b'\0'.join(str), which led to \x00a\x00b sorts
                # of strings which I don't see in the wild tiffs
                # and doesn't match the tiff spec: an 8-bit byte that
                # contains a 7-bit ASCII code; the last byte must be
                # NUL (binary zero). Also, I don't think this was well
                # exercised before.
data = value = b"" + value.encode('ascii', 'replace') + b"\0"
else:
# integer data
if tag == STRIPOFFSETS:
stripoffsets = len(directory)
typ = 4 # to avoid catch-22
elif tag in (X_RESOLUTION, Y_RESOLUTION) or typ == 5:
# identify rational data fields
typ = 5
if isinstance(value[0], tuple):
# long name for flatten
value = tuple(itertools.chain.from_iterable(value))
elif not typ:
typ = 3
for v in value:
if v >= 65536:
typ = 4
if typ == 3:
data = b"".join(map(o16, value))
else:
data = b"".join(map(o32, value))
if Image.DEBUG:
from PIL import TiffTags
tagname = TiffTags.TAGS.get(tag, "unknown")
typname = TiffTags.TYPES.get(typ, "unknown")
print("save: %s (%d)" % (tagname, tag), end=' ')
print("- type: %s (%d)" % (typname, typ), end=' ')
if tag in (COLORMAP, IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK,
ICCPROFILE, XMP):
size = len(data)
print("- value: <table: %d bytes>" % size)
else:
print("- value:", value)
# figure out if data fits into the directory
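            # Each IFD entry is 12 bytes: tag (2), type (2), count (4),
            # and value-or-offset (4). Values of four bytes or fewer are
            # stored inline; anything longer is written after the
            # directory and referenced by offset.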
if len(data) == 4:
append((tag, typ, len(value), data, b""))
elif len(data) < 4:
append((tag, typ, len(value), data + (4-len(data))*b"\0", b""))
else:
count = len(value)
if typ == 5:
count = count // 2 # adjust for rational data field
append((tag, typ, count, o32(offset), data))
offset += len(data)
if offset & 1:
offset += 1 # word padding
# update strip offset data to point beyond auxiliary data
if stripoffsets is not None:
tag, typ, count, value, data = directory[stripoffsets]
assert not data, "multistrip support not yet implemented"
value = o32(self.i32(value) + offset)
directory[stripoffsets] = tag, typ, count, value, data
# pass 2: write directory to file
for tag, typ, count, value, data in directory:
if Image.DEBUG > 1:
print(tag, typ, count, repr(value), repr(data))
fp.write(o16(tag) + o16(typ) + o32(count) + value)
# -- overwrite here for multi-page --
fp.write(b"\0\0\0\0") # end of directory
# pass 3: write auxiliary data to file
for tag, typ, count, value, data in directory:
fp.write(data)
if len(data) & 1:
fp.write(b"\0")
return offset
##
# Image plugin for TIFF files.
class TiffImageFile(ImageFile.ImageFile):
format = "TIFF"
format_description = "Adobe TIFF"
def _open(self):
"Open the first image in a TIFF file"
# Header
ifh = self.fp.read(8)
if ifh[:4] not in PREFIXES:
raise SyntaxError("not a TIFF file")
# image file directory (tag dictionary)
self.tag = self.ifd = ImageFileDirectory(ifh[:2])
# setup frame pointers
self.__first = self.__next = self.ifd.i32(ifh, 4)
self.__frame = -1
self.__fp = self.fp
if Image.DEBUG:
print ("*** TiffImageFile._open ***")
print ("- __first:", self.__first)
print ("- ifh: ", ifh)
# and load the first frame
self._seek(0)
def seek(self, frame):
"Select a given frame as current image"
if frame < 0:
frame = 0
self._seek(frame)
# Create a new core image object on second and
# subsequent frames in the image. Image may be
# different size/mode.
Image._decompression_bomb_check(self.size)
self.im = Image.core.new(self.mode, self.size)
def tell(self):
"Return the current frame number"
return self._tell()
def _seek(self, frame):
self.fp = self.__fp
if frame < self.__frame:
# rewind file
self.__frame = -1
self.__next = self.__first
while self.__frame < frame:
if not self.__next:
raise EOFError("no more images in TIFF file")
if Image.DEBUG:
print("Seeking to frame %s, on frame %s, __next %s, location: %s" %
(frame, self.__frame, self.__next, self.fp.tell()))
# reset python3 buffered io handle in case fp
# was passed to libtiff, invalidating the buffer
self.fp.tell()
self.fp.seek(self.__next)
if Image.DEBUG:
print("Loading tags, location: %s" % self.fp.tell())
self.tag.load(self.fp)
self.__next = self.tag.next
self.__frame += 1
self._setup()
def _tell(self):
return self.__frame
def _decoder(self, rawmode, layer, tile=None):
"Setup decoder contexts"
args = None
if rawmode == "RGB" and self._planar_configuration == 2:
rawmode = rawmode[layer]
compression = self._compression
if compression == "raw":
args = (rawmode, 0, 1)
elif compression == "jpeg":
args = rawmode, ""
if JPEGTABLES in self.tag:
# Hack to handle abbreviated JPEG headers
self.tile_prefix = self.tag[JPEGTABLES]
elif compression == "packbits":
args = rawmode
elif compression == "tiff_lzw":
args = rawmode
if 317 in self.tag:
# Section 14: Differencing Predictor
self.decoderconfig = (self.tag[PREDICTOR][0],)
if ICCPROFILE in self.tag:
self.info['icc_profile'] = self.tag[ICCPROFILE]
return args
def _load_libtiff(self):
""" Overload method triggered when we detect a compressed tiff
Calls out to libtiff """
pixel = Image.Image.load(self)
if self.tile is None:
raise IOError("cannot load this image")
if not self.tile:
return pixel
self.load_prepare()
if not len(self.tile) == 1:
raise IOError("Not exactly one tile")
# (self._compression, (extents tuple),
# 0, (rawmode, self._compression, fp))
ignored, extents, ignored_2, args = self.tile[0]
args = args + (self.ifd.offset,)
decoder = Image._getdecoder(self.mode, 'libtiff', args,
self.decoderconfig)
try:
decoder.setimage(self.im, extents)
except ValueError:
raise IOError("Couldn't set the image")
if hasattr(self.fp, "getvalue"):
# We've got a stringio like thing passed in. Yay for all in memory.
# The decoder needs the entire file in one shot, so there's not
# a lot we can do here other than give it the entire file.
# unless we could do something like get the address of the
# underlying string for stringio.
#
            # Rearranging for supporting byteio items, since they have a fileno
            # that raises an IOError if there's no underlying fp. Easier to
            # deal with here by reordering.
if Image.DEBUG:
print ("have getvalue. just sending in a string from getvalue")
n, err = decoder.decode(self.fp.getvalue())
elif hasattr(self.fp, "fileno"):
            # we've got an actual file on disk, pass in the fp.
if Image.DEBUG:
print ("have fileno, calling fileno version of the decoder.")
self.fp.seek(0)
# 4 bytes, otherwise the trace might error out
n, err = decoder.decode(b"fpfp")
else:
# we have something else.
if Image.DEBUG:
print ("don't have fileno or getvalue. just reading")
# UNDONE -- so much for that buffer size thing.
n, err = decoder.decode(self.fp.read())
self.tile = []
self.readonly = 0
        # libtiff may have closed the fp it was handed; we also need to
        # close self.fp, if possible
if hasattr(self.fp, 'close'):
if not self.__next:
self.fp.close()
self.fp = None # might be shared
if err < 0:
raise IOError(err)
self.load_end()
return Image.Image.load(self)
def _setup(self):
"Setup this image object based on current tags"
if 0xBC01 in self.tag:
raise IOError("Windows Media Photo files not yet supported")
getscalar = self.tag.getscalar
# extract relevant tags
self._compression = COMPRESSION_INFO[getscalar(COMPRESSION, 1)]
self._planar_configuration = getscalar(PLANAR_CONFIGURATION, 1)
# photometric is a required tag, but not everyone is reading
# the specification
photo = getscalar(PHOTOMETRIC_INTERPRETATION, 0)
fillorder = getscalar(FILLORDER, 1)
if Image.DEBUG:
print("*** Summary ***")
print("- compression:", self._compression)
print("- photometric_interpretation:", photo)
print("- planar_configuration:", self._planar_configuration)
print("- fill_order:", fillorder)
# size
xsize = getscalar(IMAGEWIDTH)
ysize = getscalar(IMAGELENGTH)
self.size = xsize, ysize
if Image.DEBUG:
print("- size:", self.size)
format = getscalar(SAMPLEFORMAT, 1)
# mode: check photometric interpretation and bits per pixel
key = (
self.tag.prefix, photo, format, fillorder,
self.tag.get(BITSPERSAMPLE, (1,)),
self.tag.get(EXTRASAMPLES, ())
)
if Image.DEBUG:
print("format key:", key)
try:
self.mode, rawmode = OPEN_INFO[key]
except KeyError:
if Image.DEBUG:
print("- unsupported format")
raise SyntaxError("unknown pixel mode")
if Image.DEBUG:
print("- raw mode:", rawmode)
print("- pil mode:", self.mode)
self.info["compression"] = self._compression
xres = getscalar(X_RESOLUTION, (1, 1))
yres = getscalar(Y_RESOLUTION, (1, 1))
if xres and not isinstance(xres, tuple):
xres = (xres, 1.)
if yres and not isinstance(yres, tuple):
yres = (yres, 1.)
if xres and yres:
xres = xres[0] / (xres[1] or 1)
yres = yres[0] / (yres[1] or 1)
resunit = getscalar(RESOLUTION_UNIT, 1)
if resunit == 2: # dots per inch
self.info["dpi"] = xres, yres
elif resunit == 3: # dots per centimeter. convert to dpi
self.info["dpi"] = xres * 2.54, yres * 2.54
else: # No absolute unit of measurement
self.info["resolution"] = xres, yres
# build tile descriptors
x = y = l = 0
self.tile = []
if STRIPOFFSETS in self.tag:
# striped image
offsets = self.tag[STRIPOFFSETS]
h = getscalar(ROWSPERSTRIP, ysize)
w = self.size[0]
if READ_LIBTIFF or self._compression in ["tiff_ccitt", "group3",
"group4", "tiff_jpeg",
"tiff_adobe_deflate",
"tiff_thunderscan",
"tiff_deflate",
"tiff_sgilog",
"tiff_sgilog24",
"tiff_raw_16"]:
# if Image.DEBUG:
# print "Activating g4 compression for whole file"
# Decoder expects entire file as one tile.
# There's a buffer size limit in load (64k)
# so large g4 images will fail if we use that
# function.
#
# Setup the one tile for the whole image, then
# replace the existing load function with our
# _load_libtiff function.
self.load = self._load_libtiff
# To be nice on memory footprint, if there's a
# file descriptor, use that instead of reading
# into a string in python.
# libtiff closes the file descriptor, so pass in a dup.
try:
fp = hasattr(self.fp, "fileno") and \
os.dup(self.fp.fileno())
# flush the file descriptor, prevents error on pypy 2.4+
# should also eliminate the need for fp.tell for py3
# in _seek
self.fp.flush()
except IOError:
                    # io.BytesIO has a fileno, but raises an IOError if
                    # it doesn't use a file descriptor.
fp = False
                # libtiff handles the fillorder for us, so 1;IR should
# actually be 1;I. Including the R double reverses the
# bits, so stripes of the image are reversed. See
# https://github.com/python-pillow/Pillow/issues/279
if fillorder == 2:
key = (
self.tag.prefix, photo, format, 1,
self.tag.get(BITSPERSAMPLE, (1,)),
self.tag.get(EXTRASAMPLES, ())
)
if Image.DEBUG:
print("format key:", key)
# this should always work, since all the
# fillorder==2 modes have a corresponding
# fillorder=1 mode
self.mode, rawmode = OPEN_INFO[key]
# libtiff always returns the bytes in native order.
# we're expecting image byte order. So, if the rawmode
# contains I;16, we need to convert from native to image
# byte order.
if self.mode in ('I;16B', 'I;16') and 'I;16' in rawmode:
rawmode = 'I;16N'
# Offset in the tile tuple is 0, we go from 0,0 to
# w,h, and we only do this once -- eds
a = (rawmode, self._compression, fp)
self.tile.append(
(self._compression,
(0, 0, w, ysize),
0, a))
a = None
else:
for i in range(len(offsets)):
a = self._decoder(rawmode, l, i)
self.tile.append(
(self._compression,
(0, min(y, ysize), w, min(y+h, ysize)),
offsets[i], a))
if Image.DEBUG:
print ("tiles: ", self.tile)
y = y + h
if y >= self.size[1]:
x = y = 0
l += 1
a = None
elif TILEOFFSETS in self.tag:
# tiled image
w = getscalar(322)
h = getscalar(323)
a = None
for o in self.tag[TILEOFFSETS]:
if not a:
a = self._decoder(rawmode, l)
# FIXME: this doesn't work if the image size
# is not a multiple of the tile size...
self.tile.append(
(self._compression,
(x, y, x+w, y+h),
o, a))
x = x + w
if x >= self.size[0]:
x, y = 0, y + h
if y >= self.size[1]:
x = y = 0
l += 1
a = None
else:
if Image.DEBUG:
print("- unsupported data organization")
raise SyntaxError("unknown data organization")
# fixup palette descriptor
if self.mode == "P":
palette = [o8(a // 256) for a in self.tag[COLORMAP]]
self.palette = ImagePalette.raw("RGB;L", b"".join(palette))
#
# --------------------------------------------------------------------
# Write TIFF files
# little endian is default except for image modes with
# explicit big endian byte-order
SAVE_INFO = {
# mode => rawmode, byteorder, photometrics,
# sampleformat, bitspersample, extra
"1": ("1", II, 1, 1, (1,), None),
"L": ("L", II, 1, 1, (8,), None),
"LA": ("LA", II, 1, 1, (8, 8), 2),
"P": ("P", II, 3, 1, (8,), None),
"PA": ("PA", II, 3, 1, (8, 8), 2),
"I": ("I;32S", II, 1, 2, (32,), None),
"I;16": ("I;16", II, 1, 1, (16,), None),
"I;16S": ("I;16S", II, 1, 2, (16,), None),
"F": ("F;32F", II, 1, 3, (32,), None),
"RGB": ("RGB", II, 2, 1, (8, 8, 8), None),
"RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0),
"RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2),
"CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None),
"YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None),
"LAB": ("LAB", II, 8, 1, (8, 8, 8), None),
"I;32BS": ("I;32BS", MM, 1, 2, (32,), None),
"I;16B": ("I;16B", MM, 1, 1, (16,), None),
"I;16BS": ("I;16BS", MM, 1, 2, (16,), None),
"F;32BF": ("F;32BF", MM, 1, 3, (32,), None),
}
def _cvt_res(value):
# convert value to TIFF rational number -- (numerator, denominator)
if isinstance(value, collections.Sequence):
assert(len(value) % 2 == 0)
return value
if isinstance(value, int):
return (value, 1)
value = float(value)
return (int(value * 65536), 65536)
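# Illustrative sketch of the conversions above (not executed):
#   _cvt_res(300)      -> (300, 1)
#   _cvt_res(72.5)     -> (int(72.5 * 65536), 65536) == (4751360, 65536)
#   _cvt_res((300, 1)) -> (300, 1)    # sequences pass through unchanged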
def _save(im, fp, filename):
try:
rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode]
except KeyError:
raise IOError("cannot write mode %s as TIFF" % im.mode)
ifd = ImageFileDirectory(prefix)
compression = im.encoderinfo.get('compression', im.info.get('compression',
'raw'))
libtiff = WRITE_LIBTIFF or compression != 'raw'
# required for color libtiff images
ifd[PLANAR_CONFIGURATION] = getattr(im, '_planar_configuration', 1)
# -- multi-page -- skip TIFF header on subsequent pages
if not libtiff and fp.tell() == 0:
# tiff header (write via IFD to get everything right)
# PIL always starts the first IFD at offset 8
fp.write(ifd.prefix + ifd.o16(42) + ifd.o32(8))
ifd[IMAGEWIDTH] = im.size[0]
ifd[IMAGELENGTH] = im.size[1]
# write any arbitrary tags passed in as an ImageFileDirectory
info = im.encoderinfo.get("tiffinfo", {})
if Image.DEBUG:
print("Tiffinfo Keys: %s" % info.keys)
keys = list(info.keys())
for key in keys:
ifd[key] = info.get(key)
try:
ifd.tagtype[key] = info.tagtype[key]
        except (AttributeError, KeyError):
            pass  # might not be an IFD; might not have populated type
# additions written by Greg Couch, gregc@cgl.ucsf.edu
# inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com
if hasattr(im, 'tag'):
# preserve tags from original TIFF image file
for key in (RESOLUTION_UNIT, X_RESOLUTION, Y_RESOLUTION,
IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, XMP):
if key in im.tag:
ifd[key] = im.tag[key]
ifd.tagtype[key] = im.tag.tagtype.get(key, None)
# preserve ICC profile (should also work when saving other formats
# which support profiles as TIFF) -- 2008-06-06 Florian Hoech
if "icc_profile" in im.info:
ifd[ICCPROFILE] = im.info["icc_profile"]
for key, name, cvt in [
(IMAGEDESCRIPTION, "description", lambda x: x),
(X_RESOLUTION, "resolution", _cvt_res),
(Y_RESOLUTION, "resolution", _cvt_res),
(X_RESOLUTION, "x_resolution", _cvt_res),
(Y_RESOLUTION, "y_resolution", _cvt_res),
(RESOLUTION_UNIT, "resolution_unit",
lambda x: {"inch": 2, "cm": 3, "centimeter": 3}.get(x, 1)),
(SOFTWARE, "software", lambda x: x),
(DATE_TIME, "date_time", lambda x: x),
(ARTIST, "artist", lambda x: x),
(COPYRIGHT, "copyright", lambda x: x)]:
name_with_spaces = name.replace("_", " ")
if "_" in name and name_with_spaces in im.encoderinfo:
warnings.warn("%r is deprecated; use %r instead" %
(name_with_spaces, name), DeprecationWarning)
ifd[key] = cvt(im.encoderinfo[name.replace("_", " ")])
if name in im.encoderinfo:
ifd[key] = cvt(im.encoderinfo[name])
dpi = im.encoderinfo.get("dpi")
if dpi:
ifd[RESOLUTION_UNIT] = 2
ifd[X_RESOLUTION] = _cvt_res(dpi[0])
ifd[Y_RESOLUTION] = _cvt_res(dpi[1])
if bits != (1,):
ifd[BITSPERSAMPLE] = bits
if len(bits) != 1:
ifd[SAMPLESPERPIXEL] = len(bits)
if extra is not None:
ifd[EXTRASAMPLES] = extra
if format != 1:
ifd[SAMPLEFORMAT] = format
ifd[PHOTOMETRIC_INTERPRETATION] = photo
if im.mode == "P":
lut = im.im.getpalette("RGB", "RGB;L")
ifd[COLORMAP] = tuple(i8(v) * 256 for v in lut)
# data orientation
stride = len(bits) * ((im.size[0]*bits[0]+7)//8)
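    # e.g. a 640-pixel-wide "RGB" image: bits == (8, 8, 8), so
    # stride == 3 * ((640 * 8 + 7) // 8) == 3 * 640 == 1920 bytes per row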
ifd[ROWSPERSTRIP] = im.size[1]
ifd[STRIPBYTECOUNTS] = stride * im.size[1]
ifd[STRIPOFFSETS] = 0 # this is adjusted by IFD writer
# no compression by default:
ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1)
if libtiff:
if Image.DEBUG:
print ("Saving using libtiff encoder")
print (ifd.items())
_fp = 0
if hasattr(fp, "fileno"):
try:
fp.seek(0)
_fp = os.dup(fp.fileno())
except io.UnsupportedOperation:
pass
# ICC Profile crashes.
blocklist = [STRIPOFFSETS, STRIPBYTECOUNTS, ROWSPERSTRIP, ICCPROFILE]
atts = {}
# bits per sample is a single short in the tiff directory, not a list.
atts[BITSPERSAMPLE] = bits[0]
    # Merge the ones that we have with (optional) more bits from
    # the original file, e.g. x,y resolution, so that we can
    # save(load('')) == original file.
for k, v in itertools.chain(ifd.items(),
getattr(im, 'ifd', {}).items()):
if k not in atts and k not in blocklist:
if type(v[0]) == tuple and len(v) > 1:
# A tuple of more than one rational tuples
# flatten to floats,
# following tiffcp.c->cpTag->TIFF_RATIONAL
atts[k] = [float(elt[0])/float(elt[1]) for elt in v]
continue
if type(v[0]) == tuple and len(v) == 1:
# A tuple of one rational tuples
# flatten to floats,
# following tiffcp.c->cpTag->TIFF_RATIONAL
atts[k] = float(v[0][0])/float(v[0][1])
continue
if (type(v) == tuple and
(len(v) > 2 or
(len(v) == 2 and v[1] == 0))):
# List of ints?
# Avoid divide by zero in next if-clause
if type(v[0]) in (int, float):
atts[k] = list(v)
continue
if type(v) == tuple and len(v) == 2:
# one rational tuple
# flatten to float,
# following tiffcp.c->cpTag->TIFF_RATIONAL
atts[k] = float(v[0])/float(v[1])
continue
if type(v) == tuple and len(v) == 1:
v = v[0]
# drop through
if isStringType(v):
atts[k] = bytes(v.encode('ascii', 'replace')) + b"\0"
continue
else:
# int or similar
atts[k] = v
if Image.DEBUG:
print (atts)
# libtiff always expects the bytes in native order.
# we're storing image byte order. So, if the rawmode
# contains I;16, we need to convert from native to image
# byte order.
if im.mode in ('I;16B', 'I;16'):
rawmode = 'I;16N'
a = (rawmode, compression, _fp, filename, atts)
# print (im.mode, compression, a, im.encoderconfig)
e = Image._getencoder(im.mode, 'libtiff', a, im.encoderconfig)
e.setimage(im.im, (0, 0)+im.size)
while True:
# undone, change to self.decodermaxblock:
l, s, d = e.encode(16*1024)
if not _fp:
fp.write(d)
if s:
break
if s < 0:
raise IOError("encoder error %d when writing image file" % s)
else:
offset = ifd.save(fp)
ImageFile._save(im, fp, [
("raw", (0, 0)+im.size, offset, (rawmode, stride, 1))
])
# -- helper for multi-page save --
if "_debug_multipage" in im.encoderinfo:
# just to access o32 and o16 (using correct byte order)
im._debug_multipage = ifd
#
# --------------------------------------------------------------------
# Register
Image.register_open("TIFF", TiffImageFile, _accept)
Image.register_save("TIFF", _save)
Image.register_extension("TIFF", ".tif")
Image.register_extension("TIFF", ".tiff")
Image.register_mime("TIFF", "image/tiff")
|
ctruchi/deluge-webui2
|
refs/heads/master
|
deluge/ui/gtkui/new_release_dialog.py
|
7
|
#
# new_release_dialog.py
#
# Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
#
import deluge.component as component
import deluge.common
from deluge.configmanager import ConfigManager
from deluge.ui.client import client
class NewReleaseDialog:
def __init__(self):
pass
def show(self, available_version):
self.config = ConfigManager("gtkui.conf")
builder = component.get("MainWindow").get_builder()
self.dialog = builder.get_object("new_release_dialog")
# Set the version labels
if deluge.common.windows_check() or deluge.common.osx_check():
builder.get_object("image_new_release").set_from_file(
deluge.common.get_pixmap("deluge16.png"))
else:
builder.get_object("image_new_release").set_from_icon_name("deluge", 4)
builder.get_object("label_available_version").set_text(available_version)
builder.get_object("label_client_version").set_text(
deluge.common.get_version())
self.chk_not_show_dialog = builder.get_object("chk_do_not_show_new_release")
builder.get_object("button_goto_downloads").connect(
"clicked", self._on_button_goto_downloads)
builder.get_object("button_close_new_release").connect(
"clicked", self._on_button_close_new_release)
if client.connected():
def on_info(version):
builder.get_object("label_server_version").set_text(version)
builder.get_object("label_server_version").show()
builder.get_object("label_server_version_text").show()
if not client.is_classicmode():
builder.get_object("label_client_version_text").set_label(_("<i>Client Version</i>"))
client.daemon.info().addCallback(on_info)
self.dialog.show()
def _on_button_goto_downloads(self, widget):
deluge.common.open_url_in_browser("http://deluge-torrent.org")
self.config["show_new_releases"] = not self.chk_not_show_dialog.get_active()
self.dialog.destroy()
def _on_button_close_new_release(self, widget):
self.config["show_new_releases"] = not self.chk_not_show_dialog.get_active()
self.dialog.destroy()
|
halfak/Deltas
|
refs/heads/master
|
deltas/algorithms/tests/test_segment_matcher.py
|
1
|
from nose.tools import eq_
from ...operations import Delete, Equal, Insert
from ...tests.diff_and_replay import diff_and_replay
from ...tests.diff_sequence import diff_sequence
from ...tokenizers import wikitext_split
from ..segment_matcher import diff, process
def test_diff_and_replay():
return diff_and_replay(diff)
def test_engine():
return diff_sequence(process)
def test_easy_diff():
a = "Apples are red."
b = "Apples are tasty and red."
operation_tokens = process([a, b], tokenizer=wikitext_split)
# Apples are red.
next(operation_tokens)
# Apples are tasty and red.
operations, a, b = next(operation_tokens)
eq_(
list(operations),
[
Equal(0, 4, 0, 4),
Insert(4, 4, 4, 8),
Equal(4, 6, 8, 10)
]
)
def test_sentence_sandwich():
a = """==Senior Tours==
Golfers over the age of fifty are eligible to compete in senior tournaments.
[[Golf]] is unique among [[sports]] in having high profile and lucrative
competitions for this age group. Nearly all of the famous golfers who are
eligible to compete in these events choose to do so these events, unless
they are unable to for health reasons. The two main tours are:
*[[Champions Tour]] (based in the [[United States]]}
*[[European Seniors Tour]]"""
b = """==Senior Tours==
Golfers over the age of fifty are eligible to compete in senior tournaments.
[[Golf]] is unique among [[sports]] in having high profile and lucrative
competitions for this age group. Nearly all of the famous golfers who are
eligible to compete in these events choose to do so, unless
they are unable to for health reasons. The two main tours are:
*[[Champions Tour]] (based in the [[United States]]}
*[[European Seniors Tour]]"""
operation_tokens = process([a, a, b], tokenizer=wikitext_split)
operations, a, b = next(operation_tokens)
operations, a, b = next(operation_tokens)
operations, a, b = next(operation_tokens)
eq_(
list(operations),
[Equal(name='equal', a1=0, a2=105, b1=0, b2=105),
Delete(name='delete', a1=105, a2=109, b1=105, b2=105),
Equal(name='equal', a1=109, a2=168, b1=105, b2=164)]
)
def test_revisions():
from ...segmenters import ParagraphsSentencesAndWhitespace
ParagraphsSentencesAndWhitespace()
a = """
{| class="wikitable" |}
#"Huger than Huge" – ''Jordan''
""" # noqa
b = """
{| class="wikitable" |}
#"Huger than Huge" – ''Jordan (of Dan and Jordan)''
""" # noqa
at = wikitext_split.tokenize(a)
bt = wikitext_split.tokenize(b)
operations = diff(at, bt)
added_content = ", ".join("".join(bt[i] for i in range(op.b1, op.b2))
for op in operations if op.name == "insert")
eq_(added_content, " (of Dan and Jordan)")
|
gonicus/gosa
|
refs/heads/master
|
backend/src/tests/backend/plugins/samba/test_domain.py
|
1
|
# This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
from unittest import TestCase, mock
import pytest
from gosa.backend.plugins.samba.domain import *
class SambaGuiMethodsTestCase(TestCase):
@mock.patch.object(Environment, "getInstance")
@mock.patch.object(PluginRegistry, 'getInstance')
def test_getSambaPassword(self, mockedRegistry, mockedEnv):
# mockup ACL resolver
mockedRegistry.return_value.check.return_value = True
# mockup the environment
mockedEnv.return_value.domain = "testdomain"
with mock.patch('gosa.backend.plugins.samba.domain.ObjectProxy', autoSpec=True, create=True) as m:
# run the test
user = m.return_value
methods = SambaGuiMethods()
methods.setSambaPassword("username", "dn", "password")
assert user.sambaNTPassword is not None
assert user.sambaLMPassword is not None
assert user.commit.called is True
assert m.called is True
# test with ACL.check for sambaNTPassword is False
mockedRegistry.return_value.check.return_value = False
with mock.patch('gosa.backend.plugins.samba.domain.ObjectProxy', create=True):
# run the test
methods = SambaGuiMethods()
with pytest.raises(ACLException):
methods.setSambaPassword("username", "dn", "password")
# test with ACL.check for sambaLMPassword is False
def check(user, topic, flags, base):
return not topic == "testdomain.objects.User.attributes.sambaLMPassword"
mockedRegistry.return_value.check.side_effect = check
with mock.patch('gosa.backend.plugins.samba.domain.ObjectProxy', create=True):
# run the test
methods = SambaGuiMethods()
with pytest.raises(ACLException):
methods.setSambaPassword("username", "dn", "password")
@mock.patch.object(PluginRegistry, 'getInstance')
def test_getSambaDomainInformation(self, mockedInstance):
# mock the whole lookup in the ObjectIndex to return True
mockedInstance.return_value.search.return_value = [{"sambaMinPwdLength": 6,
"sambaPwdHistoryLength": 10,
"sambaMaxPwdAge": 10,
"sambaMinPwdAge": 1,
"sambaLockoutDuration": 60,
"sambaRefuseMachinePwdChange": False,
"sambaLogonToChgPwd": True,
"sambaLockoutThreshold": 30,
"sambaBadPasswordTime": 2147483647}]
methods = SambaGuiMethods()
target = mock.MagicMock()
target.sambaDomainName = 'DEFAULT'
res = methods.getSambaDomainInformation("username", target)
        # this is just a check that the method is callable, so we do not really check the output here
assert len(res) > 0
@mock.patch.object(PluginRegistry, 'getInstance')
    def test_IsValidSambaDomainName(self, mockedInstance):
# mock the whole lookup in the ObjectIndex to return True
mockedInstance.return_value.search.return_value = [1]
check = IsValidSambaDomainName()
(res, errors) = check.process(None, None, ["test"])
assert res is True
assert len(errors) == 0
# mockup everything to return False
mockedInstance.return_value.search.return_value = []
(res, errors) = check.process(None, None, ["test"])
assert res is False
assert len(errors) == 1
|
mdanielwork/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/contrib/localflavor/ch/ch_states.py
|
544
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
STATE_CHOICES = (
('AG', _('Aargau')),
('AI', _('Appenzell Innerrhoden')),
('AR', _('Appenzell Ausserrhoden')),
('BS', _('Basel-Stadt')),
('BL', _('Basel-Land')),
('BE', _('Berne')),
('FR', _('Fribourg')),
('GE', _('Geneva')),
('GL', _('Glarus')),
('GR', _('Graubuenden')),
('JU', _('Jura')),
('LU', _('Lucerne')),
('NE', _('Neuchatel')),
('NW', _('Nidwalden')),
('OW', _('Obwalden')),
('SH', _('Schaffhausen')),
('SZ', _('Schwyz')),
('SO', _('Solothurn')),
('SG', _('St. Gallen')),
('TG', _('Thurgau')),
('TI', _('Ticino')),
('UR', _('Uri')),
('VS', _('Valais')),
('VD', _('Vaud')),
('ZG', _('Zug')),
('ZH', _('Zurich'))
)
|
BehavioralInsightsTeam/edx-platform
|
refs/heads/release-bit
|
lms/djangoapps/edxnotes/api_urls.py
|
13
|
"""
API URLs for EdxNotes
"""
from django.conf.urls import url
from edxnotes import views
urlpatterns = [
url(r"^retire_user/$", views.RetireUserView.as_view(), name="edxnotes_retire_user"),
]
|
louietsai/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Tools/scripts/parseentities.py
|
46
|
#!/usr/bin/env python3
""" Utility for parsing HTML entity definitions available from:
http://www.w3.org/ as e.g.
http://www.w3.org/TR/REC-html40/HTMLlat1.ent
Input is read from stdin, output is written to stdout in form of a
Python snippet defining a dictionary "entitydefs" mapping literal
entity name to character or numeric entity.
Marc-Andre Lemburg, mal@lemburg.com, 1999.
Use as you like. NO WARRANTIES.
"""
import re,sys
import TextTools
entityRE = re.compile(r'<!ENTITY +(\w+) +CDATA +"([^"]+)" +-- +((?:.|\n)+?) *-->')
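# Illustrative sketch of what the regex captures (not executed):
#   line = '<!ENTITY nbsp CDATA "&#160;" -- no-break space -->'
#   parse(line)  ->  {'nbsp': ('&#160;', 'no-break space')}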
def parse(text,pos=0,endpos=None):
pos = 0
if endpos is None:
endpos = len(text)
d = {}
while 1:
m = entityRE.search(text,pos,endpos)
if not m:
break
name,charcode,comment = m.groups()
d[name] = charcode,comment
pos = m.end()
return d
def writefile(f,defs):
f.write("entitydefs = {\n")
items = sorted(defs.items())
for name, (charcode,comment) in items:
if charcode[:2] == '&#':
code = int(charcode[2:-1])
if code < 256:
charcode = "'\%o'" % code
else:
charcode = repr(charcode)
else:
charcode = repr(charcode)
comment = TextTools.collapse(comment)
f.write(" '%s':\t%s, \t# %s\n" % (name,charcode,comment))
f.write('\n}\n')
if __name__ == '__main__':
if len(sys.argv) > 1:
infile = open(sys.argv[1])
else:
infile = sys.stdin
if len(sys.argv) > 2:
outfile = open(sys.argv[2],'w')
else:
outfile = sys.stdout
text = infile.read()
defs = parse(text)
writefile(outfile,defs)
|
manuq/sugar-toolkit-gtk3
|
refs/heads/master
|
src/sugar3/presence/presenceservice.py
|
7
|
# Copyright (C) 2007, Red Hat, Inc.
# Copyright (C) 2010 Collabora Ltd. <http://www.collabora.co.uk/>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
"""
STABLE.
"""
import logging
from gi.repository import GObject
import dbus
import dbus.exceptions
from dbus import PROPERTIES_IFACE
from sugar3.presence.buddy import Buddy, Owner
from sugar3.presence.activity import Activity
from sugar3.presence.connectionmanager import get_connection_manager
from telepathy.interfaces import ACCOUNT, \
ACCOUNT_MANAGER, \
CONNECTION
from telepathy.constants import HANDLE_TYPE_CONTACT
_logger = logging.getLogger('sugar3.presence.presenceservice')
ACCOUNT_MANAGER_SERVICE = 'org.freedesktop.Telepathy.AccountManager'
ACCOUNT_MANAGER_PATH = '/org/freedesktop/Telepathy/AccountManager'
CONN_INTERFACE_ACTIVITY_PROPERTIES = 'org.laptop.Telepathy.ActivityProperties'
class PresenceService(GObject.GObject):
"""Provides simplified access to the Telepathy framework to activities"""
__gsignals__ = {
'activity-shared': (GObject.SignalFlags.RUN_FIRST, None,
([GObject.TYPE_PYOBJECT, GObject.TYPE_PYOBJECT,
GObject.TYPE_PYOBJECT])),
}
def __init__(self):
"""Initialise the service and attempt to connect to events
"""
GObject.GObject.__init__(self)
self._activity_cache = None
self._buddy_cache = {}
def get_activity(self, activity_id, warn_if_none=True):
"""Retrieve single Activity object for the given unique id
activity_id -- unique ID for the activity
returns single Activity object or None if the activity
is not found using GetActivityById on the service
"""
if self._activity_cache is not None:
if self._activity_cache.props.id != activity_id:
raise RuntimeError('Activities can only access their own'
' shared instance')
return self._activity_cache
else:
connection_manager = get_connection_manager()
connections_per_account = \
connection_manager.get_connections_per_account()
for account_path, connection in connections_per_account.items():
if not connection.connected:
continue
logging.debug('Calling GetActivity on %s' % account_path)
try:
room_handle = connection.connection.GetActivity(
activity_id,
dbus_interface=CONN_INTERFACE_ACTIVITY_PROPERTIES)
except dbus.exceptions.DBusException, e:
name = 'org.freedesktop.Telepathy.Error.NotAvailable'
if e.get_dbus_name() == name:
logging.debug("There's no shared activity with the id "
"%s" % activity_id)
else:
raise
else:
activity = Activity(account_path, connection.connection,
room_handle=room_handle)
self._activity_cache = activity
return activity
return None
def get_activity_by_handle(self, connection_path, room_handle):
if self._activity_cache is not None:
if self._activity_cache.room_handle != room_handle:
raise RuntimeError('Activities can only access their own'
' shared instance')
return self._activity_cache
else:
connection_manager = get_connection_manager()
account_path = \
connection_manager.get_account_for_connection(connection_path)
connection_name = connection_path.replace('/', '.')[1:]
bus = dbus.SessionBus()
connection = bus.get_object(connection_name, connection_path)
activity = Activity(account_path, connection,
room_handle=room_handle)
self._activity_cache = activity
return activity
def get_buddy(self, account_path, contact_id):
if (account_path, contact_id) in self._buddy_cache:
return self._buddy_cache[(account_path, contact_id)]
buddy = Buddy(account_path, contact_id)
self._buddy_cache[(account_path, contact_id)] = buddy
return buddy
# DEPRECATED
def get_buddy_by_telepathy_handle(self, tp_conn_name, tp_conn_path,
handle):
"""Retrieve single Buddy object for the given public key
:Parameters:
`tp_conn_name` : str
The well-known bus name of a Telepathy connection
`tp_conn_path` : dbus.ObjectPath
The object path of the Telepathy connection
`handle` : int or long
The handle of a Telepathy contact on that connection,
of type HANDLE_TYPE_CONTACT. This may not be a
channel-specific handle.
:Returns: the Buddy object, or None if the buddy is not found
"""
bus = dbus.Bus()
obj = bus.get_object(ACCOUNT_MANAGER_SERVICE, ACCOUNT_MANAGER_PATH)
account_manager = dbus.Interface(obj, ACCOUNT_MANAGER)
account_paths = account_manager.Get(ACCOUNT_MANAGER, 'ValidAccounts',
dbus_interface=PROPERTIES_IFACE)
for account_path in account_paths:
obj = bus.get_object(ACCOUNT_MANAGER_SERVICE, account_path)
connection_path = obj.Get(ACCOUNT, 'Connection')
if connection_path == tp_conn_path:
connection_name = connection_path.replace('/', '.')[1:]
connection = bus.get_object(connection_name, connection_path)
contact_ids = connection.InspectHandles(
HANDLE_TYPE_CONTACT,
[handle],
dbus_interface=CONNECTION)
return self.get_buddy(account_path, contact_ids[0])
raise ValueError('Unknown buddy in connection %s with handle %d' %
(tp_conn_path, handle))
def get_owner(self):
"""Retrieves the laptop Buddy object."""
return Owner()
def __share_activity_cb(self, activity):
"""Finish sharing the activity
"""
self.emit('activity-shared', True, activity, None)
def __share_activity_error_cb(self, activity, error):
"""Notify with GObject event of unsuccessful sharing of activity
"""
self.emit('activity-shared', False, activity, error)
def share_activity(self, activity, properties=None, private=True):
if properties is None:
properties = {}
if 'id' not in properties:
properties['id'] = activity.get_id()
if 'type' not in properties:
properties['type'] = activity.get_bundle_id()
if 'name' not in properties:
properties['name'] = activity.metadata.get('title', None)
if 'color' not in properties:
properties['color'] = activity.metadata.get('icon-color', None)
properties['private'] = private
if self._activity_cache is not None:
raise ValueError('Activity %s is already tracked' %
activity.get_id())
connection_manager = get_connection_manager()
account_path, connection = \
connection_manager.get_preferred_connection()
if connection is None:
self.emit('activity-shared', False, None,
'No active connection available')
return
shared_activity = Activity(account_path, connection,
properties=properties)
self._activity_cache = shared_activity
if shared_activity.props.joined:
raise RuntimeError('Activity %s is already shared.' %
activity.props.id)
shared_activity.share(self.__share_activity_cb,
self.__share_activity_error_cb)
def get_preferred_connection(self):
"""Gets the preferred telepathy connection object that an activity
should use when talking directly to telepathy
returns the bus name and the object path of the Telepathy connection
"""
manager = get_connection_manager()
account_path, connection = manager.get_preferred_connection()
if connection is None:
return None
else:
return connection.requested_bus_name, connection.object_path
# DEPRECATED
def get(self, object_path):
raise NotImplementedError()
# DEPRECATED
def get_activities(self):
raise NotImplementedError()
# DEPRECATED
def get_activities_async(self, reply_handler=None, error_handler=None):
raise NotImplementedError()
# DEPRECATED
def get_buddies(self):
raise NotImplementedError()
# DEPRECATED
def get_buddies_async(self, reply_handler=None, error_handler=None):
raise NotImplementedError()
_ps = None
def get_instance(allow_offline_iface=False):
"""Retrieve this process' view of the PresenceService"""
global _ps
if not _ps:
_ps = PresenceService()
return _ps
|
heathseals/CouchPotatoServer
|
refs/heads/master
|
libs/pyutil/test/current/json_tests/test_pass2.py
|
106
|
from unittest import TestCase
from pyutil import jsonutil as json
# from http://json.org/JSON_checker/test/pass2.json
JSON = r'''
[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]]
'''
class TestPass2(TestCase):
def test_parse(self):
# test in/out equivalence and parsing
res = json.loads(JSON)
out = json.dumps(res)
self.assertEqual(res, json.loads(out))
|
slideclick/mal
|
refs/heads/master
|
python/step0_repl.py
|
54
|
import sys, traceback
import mal_readline
# read
def READ(str):
return str
# eval
def EVAL(ast, env):
#print("EVAL %s" % printer._pr_str(ast))
return ast
# print
def PRINT(exp):
return exp
# repl
def REP(str):
return PRINT(EVAL(READ(str), {}))
# repl loop
while True:
try:
line = mal_readline.readline("user> ")
if line is None: break
if line == "": continue
print(REP(line))
except Exception as e:
print("".join(traceback.format_exception(*sys.exc_info())))
|
dawran6/zulip
|
refs/heads/master
|
zerver/migrations/0043_realm_filter_validators.py
|
38
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
import zerver.models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0042_attachment_file_name_length'),
]
operations = [
migrations.AlterField(
model_name='realmfilter',
name='pattern',
field=models.TextField(validators=[zerver.models.filter_pattern_validator]),
),
migrations.AlterField(
model_name='realmfilter',
name='url_format_string',
field=models.TextField(validators=[django.core.validators.URLValidator, zerver.models.filter_format_validator]),
),
]
|
claricen/website
|
refs/heads/master
|
publishconf.py
|
9
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = '//wics.uwaterloo.ca'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
|
bnprk/django-oscar
|
refs/heads/master
|
runtests.py
|
25
|
#!/usr/bin/env python
"""
Custom test runner
If no args or options, we run the testsuite as quickly as possible.
If args but no options, we default to using the spec plugin and aborting on
first error/failure.
If options, we ignore defaults and pass options onto pytest.
Examples:
Run all tests (as fast as possible)
$ ./runtests.py
Run all unit tests (using spec output)
$ ./runtests.py tests/unit
Run all checkout unit tests (using spec output)
$ ./runtests.py tests/unit/checkout
Re-run failing tests (requires pytest-cache)
$ ./runtests.py ... --lf
Drop into pdb when a test fails
$ ./runtests.py ... --pdb
"""
import os
import multiprocessing
import sys
import logging
import warnings
import pytest
from django.utils.six.moves import map
# No logging
logging.disable(logging.CRITICAL)
if __name__ == '__main__':
args = sys.argv[1:]
verbosity = 1
if not args:
# If run with no args, try and run the testsuite as fast as possible.
# That means across all cores and with no high-falutin' plugins.
try:
cpu_count = int(multiprocessing.cpu_count())
except (ValueError, NotImplementedError):
cpu_count = 1
args = [
'--capture=no', '--nomigrations', '-n=%d' % cpu_count,
'tests'
]
else:
# Some args/options specified. Check to see if any options have
# been specified. If they have, then don't set any
has_options = any(map(lambda x: x.startswith('--'), args))
if not has_options:
# Default options:
# --exitfirst Abort on first error/failure
# --capture=no Don't capture STDOUT
args.extend(['--capture=no', '--nomigrations', '--exitfirst'])
else:
args = [arg for arg in args if not arg.startswith('-')]
with warnings.catch_warnings():
# The warnings module in default configuration will never cause tests
# to fail, as it never raises an exception. We alter that behaviour by
# turning DeprecationWarnings into exceptions, but exclude warnings
# triggered by third-party libs. Note: The context manager is not
# thread safe. Behaviour with multiple threads is undefined.
warnings.filterwarnings('error', category=DeprecationWarning)
warnings.filterwarnings('error', category=RuntimeWarning)
libs = r'(sorl\.thumbnail.*|bs4.*|webtest.*)'
warnings.filterwarnings(
'ignore', r'.*', DeprecationWarning, libs)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings')
result_code = pytest.main(args)
sys.exit(result_code)
|
jmankoff/data
|
refs/heads/master
|
Assignments/jmankoff-fusion/lib/werkzeug/contrib/lint.py
|
128
|
# -*- coding: utf-8 -*-
"""
werkzeug.contrib.lint
~~~~~~~~~~~~~~~~~~~~~
.. versionadded:: 0.5
This module provides a middleware that performs sanity checks of the WSGI
application. It checks that :pep:`333` is properly implemented and warns
on some common HTTP errors such as non-empty responses for 304 status
codes.
This module provides a middleware, the :class:`LintMiddleware`. Wrap your
application with it and it will warn about common problems with WSGI and
HTTP while your application is running.
It's strongly recommended to use it during development.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from urlparse import urlparse
from warnings import warn
from werkzeug.datastructures import Headers
from werkzeug.http import is_entity_header
from werkzeug.wsgi import FileWrapper
from werkzeug._compat import string_types
class WSGIWarning(Warning):
"""Warning class for WSGI warnings."""
class HTTPWarning(Warning):
"""Warning class for HTTP warnings."""
def check_string(context, obj, stacklevel=3):
if type(obj) is not str:
warn(WSGIWarning('%s requires bytestrings, got %s' %
(context, obj.__class__.__name__)))
class InputStream(object):
def __init__(self, stream):
self._stream = stream
def read(self, *args):
if len(args) == 0:
warn(WSGIWarning('wsgi does not guarantee an EOF marker on the '
'input stream, thus making calls to '
'wsgi.input.read() unsafe. Conforming servers '
'may never return from this call.'),
stacklevel=2)
elif len(args) != 1:
warn(WSGIWarning('too many parameters passed to wsgi.input.read()'),
stacklevel=2)
return self._stream.read(*args)
def readline(self, *args):
if len(args) == 0:
warn(WSGIWarning('Calls to wsgi.input.readline() without arguments'
' are unsafe. Use wsgi.input.read() instead.'),
stacklevel=2)
elif len(args) == 1:
warn(WSGIWarning('wsgi.input.readline() was called with a size hint. '
'WSGI does not support this, although it\'s available '
'on all major servers.'),
stacklevel=2)
else:
raise TypeError('too many arguments passed to wsgi.input.readline()')
return self._stream.readline(*args)
def __iter__(self):
try:
return iter(self._stream)
except TypeError:
warn(WSGIWarning('wsgi.input is not iterable.'), stacklevel=2)
return iter(())
def close(self):
warn(WSGIWarning('application closed the input stream!'),
stacklevel=2)
self._stream.close()
class ErrorStream(object):
def __init__(self, stream):
self._stream = stream
def write(self, s):
check_string('wsgi.error.write()', s)
self._stream.write(s)
def flush(self):
self._stream.flush()
def writelines(self, seq):
for line in seq:
self.write(line)
def close(self):
warn(WSGIWarning('application closed the error stream!'),
stacklevel=2)
self._stream.close()
class GuardedWrite(object):
def __init__(self, write, chunks):
self._write = write
self._chunks = chunks
def __call__(self, s):
check_string('write()', s)
self._write(s)
self._chunks.append(len(s))
class GuardedIterator(object):
def __init__(self, iterator, headers_set, chunks):
self._iterator = iterator
self._next = iter(iterator).next
self.closed = False
self.headers_set = headers_set
self.chunks = chunks
def __iter__(self):
return self
def next(self):
if self.closed:
warn(WSGIWarning('iterated over closed app_iter'),
stacklevel=2)
rv = self._next()
if not self.headers_set:
warn(WSGIWarning('Application returned before it '
'started the response'), stacklevel=2)
check_string('application iterator items', rv)
self.chunks.append(len(rv))
return rv
def close(self):
self.closed = True
if hasattr(self._iterator, 'close'):
self._iterator.close()
if self.headers_set:
status_code, headers = self.headers_set
bytes_sent = sum(self.chunks)
content_length = headers.get('content-length', type=int)
if status_code == 304:
for key, value in headers:
key = key.lower()
if key not in ('expires', 'content-location') and \
is_entity_header(key):
warn(HTTPWarning('entity header %r found in 304 '
'response' % key))
if bytes_sent:
warn(HTTPWarning('304 responses must not have a body'))
elif 100 <= status_code < 200 or status_code == 204:
if content_length != 0:
warn(HTTPWarning('%r responses must have an empty '
'content length' % status_code))
if bytes_sent:
warn(HTTPWarning('%r responses must not have a body' %
status_code))
elif content_length is not None and content_length != bytes_sent:
warn(WSGIWarning('Content-Length and the number of bytes '
'sent to the client do not match.'))
def __del__(self):
if not self.closed:
try:
warn(WSGIWarning('Iterator was garbage collected before '
'it was closed.'))
except Exception:
pass
class LintMiddleware(object):
"""This middleware wraps an application and warns on common errors.
Among other thing it currently checks for the following problems:
- invalid status codes
- non-bytestrings sent to the WSGI server
- strings returned from the WSGI application
- non-empty conditional responses
- unquoted etags
- relative URLs in the Location header
- unsafe calls to wsgi.input
- unclosed iterators
Detected errors are emitted using the standard Python :mod:`warnings`
system and usually end up on :data:`stderr`.
::
from werkzeug.contrib.lint import LintMiddleware
app = LintMiddleware(app)
:param app: the application to wrap
"""
def __init__(self, app):
self.app = app
def check_environ(self, environ):
if type(environ) is not dict:
warn(WSGIWarning('WSGI environment is not a standard python dict.'),
stacklevel=4)
for key in ('REQUEST_METHOD', 'SERVER_NAME', 'SERVER_PORT',
'wsgi.version', 'wsgi.input', 'wsgi.errors',
'wsgi.multithread', 'wsgi.multiprocess',
'wsgi.run_once'):
if key not in environ:
warn(WSGIWarning('required environment key %r not found'
% key), stacklevel=3)
if environ['wsgi.version'] != (1, 0):
warn(WSGIWarning('environ is not a WSGI 1.0 environ'),
stacklevel=3)
script_name = environ.get('SCRIPT_NAME', '')
if script_name and script_name[:1] != '/':
warn(WSGIWarning('SCRIPT_NAME does not start with a slash: %r'
% script_name), stacklevel=3)
path_info = environ.get('PATH_INFO', '')
if path_info[:1] != '/':
warn(WSGIWarning('PATH_INFO does not start with a slash: %r'
% path_info), stacklevel=3)
def check_start_response(self, status, headers, exc_info):
check_string('status', status)
status_code = status.split(None, 1)[0]
if len(status_code) != 3 or not status_code.isdigit():
warn(WSGIWarning('Status code must be three digits'), stacklevel=3)
if len(status) < 4 or status[3] != ' ':
warn(WSGIWarning('Invalid value for status %r. Valid '
'status strings are three digits, a space '
'and a status explanation' % status), stacklevel=3)
status_code = int(status_code)
if status_code < 100:
warn(WSGIWarning('status code < 100 detected'), stacklevel=3)
if type(headers) is not list:
warn(WSGIWarning('header list is not a list'), stacklevel=3)
for item in headers:
if type(item) is not tuple or len(item) != 2:
warn(WSGIWarning('Headers must be a list of 2-item tuples'),
stacklevel=3)
name, value = item
if type(name) is not str or type(value) is not str:
warn(WSGIWarning('header items must be strings'),
stacklevel=3)
if name.lower() == 'status':
warn(WSGIWarning('The status header is not supported due to '
'conflicts with the CGI spec.'),
stacklevel=3)
if exc_info is not None and not isinstance(exc_info, tuple):
warn(WSGIWarning('invalid value for exc_info'), stacklevel=3)
headers = Headers(headers)
self.check_headers(headers)
return status_code, headers
def check_headers(self, headers):
etag = headers.get('etag')
if etag is not None:
if etag.startswith('w/'):
etag = etag[2:]
if not (etag[:1] == etag[-1:] == '"'):
warn(HTTPWarning('unquoted etag emitted.'), stacklevel=4)
location = headers.get('location')
if location is not None:
if not urlparse(location).netloc:
warn(HTTPWarning('absolute URLs required for location header'),
stacklevel=4)
def check_iterator(self, app_iter):
if isinstance(app_iter, string_types):
warn(WSGIWarning('application returned string. Response will '
'send character for character to the client '
'which will kill the performance. Return a '
'list or iterable instead.'), stacklevel=3)
def __call__(self, *args, **kwargs):
if len(args) != 2:
warn(WSGIWarning('Two arguments to WSGI app required'), stacklevel=2)
if kwargs:
warn(WSGIWarning('No keyword arguments to WSGI app allowed'),
stacklevel=2)
environ, start_response = args
self.check_environ(environ)
environ['wsgi.input'] = InputStream(environ['wsgi.input'])
environ['wsgi.errors'] = ErrorStream(environ['wsgi.errors'])
# hook our own file wrapper in so that applications will always
# iterate to the end and we can check the content length
environ['wsgi.file_wrapper'] = FileWrapper
headers_set = []
chunks = []
def checking_start_response(*args, **kwargs):
if len(args) not in (2, 3):
warn(WSGIWarning('Invalid number of arguments: %s, expected '
'2 or 3' % len(args)), stacklevel=2)
if kwargs:
warn(WSGIWarning('no keyword arguments allowed.'))
status, headers = args[:2]
if len(args) == 3:
exc_info = args[2]
else:
exc_info = None
headers_set[:] = self.check_start_response(status, headers,
exc_info)
return GuardedWrite(start_response(status, headers, exc_info),
chunks)
app_iter = self.app(environ, checking_start_response)
self.check_iterator(app_iter)
return GuardedIterator(app_iter, headers_set, chunks)
|
magfest/tabletop
|
refs/heads/master
|
tabletop/models.py
|
1
|
from tabletop import *
@Session.model_mixin
class SessionMixin:
def entrants(self):
return (self.query(TabletopEntrant)
.options(joinedload(TabletopEntrant.reminder),
joinedload(TabletopEntrant.attendee),
subqueryload(TabletopEntrant.tournament).subqueryload(TabletopTournament.event)))
def entrants_by_phone(self):
entrants = defaultdict(list)
for entrant in self.entrants():
entrants[normalize(entrant.attendee.cellphone)].append(entrant)
return entrants
@Session.model_mixin
class Attendee:
games = relationship('TabletopGame', backref='attendee')
checkouts = relationship('TabletopCheckout', backref='attendee')
entrants = relationship('TabletopEntrant', backref='attendee')
@Session.model_mixin
class Event:
tournaments = relationship('TabletopTournament', backref='event', uselist=False)
class TabletopGame(MagModel):
code = Column(UnicodeText)
name = Column(UnicodeText)
attendee_id = Column(UUID, ForeignKey('attendee.id'))
returned = Column(Boolean, default=False)
checkouts = relationship('TabletopCheckout', backref='game')
_repr_attr_names = ['name']
@property
def checked_out(self):
try:
return [c for c in self.checkouts if not c.returned][0]
except IndexError:
pass
class TabletopCheckout(MagModel):
game_id = Column(UUID, ForeignKey('tabletop_game.id'))
attendee_id = Column(UUID, ForeignKey('attendee.id'))
checked_out = Column(UTCDateTime, default=lambda: datetime.now(UTC))
returned = Column(UTCDateTime, nullable=True)
class TabletopTournament(MagModel):
event_id = Column(UUID, ForeignKey('event.id'), unique=True)
name = Column(UnicodeText) # separate from the event name for cases where we want a shorter name in our SMS messages
entrants = relationship('TabletopEntrant', backref='tournament')
class TabletopEntrant(MagModel):
tournament_id = Column(UUID, ForeignKey('tabletop_tournament.id'))
attendee_id = Column(UUID, ForeignKey('attendee.id'))
signed_up = Column(UTCDateTime, default=lambda: datetime.now(UTC))
confirmed = Column(Boolean, default=False)
reminder = relationship('TabletopSmsReminder', backref='entrant', uselist=False)
replies = relationship('TabletopSmsReply', backref='entrant')
@presave_adjustment
def _within_cutoff(self):
if self.is_new:
tournament = self.tournament or self.session.tabletop_tournament(self.tournament_id)
if self.signed_up > tournament.event.start_time - timedelta(minutes=c.SMS_CUTOFF_MINUTES):
self.confirmed = True
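# Illustrative: with an (assumed) c.SMS_CUTOFF_MINUTES of 60, an entrant
# signing up 30 minutes before the event starts is confirmed immediately,
# since no reminder SMS could realistically go out and be answered in time.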
@property
def should_send_reminder(self):
return not self.confirmed and not self.reminder \
and localized_now() < self.tournament.event.start_time \
and localized_now() > self.signed_up + timedelta(minutes=c.TABLETOP_SMS_STAGGER_MINUTES) \
and localized_now() > self.tournament.event.start_time - timedelta(minutes=c.TABLETOP_SMS_REMINDER_MINUTES)
def matches(self, message):
sent = message.date_sent.replace(tzinfo=UTC)
return normalize(self.attendee.cellphone) == message.from_ \
and self.reminder and sent > self.reminder.when \
and sent < self.tournament.event.start_time + timedelta(minutes=c.TABLETOP_TOURNAMENT_SLACK)
__table_args__ = (
UniqueConstraint('tournament_id', 'attendee_id', name='_tournament_entrant_uniq'),
)
class TabletopSmsReminder(MagModel):
entrant_id = Column(UUID, ForeignKey('tabletop_entrant.id'), unique=True)
sid = Column(UnicodeText)
when = Column(UTCDateTime, default=lambda: datetime.now(UTC))
text = Column(UnicodeText)
class TabletopSmsReply(MagModel):
entrant_id = Column(UUID, ForeignKey('tabletop_entrant.id'), nullable=True)
sid = Column(UnicodeText)
when = Column(UTCDateTime)
text = Column(UnicodeText)
|
jsteemann/arangodb
|
refs/heads/devel
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/encodings/mac_greek.py
|
593
|
""" Python Character Mapping Codec mac_greek generated from 'MAPPINGS/VENDORS/APPLE/GREEK.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mac-greek',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
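# Illustrative round-trip once the codec is registered via the encodings
# package (the byte values are taken from the tables below):
#   u'\u03b1'.encode('mac-greek') == '\xe1'   # GREEK SMALL LETTER ALPHA
#   '\xe1'.decode('mac-greek') == u'\u03b1'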
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xb9' # 0x81 -> SUPERSCRIPT ONE
u'\xb2' # 0x82 -> SUPERSCRIPT TWO
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xb3' # 0x84 -> SUPERSCRIPT THREE
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u0385' # 0x87 -> GREEK DIALYTIKA TONOS
u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\u0384' # 0x8B -> GREEK TONOS
u'\xa8' # 0x8C -> DIAERESIS
u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xa3' # 0x92 -> POUND SIGN
u'\u2122' # 0x93 -> TRADE MARK SIGN
u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\u2022' # 0x96 -> BULLET
u'\xbd' # 0x97 -> VULGAR FRACTION ONE HALF
u'\u2030' # 0x98 -> PER MILLE SIGN
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xa6' # 0x9B -> BROKEN BAR
u'\u20ac' # 0x9C -> EURO SIGN # before Mac OS 9.2.2, was SOFT HYPHEN
u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u2020' # 0xA0 -> DAGGER
u'\u0393' # 0xA1 -> GREEK CAPITAL LETTER GAMMA
u'\u0394' # 0xA2 -> GREEK CAPITAL LETTER DELTA
u'\u0398' # 0xA3 -> GREEK CAPITAL LETTER THETA
u'\u039b' # 0xA4 -> GREEK CAPITAL LETTER LAMDA
u'\u039e' # 0xA5 -> GREEK CAPITAL LETTER XI
u'\u03a0' # 0xA6 -> GREEK CAPITAL LETTER PI
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u03a3' # 0xAA -> GREEK CAPITAL LETTER SIGMA
u'\u03aa' # 0xAB -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
u'\xa7' # 0xAC -> SECTION SIGN
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\xb0' # 0xAE -> DEGREE SIGN
u'\xb7' # 0xAF -> MIDDLE DOT
u'\u0391' # 0xB0 -> GREEK CAPITAL LETTER ALPHA
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\xa5' # 0xB4 -> YEN SIGN
u'\u0392' # 0xB5 -> GREEK CAPITAL LETTER BETA
u'\u0395' # 0xB6 -> GREEK CAPITAL LETTER EPSILON
u'\u0396' # 0xB7 -> GREEK CAPITAL LETTER ZETA
u'\u0397' # 0xB8 -> GREEK CAPITAL LETTER ETA
u'\u0399' # 0xB9 -> GREEK CAPITAL LETTER IOTA
u'\u039a' # 0xBA -> GREEK CAPITAL LETTER KAPPA
u'\u039c' # 0xBB -> GREEK CAPITAL LETTER MU
u'\u03a6' # 0xBC -> GREEK CAPITAL LETTER PHI
u'\u03ab' # 0xBD -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
u'\u03a8' # 0xBE -> GREEK CAPITAL LETTER PSI
u'\u03a9' # 0xBF -> GREEK CAPITAL LETTER OMEGA
u'\u03ac' # 0xC0 -> GREEK SMALL LETTER ALPHA WITH TONOS
u'\u039d' # 0xC1 -> GREEK CAPITAL LETTER NU
u'\xac' # 0xC2 -> NOT SIGN
u'\u039f' # 0xC3 -> GREEK CAPITAL LETTER OMICRON
u'\u03a1' # 0xC4 -> GREEK CAPITAL LETTER RHO
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u03a4' # 0xC6 -> GREEK CAPITAL LETTER TAU
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\u03a5' # 0xCB -> GREEK CAPITAL LETTER UPSILON
u'\u03a7' # 0xCC -> GREEK CAPITAL LETTER CHI
u'\u0386' # 0xCD -> GREEK CAPITAL LETTER ALPHA WITH TONOS
u'\u0388' # 0xCE -> GREEK CAPITAL LETTER EPSILON WITH TONOS
u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
u'\u2013' # 0xD0 -> EN DASH
u'\u2015' # 0xD1 -> HORIZONTAL BAR
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u0389' # 0xD7 -> GREEK CAPITAL LETTER ETA WITH TONOS
u'\u038a' # 0xD8 -> GREEK CAPITAL LETTER IOTA WITH TONOS
u'\u038c' # 0xD9 -> GREEK CAPITAL LETTER OMICRON WITH TONOS
u'\u038e' # 0xDA -> GREEK CAPITAL LETTER UPSILON WITH TONOS
u'\u03ad' # 0xDB -> GREEK SMALL LETTER EPSILON WITH TONOS
u'\u03ae' # 0xDC -> GREEK SMALL LETTER ETA WITH TONOS
u'\u03af' # 0xDD -> GREEK SMALL LETTER IOTA WITH TONOS
u'\u03cc' # 0xDE -> GREEK SMALL LETTER OMICRON WITH TONOS
u'\u038f' # 0xDF -> GREEK CAPITAL LETTER OMEGA WITH TONOS
u'\u03cd' # 0xE0 -> GREEK SMALL LETTER UPSILON WITH TONOS
u'\u03b1' # 0xE1 -> GREEK SMALL LETTER ALPHA
u'\u03b2' # 0xE2 -> GREEK SMALL LETTER BETA
u'\u03c8' # 0xE3 -> GREEK SMALL LETTER PSI
u'\u03b4' # 0xE4 -> GREEK SMALL LETTER DELTA
u'\u03b5' # 0xE5 -> GREEK SMALL LETTER EPSILON
u'\u03c6' # 0xE6 -> GREEK SMALL LETTER PHI
u'\u03b3' # 0xE7 -> GREEK SMALL LETTER GAMMA
u'\u03b7' # 0xE8 -> GREEK SMALL LETTER ETA
u'\u03b9' # 0xE9 -> GREEK SMALL LETTER IOTA
u'\u03be' # 0xEA -> GREEK SMALL LETTER XI
u'\u03ba' # 0xEB -> GREEK SMALL LETTER KAPPA
u'\u03bb' # 0xEC -> GREEK SMALL LETTER LAMDA
u'\u03bc' # 0xED -> GREEK SMALL LETTER MU
u'\u03bd' # 0xEE -> GREEK SMALL LETTER NU
u'\u03bf' # 0xEF -> GREEK SMALL LETTER OMICRON
u'\u03c0' # 0xF0 -> GREEK SMALL LETTER PI
u'\u03ce' # 0xF1 -> GREEK SMALL LETTER OMEGA WITH TONOS
u'\u03c1' # 0xF2 -> GREEK SMALL LETTER RHO
u'\u03c3' # 0xF3 -> GREEK SMALL LETTER SIGMA
u'\u03c4' # 0xF4 -> GREEK SMALL LETTER TAU
u'\u03b8' # 0xF5 -> GREEK SMALL LETTER THETA
u'\u03c9' # 0xF6 -> GREEK SMALL LETTER OMEGA
u'\u03c2' # 0xF7 -> GREEK SMALL LETTER FINAL SIGMA
u'\u03c7' # 0xF8 -> GREEK SMALL LETTER CHI
u'\u03c5' # 0xF9 -> GREEK SMALL LETTER UPSILON
u'\u03b6' # 0xFA -> GREEK SMALL LETTER ZETA
u'\u03ca' # 0xFB -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
u'\u03cb' # 0xFC -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
u'\u0390' # 0xFD -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
u'\u03b0' # 0xFE -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
u'\xad' # 0xFF -> SOFT HYPHEN # before Mac OS 9.2.2, was undefined
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
|
chrisng93/todo-app
|
refs/heads/master
|
server/manage.py
|
1
|
from flask_script import Manager
from app.app import create_app
from app.extensions import db
manager = Manager(create_app)
@manager.command
def createdb():
app = create_app()
with app.app_context():
db.drop_all()
db.create_all()
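# Illustrative invocation (assumes this file is executed directly):
#   $ python manage.py createdb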
if __name__ == '__main__':
manager.run()
|
zack3241/incubator-airflow
|
refs/heads/master
|
airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py
|
58
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""add dag_stats table
Revision ID: f2ca10b85618
Revises: 64de9cddf6c9
Create Date: 2016-07-20 15:08:28.247537
"""
# revision identifiers, used by Alembic.
revision = 'f2ca10b85618'
down_revision = '64de9cddf6c9'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table('dag_stats',
sa.Column('dag_id', sa.String(length=250), nullable=False),
sa.Column('state', sa.String(length=50), nullable=False),
sa.Column('count', sa.Integer(), nullable=False, default=0),
sa.Column('dirty', sa.Boolean(), nullable=False, default=False),
sa.PrimaryKeyConstraint('dag_id', 'state'))
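# Roughly the DDL this generates (illustrative; exact SQL varies by backend):
#   CREATE TABLE dag_stats (
#       dag_id VARCHAR(250) NOT NULL,
#       state VARCHAR(50) NOT NULL,
#       count INTEGER NOT NULL,
#       dirty BOOLEAN NOT NULL,
#       PRIMARY KEY (dag_id, state)
#   )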
def downgrade():
op.drop_table('dag_stats')
|
sinkuri256/python-for-android
|
refs/heads/master
|
python-modules/twisted/twisted/python/hashlib.py
|
68
|
# -*- test-case-name: twisted.python.test.test_hashlib -*-
# Copyright (c) 2008 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
L{twisted.python.hashlib} presents a subset of the interface provided by
U{hashlib<http://docs.python.org/library/hashlib.html>}. The subset is the
interface required by various parts of Twisted. This allows application code
to transparently use APIs which existed before C{hashlib} was introduced or to
use C{hashlib} if it is available.
"""
try:
_hashlib = __import__("hashlib")
except ImportError:
from md5 import md5
from sha import sha as sha1
else:
md5 = _hashlib.md5
sha1 = _hashlib.sha1
__all__ = ["md5", "sha1"]
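# Illustrative usage (the calls are identical with either import path above):
#   fingerprint = md5('some bytes').hexdigest()
#   signature = sha1('some bytes').hexdigest()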
|
SUSE/azure-sdk-for-python
|
refs/heads/master
|
azure-mgmt-documentdb/azure/mgmt/documentdb/models/database_account_connection_string.py
|
5
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class DatabaseAccountConnectionString(Model):
"""Connection string for the DocumentDB account.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar connection_string: Value of the connection string
:vartype connection_string: str
:ivar description: Description of the connection string
:vartype description: str
"""
_validation = {
'connection_string': {'readonly': True},
'description': {'readonly': True},
}
_attribute_map = {
'connection_string': {'key': 'connectionString', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(self):
self.connection_string = None
self.description = None
|
jamespcole/home-assistant
|
refs/heads/master
|
homeassistant/components/honeywell/climate.py
|
2
|
"""
Support for Honeywell Round Connected and Honeywell Evohome thermostats.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/climate.honeywell/
"""
import logging
import socket
import datetime
import requests
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.climate import ClimateDevice, PLATFORM_SCHEMA
from homeassistant.components.climate.const import (
ATTR_FAN_MODE, ATTR_FAN_LIST,
ATTR_OPERATION_MODE, ATTR_OPERATION_LIST, SUPPORT_TARGET_TEMPERATURE,
SUPPORT_AWAY_MODE, SUPPORT_OPERATION_MODE)
from homeassistant.const import (
CONF_PASSWORD, CONF_USERNAME, TEMP_CELSIUS, TEMP_FAHRENHEIT,
ATTR_TEMPERATURE, CONF_REGION)
REQUIREMENTS = ['evohomeclient==0.2.8', 'somecomfort==0.5.2']
_LOGGER = logging.getLogger(__name__)
ATTR_FAN = 'fan'
ATTR_SYSTEM_MODE = 'system_mode'
ATTR_CURRENT_OPERATION = 'equipment_output_status'
CONF_AWAY_TEMPERATURE = 'away_temperature'
CONF_COOL_AWAY_TEMPERATURE = 'away_cool_temperature'
CONF_HEAT_AWAY_TEMPERATURE = 'away_heat_temperature'
DEFAULT_AWAY_TEMPERATURE = 16
DEFAULT_COOL_AWAY_TEMPERATURE = 30
DEFAULT_HEAT_AWAY_TEMPERATURE = 16
DEFAULT_REGION = 'eu'
REGIONS = ['eu', 'us']
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_AWAY_TEMPERATURE,
default=DEFAULT_AWAY_TEMPERATURE): vol.Coerce(float),
vol.Optional(CONF_COOL_AWAY_TEMPERATURE,
default=DEFAULT_COOL_AWAY_TEMPERATURE): vol.Coerce(float),
vol.Optional(CONF_HEAT_AWAY_TEMPERATURE,
default=DEFAULT_HEAT_AWAY_TEMPERATURE): vol.Coerce(float),
vol.Optional(CONF_REGION, default=DEFAULT_REGION): vol.In(REGIONS),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Honeywell thermostat."""
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
region = config.get(CONF_REGION)
if region == 'us':
return _setup_us(username, password, config, add_entities)
return _setup_round(username, password, config, add_entities)
def _setup_round(username, password, config, add_entities):
"""Set up the rounding function."""
from evohomeclient import EvohomeClient
away_temp = config.get(CONF_AWAY_TEMPERATURE)
evo_api = EvohomeClient(username, password)
try:
zones = evo_api.temperatures(force_refresh=True)
for i, zone in enumerate(zones):
add_entities(
[RoundThermostat(evo_api, zone['id'], i == 0, away_temp)],
True
)
except socket.error:
_LOGGER.error(
"Connection error logging into the honeywell evohome web service")
return False
return True
# config will be used later
def _setup_us(username, password, config, add_entities):
"""Set up the user."""
import somecomfort
try:
client = somecomfort.SomeComfort(username, password)
except somecomfort.AuthError:
_LOGGER.error("Failed to login to honeywell account %s", username)
return False
except somecomfort.SomeComfortError as ex:
_LOGGER.error("Failed to initialize honeywell client: %s", str(ex))
return False
dev_id = config.get('thermostat')
loc_id = config.get('location')
cool_away_temp = config.get(CONF_COOL_AWAY_TEMPERATURE)
heat_away_temp = config.get(CONF_HEAT_AWAY_TEMPERATURE)
add_entities([HoneywellUSThermostat(client, device, cool_away_temp,
heat_away_temp, username, password)
for location in client.locations_by_id.values()
for device in location.devices_by_id.values()
if ((not loc_id or location.locationid == loc_id) and
(not dev_id or device.deviceid == dev_id))])
return True
class RoundThermostat(ClimateDevice):
"""Representation of a Honeywell Round Connected thermostat."""
def __init__(self, client, zone_id, master, away_temp):
"""Initialize the thermostat."""
self.client = client
self._current_temperature = None
self._target_temperature = None
self._name = 'round connected'
self._id = zone_id
self._master = master
self._is_dhw = False
self._away_temp = away_temp
self._away = False
@property
def supported_features(self):
"""Return the list of supported features."""
supported = (SUPPORT_TARGET_TEMPERATURE | SUPPORT_AWAY_MODE)
if hasattr(self.client, ATTR_SYSTEM_MODE):
supported |= SUPPORT_OPERATION_MODE
return supported
@property
def name(self):
"""Return the name of the honeywell, if any."""
return self._name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
if self._is_dhw:
return None
return self._target_temperature
def set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
self.client.set_temperature(self._name, temperature)
@property
def current_operation(self) -> str:
"""Get the current operation of the system."""
return getattr(self.client, ATTR_SYSTEM_MODE, None)
@property
def is_away_mode_on(self):
"""Return true if away mode is on."""
return self._away
def set_operation_mode(self, operation_mode: str) -> None:
"""Set the HVAC mode for the thermostat."""
if hasattr(self.client, ATTR_SYSTEM_MODE):
self.client.system_mode = operation_mode
def turn_away_mode_on(self):
"""Turn away on.
Honeywell does have a proprietary away mode, but it doesn't really work
the way it should. For example: If you set a temperature manually
it doesn't get overwritten when away mode is switched on.
"""
self._away = True
self.client.set_temperature(self._name, self._away_temp)
def turn_away_mode_off(self):
"""Turn away off."""
self._away = False
self.client.cancel_temp_override(self._name)
def update(self):
"""Get the latest date."""
try:
# Only refresh if this is the "master" device,
# others will pick up the cache
for val in self.client.temperatures(force_refresh=self._master):
if val['id'] == self._id:
data = val
except KeyError:
_LOGGER.error("Update failed from Honeywell server")
self.client.user_data = None
return
except StopIteration:
_LOGGER.error("Did not receive any temperature data from the "
"evohomeclient API")
return
self._current_temperature = data['temp']
self._target_temperature = data['setpoint']
if data['thermostat'] == 'DOMESTIC_HOT_WATER':
self._name = 'Hot Water'
self._is_dhw = True
else:
self._name = data['name']
self._is_dhw = False
# The underlying library doesn't expose the thermostat's mode
# but we can pull it out of the big dictionary of information.
device = self.client.devices[self._id]
self.client.system_mode = device[
'thermostat']['changeableValues']['mode']
class HoneywellUSThermostat(ClimateDevice):
"""Representation of a Honeywell US Thermostat."""
def __init__(self, client, device, cool_away_temp,
heat_away_temp, username, password):
"""Initialize the thermostat."""
self._client = client
self._device = device
self._cool_away_temp = cool_away_temp
self._heat_away_temp = heat_away_temp
self._away = False
self._username = username
self._password = password
@property
def supported_features(self):
"""Return the list of supported features."""
supported = (SUPPORT_TARGET_TEMPERATURE | SUPPORT_AWAY_MODE)
if hasattr(self._device, ATTR_SYSTEM_MODE):
supported |= SUPPORT_OPERATION_MODE
return supported
@property
def is_fan_on(self):
"""Return true if fan is on."""
return self._device.fan_running
@property
def name(self):
"""Return the name of the honeywell, if any."""
return self._device.name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return (TEMP_CELSIUS if self._device.temperature_unit == 'C'
else TEMP_FAHRENHEIT)
@property
def current_temperature(self):
"""Return the current temperature."""
return self._device.current_temperature
@property
def current_humidity(self):
"""Return the current humidity."""
return self._device.current_humidity
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
if self._device.system_mode == 'cool':
return self._device.setpoint_cool
return self._device.setpoint_heat
@property
def current_operation(self) -> str:
"""Return current operation ie. heat, cool, idle."""
oper = getattr(self._device, ATTR_CURRENT_OPERATION, None)
if oper == "off":
oper = "idle"
return oper
def set_temperature(self, **kwargs):
"""Set target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
import somecomfort
try:
# Get current mode
mode = self._device.system_mode
# Set hold if this is not the case
if getattr(self._device, "hold_{}".format(mode)) is False:
# Get next period key
next_period_key = '{}NextPeriod'.format(mode.capitalize())
# Get next period raw value
next_period = self._device.raw_ui_data.get(next_period_key)
# Get next period time
hour, minute = divmod(next_period * 15, 60)
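# Illustrative: the raw value counts 15-minute periods from midnight, so an
# (assumed) next_period of 70 gives divmod(70 * 15, 60) == (17, 30),
# i.e. the hold runs until 17:30.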
# Set hold time
setattr(self._device,
"hold_{}".format(mode),
datetime.time(hour, minute))
# Set temperature
setattr(self._device,
"setpoint_{}".format(mode),
temperature)
except somecomfort.SomeComfortError:
_LOGGER.error("Temperature %.1f out of range", temperature)
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
import somecomfort
data = {
ATTR_FAN: 'running' if self.is_fan_on else 'idle',
ATTR_FAN_MODE: self._device.fan_mode,
ATTR_OPERATION_MODE: self._device.system_mode,
}
data[ATTR_FAN_LIST] = somecomfort.FAN_MODES
data[ATTR_OPERATION_LIST] = somecomfort.SYSTEM_MODES
return data
@property
def is_away_mode_on(self):
"""Return true if away mode is on."""
return self._away
def turn_away_mode_on(self):
"""Turn away on.
Somecomfort does have a proprietary away mode, but it doesn't really
work the way it should. For example: If you set a temperature manually
it doesn't get overwritten when away mode is switched on.
"""
self._away = True
import somecomfort
try:
# Get current mode
mode = self._device.system_mode
except somecomfort.SomeComfortError:
_LOGGER.error('Cannot get system mode')
return
try:
# Set permanent hold
setattr(self._device,
"hold_{}".format(mode),
True)
# Set temperature
setattr(self._device,
"setpoint_{}".format(mode),
getattr(self, "_{}_away_temp".format(mode)))
except somecomfort.SomeComfortError:
_LOGGER.error('Temperature %.1f out of range',
getattr(self, "_{}_away_temp".format(mode)))
def turn_away_mode_off(self):
"""Turn away off."""
self._away = False
import somecomfort
try:
# Disabling all hold modes
self._device.hold_cool = False
self._device.hold_heat = False
except somecomfort.SomeComfortError:
_LOGGER.error('Cannot stop hold mode')
def set_operation_mode(self, operation_mode: str) -> None:
"""Set the system mode (Cool, Heat, etc)."""
if hasattr(self._device, ATTR_SYSTEM_MODE):
self._device.system_mode = operation_mode
def update(self):
"""Update the state."""
import somecomfort
retries = 3
while retries > 0:
try:
self._device.refresh()
break
except (somecomfort.client.APIRateLimited, OSError,
requests.exceptions.ReadTimeout) as exp:
retries -= 1
if retries == 0:
raise exp
if not self._retry():
raise exp
_LOGGER.error(
"SomeComfort update failed, Retrying - Error: %s", exp)
def _retry(self):
"""Recreate a new somecomfort client.
When we got an error, the best way to be sure that the next query
will succeed, is to recreate a new somecomfort client.
"""
import somecomfort
try:
self._client = somecomfort.SomeComfort(
self._username, self._password)
except somecomfort.AuthError:
_LOGGER.error("Failed to login to honeywell account %s",
self._username)
return False
except somecomfort.SomeComfortError as ex:
_LOGGER.error("Failed to initialize honeywell client: %s",
str(ex))
return False
devices = [device
for location in self._client.locations_by_id.values()
for device in location.devices_by_id.values()
if device.name == self._device.name]
if len(devices) != 1:
_LOGGER.error("Failed to find device %s", self._device.name)
return False
self._device = devices[0]
return True
|
CCPorg/MCR-MicroCash-Ver-1000gdf9d55a-Copy
|
refs/heads/master
|
share/qt/extract_strings_qt.py
|
1294
|
#!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')
# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out)
f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {')
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
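# Illustrative line this emits for a message like the one above (made up):
#   QT_TRANSLATE_NOOP("bitcoin-core", "Loading addresses..."),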
f.close()
|
detiber/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/centurylink/clc_firewall_policy.py
|
27
|
#!/usr/bin/python
#
# Copyright (c) 2015 CenturyLink
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: clc_firewall_policy
short_description: Create/delete/update firewall policies
description:
- Create or delete or update firewall policies on CenturyLink Cloud
version_added: "2.0"
options:
location:
description:
- Target datacenter for the firewall policy
required: True
state:
description:
- Whether to create or delete the firewall policy
default: present
required: False
choices: ['present', 'absent']
source:
description:
- The list of source addresses for traffic on the originating firewall.
This is required when state is 'present'
default: None
required: False
destination:
description:
- The list of destination addresses for traffic on the terminating firewall.
This is required when state is 'present'
default: None
required: False
ports:
description:
- The list of ports associated with the policy.
TCP and UDP can take in single ports or port ranges.
default: None
required: False
choices: ['any', 'icmp', 'TCP/123', 'UDP/123', 'TCP/123-456', 'UDP/123-456']
firewall_policy_id:
description:
- Id of the firewall policy. This is required to update or delete an existing firewall policy
default: None
required: False
source_account_alias:
description:
- CLC alias for the source account
required: True
destination_account_alias:
description:
- CLC alias for the destination account
default: None
required: False
wait:
description:
- Whether to wait for the provisioning tasks to finish before returning.
default: True
required: False
choices: [True, False]
enabled:
description:
- Whether the firewall policy is enabled or disabled
default: True
required: False
choices: [True, False]
requirements:
- python = 2.7
- requests >= 2.5.0
- clc-sdk
author: "CLC Runner (@clc-runner)"
notes:
- To use this module, it is required to set the below environment variables, which enable access to the
CenturyLink Cloud
- CLC_V2_API_USERNAME, the account login id for the centurylink cloud
- CLC_V2_API_PASSWORD, the account password for the centurylink cloud
- Alternatively, the module accepts the API token and account alias. The API token can be generated using the
CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login
- CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login
- CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud
- Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment.
'''
EXAMPLES = '''
---
- name: Create Firewall Policy
hosts: localhost
gather_facts: False
connection: local
tasks:
    - name: Create / Verify a Firewall Policy at CenturyLink Cloud
      clc_firewall_policy:
source_account_alias: WFAD
location: VA1
state: present
source: 10.128.216.0/24
destination: 10.128.216.0/24
ports: Any
destination_account_alias: WFAD
---
- name: Delete Firewall Policy
hosts: localhost
gather_facts: False
connection: local
tasks:
    - name: Delete a Firewall Policy at CenturyLink Cloud
      clc_firewall_policy:
source_account_alias: WFAD
location: VA1
state: absent
firewall_policy_id: c62105233d7a4231bd2e91b9c791e43e1
'''
RETURN = '''
firewall_policy_id:
    description: The firewall policy id
returned: success
type: string
sample: fc36f1bfd47242e488a9c44346438c05
firewall_policy:
    description: The firewall policy information
returned: success
type: dict
sample:
{
"destination":[
"10.1.1.0/24",
"10.2.2.0/24"
],
"destinationAccount":"wfad",
"enabled":true,
"id":"fc36f1bfd47242e488a9c44346438c05",
"links":[
{
"href":"http://api.ctl.io/v2-experimental/firewallPolicies/wfad/uc1/fc36f1bfd47242e488a9c44346438c05",
"rel":"self",
"verbs":[
"GET",
"PUT",
"DELETE"
]
}
],
"ports":[
"any"
],
"source":[
"10.1.1.0/24",
"10.2.2.0/24"
],
"status":"active"
}
'''
__version__ = '${version}'
import os
import urlparse
from time import sleep
from distutils.version import LooseVersion
try:
import requests
except ImportError:
REQUESTS_FOUND = False
else:
REQUESTS_FOUND = True
try:
import clc as clc_sdk
from clc import CLCException
from clc import APIFailedResponse
except ImportError:
CLC_FOUND = False
clc_sdk = None
else:
CLC_FOUND = True
class ClcFirewallPolicy:
clc = None
def __init__(self, module):
"""
Construct module
"""
self.clc = clc_sdk
self.module = module
self.firewall_dict = {}
if not CLC_FOUND:
self.module.fail_json(
msg='clc-python-sdk required for this module')
if not REQUESTS_FOUND:
self.module.fail_json(
msg='requests library is required for this module')
if requests.__version__ and LooseVersion(
requests.__version__) < LooseVersion('2.5.0'):
self.module.fail_json(
msg='requests library version should be >= 2.5.0')
self._set_user_agent(self.clc)
@staticmethod
def _define_module_argument_spec():
"""
Define the argument spec for the ansible module
:return: argument spec dictionary
"""
        argument_spec = dict(
            location=dict(required=True),
            source_account_alias=dict(required=True),
            destination_account_alias=dict(default=None),
            firewall_policy_id=dict(default=None),
            ports=dict(default=None, type='list'),
            source=dict(default=None, type='list'),
            destination=dict(default=None, type='list'),
            wait=dict(default=True),
            state=dict(default='present', choices=['present', 'absent']),
            enabled=dict(default=True, choices=[True, False])
        )
return argument_spec
def process_request(self):
"""
Execute the main code path, and handle the request
:return: none
"""
changed = False
firewall_policy = None
location = self.module.params.get('location')
source_account_alias = self.module.params.get('source_account_alias')
destination_account_alias = self.module.params.get(
'destination_account_alias')
firewall_policy_id = self.module.params.get('firewall_policy_id')
ports = self.module.params.get('ports')
source = self.module.params.get('source')
destination = self.module.params.get('destination')
wait = self.module.params.get('wait')
state = self.module.params.get('state')
enabled = self.module.params.get('enabled')
self.firewall_dict = {
'location': location,
'source_account_alias': source_account_alias,
'destination_account_alias': destination_account_alias,
'firewall_policy_id': firewall_policy_id,
'ports': ports,
'source': source,
'destination': destination,
'wait': wait,
'state': state,
'enabled': enabled}
self._set_clc_credentials_from_env()
if state == 'absent':
changed, firewall_policy_id, firewall_policy = self._ensure_firewall_policy_is_absent(
source_account_alias, location, self.firewall_dict)
elif state == 'present':
changed, firewall_policy_id, firewall_policy = self._ensure_firewall_policy_is_present(
source_account_alias, location, self.firewall_dict)
return self.module.exit_json(
changed=changed,
firewall_policy_id=firewall_policy_id,
firewall_policy=firewall_policy)
@staticmethod
def _get_policy_id_from_response(response):
"""
Method to parse out the policy id from creation response
:param response: response from firewall creation API call
:return: policy_id: firewall policy id from creation call
"""
url = response.get('links')[0]['href']
path = urlparse.urlparse(url).path
path_list = os.path.split(path)
policy_id = path_list[-1]
return policy_id
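    # Worked example (the href is hypothetical): for a response whose first
    # link href is
    # 'https://api.ctl.io/v2-experimental/firewallPolicies/WFAD/VA1/fc36f1bf',
    # urlparse(url).path gives '/v2-experimental/firewallPolicies/WFAD/VA1/fc36f1bf'
    # and os.path.split(path)[-1] yields the policy id 'fc36f1bf'.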
def _set_clc_credentials_from_env(self):
"""
Set the CLC Credentials on the sdk by reading environment variables
:return: none
"""
env = os.environ
v2_api_token = env.get('CLC_V2_API_TOKEN', False)
v2_api_username = env.get('CLC_V2_API_USERNAME', False)
v2_api_passwd = env.get('CLC_V2_API_PASSWD', False)
clc_alias = env.get('CLC_ACCT_ALIAS', False)
api_url = env.get('CLC_V2_API_URL', False)
if api_url:
self.clc.defaults.ENDPOINT_URL_V2 = api_url
if v2_api_token and clc_alias:
self.clc._LOGIN_TOKEN_V2 = v2_api_token
self.clc._V2_ENABLED = True
self.clc.ALIAS = clc_alias
elif v2_api_username and v2_api_passwd:
self.clc.v2.SetCredentials(
api_username=v2_api_username,
api_passwd=v2_api_passwd)
else:
return self.module.fail_json(
msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD "
"environment variables")
def _ensure_firewall_policy_is_present(
self,
source_account_alias,
location,
firewall_dict):
"""
Ensures that a given firewall policy is present
:param source_account_alias: the source account alias for the firewall policy
:param location: datacenter of the firewall policy
:param firewall_dict: dictionary of request parameters for firewall policy
:return: (changed, firewall_policy_id, firewall_policy)
changed: flag for if a change occurred
firewall_policy_id: the firewall policy id that was created/updated
firewall_policy: The firewall_policy object
"""
firewall_policy = None
firewall_policy_id = firewall_dict.get('firewall_policy_id')
if firewall_policy_id is None:
if not self.module.check_mode:
response = self._create_firewall_policy(
source_account_alias,
location,
firewall_dict)
firewall_policy_id = self._get_policy_id_from_response(
response)
changed = True
else:
firewall_policy = self._get_firewall_policy(
source_account_alias, location, firewall_policy_id)
if not firewall_policy:
return self.module.fail_json(
msg='Unable to find the firewall policy id : {0}'.format(
firewall_policy_id))
changed = self._compare_get_request_with_dict(
firewall_policy,
firewall_dict)
if not self.module.check_mode and changed:
self._update_firewall_policy(
source_account_alias,
location,
firewall_policy_id,
firewall_dict)
if changed and firewall_policy_id:
firewall_policy = self._wait_for_requests_to_complete(
source_account_alias,
location,
firewall_policy_id)
return changed, firewall_policy_id, firewall_policy
def _ensure_firewall_policy_is_absent(
self,
source_account_alias,
location,
firewall_dict):
"""
Ensures that a given firewall policy is removed if present
:param source_account_alias: the source account alias for the firewall policy
:param location: datacenter of the firewall policy
:param firewall_dict: firewall policy to delete
:return: (changed, firewall_policy_id, response)
changed: flag for if a change occurred
firewall_policy_id: the firewall policy id that was deleted
response: response from CLC API call
"""
changed = False
response = []
firewall_policy_id = firewall_dict.get('firewall_policy_id')
result = self._get_firewall_policy(
source_account_alias, location, firewall_policy_id)
if result:
if not self.module.check_mode:
response = self._delete_firewall_policy(
source_account_alias,
location,
firewall_policy_id)
changed = True
return changed, firewall_policy_id, response
def _create_firewall_policy(
self,
source_account_alias,
location,
firewall_dict):
"""
Creates the firewall policy for the given account alias
:param source_account_alias: the source account alias for the firewall policy
:param location: datacenter of the firewall policy
:param firewall_dict: dictionary of request parameters for firewall policy
:return: response from CLC API call
"""
payload = {
'destinationAccount': firewall_dict.get('destination_account_alias'),
'source': firewall_dict.get('source'),
'destination': firewall_dict.get('destination'),
'ports': firewall_dict.get('ports')}
try:
response = self.clc.v2.API.Call(
'POST', '/v2-experimental/firewallPolicies/%s/%s' %
(source_account_alias, location), payload)
except APIFailedResponse as e:
return self.module.fail_json(
msg="Unable to create firewall policy. %s" %
str(e.response_text))
return response
def _delete_firewall_policy(
self,
source_account_alias,
location,
firewall_policy_id):
"""
Deletes a given firewall policy for an account alias in a datacenter
:param source_account_alias: the source account alias for the firewall policy
:param location: datacenter of the firewall policy
:param firewall_policy_id: firewall policy id to delete
:return: response: response from CLC API call
"""
try:
response = self.clc.v2.API.Call(
'DELETE', '/v2-experimental/firewallPolicies/%s/%s/%s' %
(source_account_alias, location, firewall_policy_id))
except APIFailedResponse as e:
return self.module.fail_json(
msg="Unable to delete the firewall policy id : {0}. {1}".format(
firewall_policy_id, str(e.response_text)))
return response
def _update_firewall_policy(
self,
source_account_alias,
location,
firewall_policy_id,
firewall_dict):
"""
Updates a firewall policy for a given datacenter and account alias
:param source_account_alias: the source account alias for the firewall policy
:param location: datacenter of the firewall policy
:param firewall_policy_id: firewall policy id to update
:param firewall_dict: dictionary of request parameters for firewall policy
:return: response: response from CLC API call
"""
try:
response = self.clc.v2.API.Call(
'PUT',
'/v2-experimental/firewallPolicies/%s/%s/%s' %
(source_account_alias,
location,
firewall_policy_id),
firewall_dict)
except APIFailedResponse as e:
return self.module.fail_json(
msg="Unable to update the firewall policy id : {0}. {1}".format(
firewall_policy_id, str(e.response_text)))
return response
@staticmethod
def _compare_get_request_with_dict(response, firewall_dict):
"""
Helper method to compare the json response for getting the firewall policy with the request parameters
:param response: response from the get method
:param firewall_dict: dictionary of request parameters for firewall policy
:return: changed: Boolean that returns true if there are differences between
the response parameters and the playbook parameters
"""
changed = False
response_dest_account_alias = response.get('destinationAccount')
response_enabled = response.get('enabled')
response_source = response.get('source')
response_dest = response.get('destination')
response_ports = response.get('ports')
request_dest_account_alias = firewall_dict.get(
'destination_account_alias')
request_enabled = firewall_dict.get('enabled')
if request_enabled is None:
request_enabled = True
request_source = firewall_dict.get('source')
request_dest = firewall_dict.get('destination')
request_ports = firewall_dict.get('ports')
if (
response_dest_account_alias and str(response_dest_account_alias) != str(request_dest_account_alias)) or (
response_enabled != request_enabled) or (
response_source and response_source != request_source) or (
response_dest and response_dest != request_dest) or (
response_ports and response_ports != request_ports):
changed = True
return changed
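    # Illustrative comparison (values are hypothetical): a GET response with
    # ports=['any'] checked against a playbook firewall_dict with
    # ports=['TCP/80'] differs, so changed is True and the policy is updated.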
def _get_firewall_policy(
self,
source_account_alias,
location,
firewall_policy_id):
"""
Get back details for a particular firewall policy
:param source_account_alias: the source account alias for the firewall policy
:param location: datacenter of the firewall policy
:param firewall_policy_id: id of the firewall policy to get
:return: response - The response from CLC API call
"""
response = None
try:
response = self.clc.v2.API.Call(
'GET', '/v2-experimental/firewallPolicies/%s/%s/%s' %
(source_account_alias, location, firewall_policy_id))
except APIFailedResponse as e:
if e.response_status_code != 404:
self.module.fail_json(
msg="Unable to fetch the firewall policy with id : {0}. {1}".format(
firewall_policy_id, str(e.response_text)))
return response
def _wait_for_requests_to_complete(
self,
source_account_alias,
location,
firewall_policy_id,
wait_limit=50):
"""
Waits until the CLC requests are complete if the wait argument is True
:param source_account_alias: The source account alias for the firewall policy
:param location: datacenter of the firewall policy
:param firewall_policy_id: The firewall policy id
:param wait_limit: The number of times to check the status for completion
:return: the firewall_policy object
"""
wait = self.module.params.get('wait')
count = 0
firewall_policy = None
while wait:
count += 1
firewall_policy = self._get_firewall_policy(
source_account_alias, location, firewall_policy_id)
status = firewall_policy.get('status')
if status == 'active' or count > wait_limit:
wait = False
else:
# wait for 2 seconds
sleep(2)
return firewall_policy
@staticmethod
def _set_user_agent(clc):
if hasattr(clc, 'SetRequestsSession'):
agent_string = "ClcAnsibleModule/" + __version__
ses = requests.Session()
ses.headers.update({"Api-Client": agent_string})
ses.headers['User-Agent'] += " " + agent_string
clc.SetRequestsSession(ses)
def main():
"""
The main function. Instantiates the module and calls process_request.
:return: none
"""
module = AnsibleModule(
argument_spec=ClcFirewallPolicy._define_module_argument_spec(),
supports_check_mode=True)
clc_firewall = ClcFirewallPolicy(module)
clc_firewall.process_request()
from ansible.module_utils.basic import * # pylint: disable=W0614
if __name__ == '__main__':
main()
|
delhivery/django
|
refs/heads/master
|
django/contrib/sessions/exceptions.py
|
931
|
from django.core.exceptions import SuspiciousOperation
class InvalidSessionKey(SuspiciousOperation):
"""Invalid characters in session key"""
pass
class SuspiciousSession(SuspiciousOperation):
"""The session may be tampered with"""
pass
|
csdms/pymt
|
refs/heads/master
|
tests/grids/test_grid_type.py
|
2
|
import unittest
from pymt.grids.grid_type import (
GridType,
GridTypeRectilinear,
GridTypeStructured,
GridTypeUnstructured,
)
class TestGridType(unittest.TestCase):
def test_rectilinear(self):
type = GridTypeRectilinear()
self.assertEqual(str(type), "rectilinear")
self.assertEqual(type, "rectilinear")
self.assertEqual(type, GridTypeRectilinear())
self.assertNotEqual(type, GridTypeStructured)
self.assertNotEqual(type, GridType)
self.assertIsInstance(type, GridType)
def test_structured(self):
type = GridTypeStructured()
self.assertEqual(str(type), "structured")
self.assertEqual(type, "structured")
self.assertEqual(type, GridTypeStructured())
self.assertNotEqual(type, GridTypeRectilinear)
self.assertNotEqual(type, GridType)
self.assertIsInstance(type, GridType)
def test_unstructured(self):
type = GridTypeUnstructured()
self.assertEqual(str(type), "unstructured")
self.assertEqual(type, "unstructured")
self.assertEqual(type, GridTypeUnstructured())
self.assertNotEqual(type, GridTypeRectilinear)
self.assertNotEqual(type, GridType)
self.assertIsInstance(type, GridType)
|
rohitw1991/frappe
|
refs/heads/develop
|
frappe/core/__init__.py
|
107
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
|
nsat/gnuradio
|
refs/heads/master
|
grc/gui/Platform.py
|
18
|
"""
Copyright 2008, 2009 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import os
import sys
from ..core.Platform import Platform as _Platform
from .Config import Config as _Config
from .Block import Block as _Block
from .Connection import Connection as _Connection
from .Element import Element
from .FlowGraph import FlowGraph as _FlowGraph
from .Param import Param as _Param
from .Port import Port as _Port
class Platform(Element, _Platform):
def __init__(self, *args, **kwargs):
Element.__init__(self)
_Platform.__init__(self, *args, **kwargs)
# Ensure conf directories
gui_prefs_file = self.config.gui_prefs_file
if not os.path.exists(os.path.dirname(gui_prefs_file)):
os.mkdir(os.path.dirname(gui_prefs_file))
self._move_old_pref_file()
def get_prefs_file(self):
return self.config.gui_prefs_file
def _move_old_pref_file(self):
gui_prefs_file = self.config.gui_prefs_file
old_gui_prefs_file = os.environ.get(
'GRC_PREFS_PATH', os.path.expanduser('~/.grc'))
if gui_prefs_file == old_gui_prefs_file:
return # prefs file overridden with env var
if os.path.exists(old_gui_prefs_file) and not os.path.exists(gui_prefs_file):
try:
import shutil
shutil.move(old_gui_prefs_file, gui_prefs_file)
except Exception as e:
print >> sys.stderr, e
##############################################
# Constructors
##############################################
FlowGraph = _FlowGraph
Connection = _Connection
Block = _Block
Port = _Port
Param = _Param
Config = _Config
|
Cinntax/home-assistant
|
refs/heads/dev
|
tests/components/ios/__init__.py
|
43
|
"""Tests for the iOS component."""
|
apdjustino/DRCOG_Urbansim
|
refs/heads/master
|
src/opus_gui/util/documentationbase.py
|
1
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington
# See opus_core/LICENSE
# PyQt4 includes for python bindings to QT
from PyQt4.QtCore import QUrl, Qt, QString, QObject, SIGNAL
from PyQt4.QtGui import QTextBrowser, QWidget, QIcon, QVBoxLayout, QLabel, QPushButton
# Main
class DocumentationBase(QTextBrowser):
def __init__(self, mainwindow, src):
QTextBrowser.__init__(self, mainwindow)
self.mainwindow = mainwindow
self.src = src
self.setOpenExternalLinks(True)
self.setSource(QUrl(self.src))
class DocumentationTab(QWidget):
def __init__(self, mainwindow, filePath):
QWidget.__init__(self, mainwindow)
self.mainwindow = mainwindow
self.tabIcon = QIcon(":/Images/Images/chart_organisation.png")
self.tabLabel = "Documentation Tab"
self.tab = QWidget(self.mainwindow)
self.widgetLayout = QVBoxLayout(self.tab)
self.widgetLayout.setAlignment(Qt.AlignTop)
self.docStatusLabel = QLabel(self.tab)
self.docStatusLabel.setAlignment(Qt.AlignCenter)
self.docStatusLabel.setObjectName("docStatusLabel")
self.docStatusLabel.setText(QString("No documentation currently loaded..."))
self.widgetLayout.addWidget(self.docStatusLabel)
self.pbnRemoveDoc = QPushButton(self.tab)
self.pbnRemoveDoc.setObjectName("pbnRemoveDoc")
self.pbnRemoveDoc.setText(QString("Remove Documentation"))
QObject.connect(self.pbnRemoveDoc, SIGNAL("clicked()"),
self.clicked)
self.widgetLayout.addWidget(self.pbnRemoveDoc)
self.docStuff = DocumentationBase(self.mainwindow,filePath)
self.widgetLayout.addWidget(self.docStuff)
self.docStatusLabel.setText(QString(filePath))
self.mainwindow.tabWidget.insertTab(0,self.tab,self.tabIcon,self.tabLabel)
self.mainwindow.tabWidget.setCurrentIndex(0)
def clicked(self):
print "Remove Documentation Pressed..."
self.mainwindow.tabWidget.removeTab(self.mainwindow.tabWidget.indexOf(self.tab))
self.tab.hide()
|
elba7r/system
|
refs/heads/master
|
erpnext/patches/v5_0/taxes_and_totals_in_party_currency.py
|
103
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import frappe
from frappe.model.meta import get_field_precision
from frappe.custom.doctype.property_setter.property_setter import make_property_setter
def execute():
selling_doctypes = ["Quotation", "Sales Order", "Delivery Note", "Sales Invoice"]
buying_doctypes = ["Supplier Quotation", "Purchase Order", "Purchase Receipt", "Purchase Invoice"]
for dt in selling_doctypes:
update_values(dt, "Sales Taxes and Charges")
for dt in buying_doctypes:
update_values(dt, "Purchase Taxes and Charges")
def update_values(dt, tax_table):
frappe.reload_doctype(dt)
frappe.reload_doctype(dt + " Item")
frappe.reload_doctype(tax_table)
net_total_precision = get_field_precision(frappe.get_meta(dt).get_field("net_total"))
for field in ("total", "base_total", "base_net_total"):
make_property_setter(dt, field, "precision", net_total_precision, "Select")
rate_field_precision = get_field_precision(frappe.get_meta(dt + " Item").get_field("rate"))
for field in ("net_rate", "base_net_rate", "net_amount", "base_net_amount", "base_rate", "base_amount"):
make_property_setter(dt + " Item", field, "precision", rate_field_precision, "Select")
tax_amount_precision = get_field_precision(frappe.get_meta(tax_table).get_field("tax_amount"))
for field in ("base_tax_amount", "total", "base_total", "tax_amount_after_discount_amount",
"base_tax_amount_after_discount_amount"):
make_property_setter(tax_table, field, "precision", tax_amount_precision, "Select")
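	# For illustration: the property setters above give, e.g., the "total"
	# field of a Sales Invoice the same precision as its "net_total" field,
	# so the SQL rounding below matches the precision shown in the UI.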
# update net_total, discount_on
frappe.db.sql("""
UPDATE
`tab{0}`
SET
total = round(net_total, {1}),
base_total = round(net_total*conversion_rate, {1}),
net_total = round(base_net_total / conversion_rate, {1}),
apply_discount_on = "Grand Total"
WHERE
docstatus < 2
""".format(dt, net_total_precision))
# update net_amount
frappe.db.sql("""
UPDATE
`tab{0}` par, `tab{1}` item
SET
item.base_net_amount = round(item.base_amount, {2}),
item.base_net_rate = round(item.base_rate, {2}),
item.net_amount = round(item.base_amount / par.conversion_rate, {2}),
item.net_rate = round(item.base_rate / par.conversion_rate, {2}),
item.base_amount = round(item.amount * par.conversion_rate, {2}),
item.base_rate = round(item.rate * par.conversion_rate, {2})
WHERE
par.name = item.parent
and par.docstatus < 2
""".format(dt, dt + " Item", rate_field_precision))
# update tax in party currency
frappe.db.sql("""
UPDATE
`tab{0}` par, `tab{1}` tax
SET
tax.base_tax_amount = round(tax.tax_amount, {2}),
tax.tax_amount = round(tax.tax_amount / par.conversion_rate, {2}),
tax.base_total = round(tax.total, {2}),
tax.total = round(tax.total / conversion_rate, {2}),
tax.base_tax_amount_after_discount_amount = round(tax.tax_amount_after_discount_amount, {2}),
tax.tax_amount_after_discount_amount = round(tax.tax_amount_after_discount_amount / conversion_rate, {2})
WHERE
par.name = tax.parent
and par.docstatus < 2
""".format(dt, tax_table, tax_amount_precision))
|
sahildua2305/libcloud
|
refs/heads/trunk
|
libcloud/compute/drivers/vcloud.py
|
7
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
VMware vCloud driver.
"""
import copy
import sys
import re
import base64
import os
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlencode
from libcloud.utils.py3 import urlparse
from libcloud.utils.py3 import b
from libcloud.utils.py3 import next
urlparse = urlparse.urlparse
import time
try:
from lxml import etree as ET
except ImportError:
from xml.etree import ElementTree as ET
from xml.parsers.expat import ExpatError
from libcloud.common.base import XmlResponse, ConnectionUserAndKey
from libcloud.common.types import InvalidCredsError, LibcloudError
from libcloud.compute.providers import Provider
from libcloud.compute.types import NodeState
from libcloud.compute.base import Node, NodeDriver, NodeLocation
from libcloud.compute.base import NodeSize, NodeImage
"""
From vcloud api "The VirtualQuantity element defines the number of MB
of memory. This should be either 512 or a multiple of 1024 (1 GB)."
"""
VIRTUAL_MEMORY_VALS = [512] + [1024 * i for i in range(1, 9)]
# Default timeout (in seconds) for long running tasks
DEFAULT_TASK_COMPLETION_TIMEOUT = 600
DEFAULT_API_VERSION = '0.8'
"""
Valid vCloud API v1.5 input values.
"""
VIRTUAL_CPU_VALS_1_5 = [i for i in range(1, 9)]
FENCE_MODE_VALS_1_5 = ['bridged', 'isolated', 'natRouted']
IP_MODE_VALS_1_5 = ['POOL', 'DHCP', 'MANUAL', 'NONE']
def fixxpath(root, xpath):
"""ElementTree wants namespaces in its xpaths, so here we add them."""
namespace, root_tag = root.tag[1:].split("}", 1)
fixed_xpath = "/".join(["{%s}%s" % (namespace, e)
for e in xpath.split("/")])
return fixed_xpath
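# A minimal doctest-style sketch of fixxpath (the XML here is illustrative):
#   >>> root = ET.XML('<Org xmlns="http://www.vmware.com/vcloud/v1.5">'
#   ...               '<Link href="/x"/></Org>')
#   >>> fixxpath(root, 'Link')
#   '{http://www.vmware.com/vcloud/v1.5}Link'
# root.findall(fixxpath(root, 'Link')) then matches the namespaced element.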
def get_url_path(url):
return urlparse(url.strip()).path
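# For example (illustrative): get_url_path('https://host/api/vdc/1 ')
# returns '/api/vdc/1'.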
class Vdc(object):
"""
Virtual datacenter (vDC) representation
"""
def __init__(self, id, name, driver, allocation_model=None, cpu=None,
memory=None, storage=None):
self.id = id
self.name = name
self.driver = driver
self.allocation_model = allocation_model
self.cpu = cpu
self.memory = memory
self.storage = storage
def __repr__(self):
return ('<Vdc: id=%s, name=%s, driver=%s ...>'
% (self.id, self.name, self.driver.name))
class Capacity(object):
"""
Represents CPU, Memory or Storage capacity of vDC.
"""
def __init__(self, limit, used, units):
self.limit = limit
self.used = used
self.units = units
def __repr__(self):
return ('<Capacity: limit=%s, used=%s, units=%s>'
% (self.limit, self.used, self.units))
class ControlAccess(object):
"""
Represents control access settings of a node
"""
class AccessLevel(object):
READ_ONLY = 'ReadOnly'
CHANGE = 'Change'
FULL_CONTROL = 'FullControl'
def __init__(self, node, everyone_access_level, subjects=None):
self.node = node
self.everyone_access_level = everyone_access_level
if not subjects:
subjects = []
self.subjects = subjects
def __repr__(self):
return ('<ControlAccess: node=%s, everyone_access_level=%s, '
'subjects=%s>'
% (self.node, self.everyone_access_level, self.subjects))
class Subject(object):
"""
User or group subject
"""
def __init__(self, type, name, access_level, id=None):
self.type = type
self.name = name
self.access_level = access_level
self.id = id
def __repr__(self):
return ('<Subject: type=%s, name=%s, access_level=%s>'
% (self.type, self.name, self.access_level))
class InstantiateVAppXML(object):
def __init__(self, name, template, net_href, cpus, memory,
password=None, row=None, group=None):
self.name = name
self.template = template
self.net_href = net_href
self.cpus = cpus
self.memory = memory
self.password = password
self.row = row
self.group = group
self._build_xmltree()
def tostring(self):
return ET.tostring(self.root)
def _build_xmltree(self):
self.root = self._make_instantiation_root()
self._add_vapp_template(self.root)
instantiation_params = ET.SubElement(self.root,
"InstantiationParams")
# product and virtual hardware
self._make_product_section(instantiation_params)
self._make_virtual_hardware(instantiation_params)
network_config_section = ET.SubElement(instantiation_params,
"NetworkConfigSection")
network_config = ET.SubElement(network_config_section,
"NetworkConfig")
self._add_network_association(network_config)
def _make_instantiation_root(self):
return ET.Element(
"InstantiateVAppTemplateParams",
{'name': self.name,
'xml:lang': 'en',
'xmlns': "http://www.vmware.com/vcloud/v0.8",
'xmlns:xsi': "http://www.w3.org/2001/XMLSchema-instance"}
)
def _add_vapp_template(self, parent):
return ET.SubElement(
parent,
"VAppTemplate",
{'href': self.template}
)
def _make_product_section(self, parent):
prod_section = ET.SubElement(
parent,
"ProductSection",
{'xmlns:q1': "http://www.vmware.com/vcloud/v0.8",
'xmlns:ovf': "http://schemas.dmtf.org/ovf/envelope/1"}
)
if self.password:
self._add_property(prod_section, 'password', self.password)
if self.row:
self._add_property(prod_section, 'row', self.row)
if self.group:
self._add_property(prod_section, 'group', self.group)
return prod_section
def _add_property(self, parent, ovfkey, ovfvalue):
return ET.SubElement(
parent,
"Property",
{'xmlns': 'http://schemas.dmtf.org/ovf/envelope/1',
'ovf:key': ovfkey,
'ovf:value': ovfvalue}
)
def _make_virtual_hardware(self, parent):
vh = ET.SubElement(
parent,
"VirtualHardwareSection",
{'xmlns:q1': "http://www.vmware.com/vcloud/v0.8"}
)
self._add_cpu(vh)
self._add_memory(vh)
return vh
def _add_cpu(self, parent):
cpu_item = ET.SubElement(
parent,
"Item",
{'xmlns': "http://schemas.dmtf.org/ovf/envelope/1"}
)
self._add_instance_id(cpu_item, '1')
self._add_resource_type(cpu_item, '3')
self._add_virtual_quantity(cpu_item, self.cpus)
return cpu_item
def _add_memory(self, parent):
mem_item = ET.SubElement(
parent,
'Item',
{'xmlns': "http://schemas.dmtf.org/ovf/envelope/1"}
)
self._add_instance_id(mem_item, '2')
self._add_resource_type(mem_item, '4')
self._add_virtual_quantity(mem_item, self.memory)
return mem_item
def _add_instance_id(self, parent, id):
elm = ET.SubElement(
parent,
'InstanceID',
{'xmlns': 'http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData'}
)
elm.text = id
return elm
def _add_resource_type(self, parent, type):
elm = ET.SubElement(
parent,
'ResourceType',
{'xmlns': 'http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData'}
)
elm.text = type
return elm
def _add_virtual_quantity(self, parent, amount):
elm = ET.SubElement(
parent,
'VirtualQuantity',
{'xmlns': 'http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData'}
)
elm.text = amount
return elm
def _add_network_association(self, parent):
return ET.SubElement(
parent,
'NetworkAssociation',
{'href': self.net_href}
)
class VCloudResponse(XmlResponse):
def success(self):
return self.status in (httplib.OK, httplib.CREATED,
httplib.NO_CONTENT, httplib.ACCEPTED)
class VCloudConnection(ConnectionUserAndKey):
"""
Connection class for the vCloud driver
"""
responseCls = VCloudResponse
token = None
host = None
def request(self, *args, **kwargs):
self._get_auth_token()
return super(VCloudConnection, self).request(*args, **kwargs)
def check_org(self):
# the only way to get our org is by logging in.
self._get_auth_token()
def _get_auth_headers(self):
"""Some providers need different headers than others"""
return {
'Authorization': "Basic %s" % base64.b64encode(
b('%s:%s' % (self.user_id, self.key))).decode('utf-8'),
'Content-Length': '0',
'Accept': 'application/*+xml'
}
def _get_auth_token(self):
if not self.token:
self.connection.request(method='POST', url='/api/v0.8/login',
headers=self._get_auth_headers())
resp = self.connection.getresponse()
headers = dict(resp.getheaders())
body = ET.XML(resp.read())
try:
self.token = headers['set-cookie']
except KeyError:
raise InvalidCredsError()
self.driver.org = get_url_path(
body.find(fixxpath(body, 'Org')).get('href')
)
def add_default_headers(self, headers):
headers['Cookie'] = self.token
headers['Accept'] = 'application/*+xml'
return headers
class VCloudNodeDriver(NodeDriver):
"""
vCloud node driver
"""
type = Provider.VCLOUD
name = 'vCloud'
website = 'http://www.vmware.com/products/vcloud/'
connectionCls = VCloudConnection
org = None
_vdcs = None
NODE_STATE_MAP = {'0': NodeState.PENDING,
'1': NodeState.PENDING,
'2': NodeState.PENDING,
'3': NodeState.PENDING,
'4': NodeState.RUNNING}
features = {'create_node': ['password']}
def __new__(cls, key, secret=None, secure=True, host=None, port=None,
api_version=DEFAULT_API_VERSION, **kwargs):
if cls is VCloudNodeDriver:
if api_version == '0.8':
cls = VCloudNodeDriver
elif api_version == '1.5':
cls = VCloud_1_5_NodeDriver
elif api_version == '5.1':
cls = VCloud_5_1_NodeDriver
else:
raise NotImplementedError(
"No VCloudNodeDriver found for API version %s" %
(api_version))
return super(VCloudNodeDriver, cls).__new__(cls)
@property
def vdcs(self):
"""
vCloud virtual data centers (vDCs).
:return: list of vDC objects
:rtype: ``list`` of :class:`Vdc`
"""
if not self._vdcs:
self.connection.check_org() # make sure the org is set.
res = self.connection.request(self.org)
self._vdcs = [
self._to_vdc(
self.connection.request(get_url_path(i.get('href'))).object
)
for i in res.object.findall(fixxpath(res.object, "Link"))
if i.get('type') == 'application/vnd.vmware.vcloud.vdc+xml'
]
return self._vdcs
def _to_vdc(self, vdc_elm):
return Vdc(vdc_elm.get('href'), vdc_elm.get('name'), self)
def _get_vdc(self, vdc_name):
vdc = None
if not vdc_name:
# Return the first organisation VDC found
vdc = self.vdcs[0]
else:
for v in self.vdcs:
if v.name == vdc_name:
vdc = v
if vdc is None:
            raise ValueError('%s virtual data centre could not be found'
                             % vdc_name)
return vdc
@property
def networks(self):
networks = []
for vdc in self.vdcs:
res = self.connection.request(get_url_path(vdc.id)).object
networks.extend(
[network
for network in res.findall(
fixxpath(res, 'AvailableNetworks/Network')
)]
)
return networks
def _to_image(self, image):
image = NodeImage(id=image.get('href'),
name=image.get('name'),
driver=self.connection.driver)
return image
def _to_node(self, elm):
state = self.NODE_STATE_MAP[elm.get('status')]
name = elm.get('name')
public_ips = []
private_ips = []
# Following code to find private IPs works for Terremark
connections = elm.findall('%s/%s' % (
'{http://schemas.dmtf.org/ovf/envelope/1}NetworkConnectionSection',
fixxpath(elm, 'NetworkConnection'))
)
if not connections:
connections = elm.findall(
fixxpath(
elm,
'Children/Vm/NetworkConnectionSection/NetworkConnection'))
for connection in connections:
ips = [ip.text
for ip
in connection.findall(fixxpath(elm, "IpAddress"))]
if connection.get('Network') == 'Internal':
private_ips.extend(ips)
else:
public_ips.extend(ips)
node = Node(id=elm.get('href'),
name=name,
state=state,
public_ips=public_ips,
private_ips=private_ips,
driver=self.connection.driver)
return node
def _get_catalog_hrefs(self):
res = self.connection.request(self.org)
catalogs = [
i.get('href')
for i in res.object.findall(fixxpath(res.object, "Link"))
if i.get('type') == 'application/vnd.vmware.vcloud.catalog+xml'
]
return catalogs
def _wait_for_task_completion(self, task_href,
timeout=DEFAULT_TASK_COMPLETION_TIMEOUT):
start_time = time.time()
res = self.connection.request(get_url_path(task_href))
status = res.object.get('status')
while status != 'success':
if status == 'error':
# Get error reason from the response body
error_elem = res.object.find(fixxpath(res.object, 'Error'))
error_msg = "Unknown error"
if error_elem is not None:
error_msg = error_elem.get('message')
raise Exception("Error status returned by task %s.: %s"
% (task_href, error_msg))
if status == 'canceled':
raise Exception("Canceled status returned by task %s."
% task_href)
if (time.time() - start_time >= timeout):
raise Exception("Timeout (%s sec) while waiting for task %s."
% (timeout, task_href))
time.sleep(5)
res = self.connection.request(get_url_path(task_href))
status = res.object.get('status')
def destroy_node(self, node):
node_path = get_url_path(node.id)
# blindly poweroff node, it will throw an exception if already off
try:
res = self.connection.request('%s/power/action/poweroff'
% node_path,
method='POST')
self._wait_for_task_completion(res.object.get('href'))
except Exception:
pass
try:
res = self.connection.request('%s/action/undeploy' % node_path,
method='POST')
self._wait_for_task_completion(res.object.get('href'))
except ExpatError:
# The undeploy response is malformed XML atm.
            # We can remove this when the providers fix the problem.
pass
except Exception:
# Some vendors don't implement undeploy at all yet,
# so catch this and move on.
pass
res = self.connection.request(node_path, method='DELETE')
return res.status == httplib.ACCEPTED
def reboot_node(self, node):
res = self.connection.request('%s/power/action/reset'
% get_url_path(node.id),
method='POST')
return res.status in [httplib.ACCEPTED, httplib.NO_CONTENT]
def list_nodes(self):
return self.ex_list_nodes()
def ex_list_nodes(self, vdcs=None):
"""
List all nodes across all vDCs. Using 'vdcs' you can specify which vDCs
should be queried.
:param vdcs: None, vDC or a list of vDCs to query. If None all vDCs
will be queried.
:type vdcs: :class:`Vdc`
:rtype: ``list`` of :class:`Node`
"""
if not vdcs:
vdcs = self.vdcs
if not isinstance(vdcs, (list, tuple)):
vdcs = [vdcs]
nodes = []
for vdc in vdcs:
res = self.connection.request(get_url_path(vdc.id))
elms = res.object.findall(fixxpath(
res.object, "ResourceEntities/ResourceEntity")
)
vapps = [
(i.get('name'), i.get('href'))
for i in elms if
i.get('type') == 'application/vnd.vmware.vcloud.vApp+xml' and
i.get('name')
]
for vapp_name, vapp_href in vapps:
try:
res = self.connection.request(
get_url_path(vapp_href),
headers={'Content-Type':
'application/vnd.vmware.vcloud.vApp+xml'}
)
nodes.append(self._to_node(res.object))
except Exception:
# The vApp was probably removed since the previous vDC
# query, ignore
e = sys.exc_info()[1]
if not (e.args[0].tag.endswith('Error') and
e.args[0].get('minorErrorCode') ==
'ACCESS_TO_RESOURCE_IS_FORBIDDEN'):
raise
return nodes
def _to_size(self, ram):
ns = NodeSize(
id=None,
name="%s Ram" % ram,
ram=ram,
disk=None,
bandwidth=None,
price=None,
driver=self.connection.driver
)
return ns
def list_sizes(self, location=None):
sizes = [self._to_size(i) for i in VIRTUAL_MEMORY_VALS]
return sizes
def _get_catalogitems_hrefs(self, catalog):
"""Given a catalog href returns contained catalog item hrefs"""
res = self.connection.request(
get_url_path(catalog),
headers={
'Content-Type': 'application/vnd.vmware.vcloud.catalog+xml'
}
).object
cat_items = res.findall(fixxpath(res, "CatalogItems/CatalogItem"))
cat_item_hrefs = [i.get('href')
for i in cat_items
if i.get('type') ==
'application/vnd.vmware.vcloud.catalogItem+xml']
return cat_item_hrefs
def _get_catalogitem(self, catalog_item):
"""Given a catalog item href returns elementree"""
res = self.connection.request(
get_url_path(catalog_item),
headers={
'Content-Type': 'application/vnd.vmware.vcloud.catalogItem+xml'
}
).object
return res
def list_images(self, location=None):
images = []
for vdc in self.vdcs:
res = self.connection.request(get_url_path(vdc.id)).object
res_ents = res.findall(fixxpath(
res, "ResourceEntities/ResourceEntity")
)
images += [
self._to_image(i)
for i in res_ents
if i.get('type') ==
'application/vnd.vmware.vcloud.vAppTemplate+xml'
]
for catalog in self._get_catalog_hrefs():
for cat_item in self._get_catalogitems_hrefs(catalog):
res = self._get_catalogitem(cat_item)
res_ents = res.findall(fixxpath(res, 'Entity'))
images += [
self._to_image(i)
for i in res_ents
if i.get('type') ==
'application/vnd.vmware.vcloud.vAppTemplate+xml'
]
def idfun(image):
return image.id
return self._uniquer(images, idfun)
def _uniquer(self, seq, idfun=None):
if idfun is None:
def idfun(x):
return x
seen = {}
result = []
for item in seq:
marker = idfun(item)
if marker in seen:
continue
seen[marker] = 1
result.append(item)
return result
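    # Illustrative behaviour (the ids are made up): for images with ids
    # ['a', 'b', 'a'] and idfun returning image.id, _uniquer keeps only the
    # first 'a' and 'b', preserving order.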
def create_node(self, **kwargs):
"""
Creates and returns node.
:keyword ex_network: link to a "Network" e.g.,
``https://services.vcloudexpress...``
:type ex_network: ``str``
:keyword ex_vdc: Name of organisation's virtual data
center where vApp VMs will be deployed.
:type ex_vdc: ``str``
:keyword ex_cpus: number of virtual cpus (limit depends on provider)
:type ex_cpus: ``int``
:type ex_row: ``str``
:type ex_group: ``str``
"""
name = kwargs['name']
image = kwargs['image']
size = kwargs['size']
# Some providers don't require a network link
try:
network = kwargs.get('ex_network', self.networks[0].get('href'))
except IndexError:
network = ''
password = None
auth = self._get_and_check_auth(kwargs.get('auth'))
password = auth.password
instantiate_xml = InstantiateVAppXML(
name=name,
template=image.id,
net_href=network,
cpus=str(kwargs.get('ex_cpus', 1)),
memory=str(size.ram),
password=password,
row=kwargs.get('ex_row', None),
group=kwargs.get('ex_group', None)
)
vdc = self._get_vdc(kwargs.get('ex_vdc', None))
# Instantiate VM and get identifier.
content_type = \
'application/vnd.vmware.vcloud.instantiateVAppTemplateParams+xml'
res = self.connection.request(
'%s/action/instantiateVAppTemplate' % get_url_path(vdc.id),
data=instantiate_xml.tostring(),
method='POST',
headers={'Content-Type': content_type}
)
vapp_path = get_url_path(res.object.get('href'))
# Deploy the VM from the identifier.
res = self.connection.request('%s/action/deploy' % vapp_path,
method='POST')
self._wait_for_task_completion(res.object.get('href'))
# Power on the VM.
res = self.connection.request('%s/power/action/powerOn' % vapp_path,
method='POST')
res = self.connection.request(vapp_path)
node = self._to_node(res.object)
if getattr(auth, "generated", False):
node.extra['password'] = auth.password
return node
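    # A hedged usage sketch (the driver, image and size objects are assumed
    # to exist and all names are placeholders):
    #   node = driver.create_node(name='web01', image=image, size=size,
    #                             ex_vdc='MyVDC', ex_cpus=2)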
class HostingComConnection(VCloudConnection):
"""
vCloud connection subclass for Hosting.com
"""
host = "vcloud.safesecureweb.com"
def _get_auth_headers(self):
"""hosting.com doesn't follow the standard vCloud authentication API"""
return {
'Authentication': base64.b64encode(b('%s:%s' % (self.user_id,
self.key))),
'Content-Length': '0'
}
class HostingComDriver(VCloudNodeDriver):
"""
vCloud node driver for Hosting.com
"""
connectionCls = HostingComConnection
class TerremarkConnection(VCloudConnection):
"""
vCloud connection subclass for Terremark
"""
host = "services.vcloudexpress.terremark.com"
class TerremarkDriver(VCloudNodeDriver):
"""
vCloud node driver for Terremark
"""
connectionCls = TerremarkConnection
def list_locations(self):
return [NodeLocation(0, "Terremark Texas", 'US', self)]
class VCloud_1_5_Connection(VCloudConnection):
def _get_auth_headers(self):
"""Compatibility for using v1.5 API under vCloud Director 5.1"""
return {
'Authorization': "Basic %s" % base64.b64encode(
b('%s:%s' % (self.user_id, self.key))).decode('utf-8'),
'Content-Length': '0',
'Accept': 'application/*+xml;version=1.5'
}
def _get_auth_token(self):
if not self.token:
# Log In
self.connection.request(method='POST', url='/api/sessions',
headers=self._get_auth_headers())
resp = self.connection.getresponse()
headers = dict(resp.getheaders())
# Set authorization token
try:
self.token = headers['x-vcloud-authorization']
except KeyError:
raise InvalidCredsError()
# Get the URL of the Organization
body = ET.XML(resp.read())
self.org_name = body.get('org')
org_list_url = get_url_path(
next((link for link in body.findall(fixxpath(body, 'Link'))
if link.get('type') ==
'application/vnd.vmware.vcloud.orgList+xml')).get('href')
)
self.connection.set_http_proxy(self.proxy_url)
self.connection.request(method='GET', url=org_list_url,
headers=self.add_default_headers({}))
body = ET.XML(self.connection.getresponse().read())
self.driver.org = get_url_path(
next((org for org in body.findall(fixxpath(body, 'Org'))
if org.get('name') == self.org_name)).get('href')
)
def add_default_headers(self, headers):
headers['Accept'] = 'application/*+xml;version=1.5'
headers['x-vcloud-authorization'] = self.token
return headers
class Instantiate_1_5_VAppXML(object):
def __init__(self, name, template, network, vm_network=None,
vm_fence=None):
self.name = name
self.template = template
self.network = network
self.vm_network = vm_network
self.vm_fence = vm_fence
self._build_xmltree()
def tostring(self):
return ET.tostring(self.root)
def _build_xmltree(self):
self.root = self._make_instantiation_root()
if self.network is not None:
instantionation_params = ET.SubElement(self.root,
'InstantiationParams')
network_config_section = ET.SubElement(instantionation_params,
'NetworkConfigSection')
ET.SubElement(
network_config_section,
'Info',
{'xmlns': 'http://schemas.dmtf.org/ovf/envelope/1'}
)
network_config = ET.SubElement(network_config_section,
'NetworkConfig')
self._add_network_association(network_config)
self._add_vapp_template(self.root)
def _make_instantiation_root(self):
return ET.Element(
'InstantiateVAppTemplateParams',
{'name': self.name,
'deploy': 'false',
'powerOn': 'false',
'xml:lang': 'en',
'xmlns': 'http://www.vmware.com/vcloud/v1.5',
'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
)
def _add_vapp_template(self, parent):
return ET.SubElement(
parent,
'Source',
{'href': self.template}
)
def _add_network_association(self, parent):
if self.vm_network is None:
# Don't set a custom vApp VM network name
parent.set('networkName', self.network.get('name'))
else:
# Set a custom vApp VM network name
parent.set('networkName', self.vm_network)
configuration = ET.SubElement(parent, 'Configuration')
ET.SubElement(configuration, 'ParentNetwork',
{'href': self.network.get('href')})
if self.vm_fence is None:
fencemode = self.network.find(fixxpath(self.network,
'Configuration/FenceMode')).text
else:
fencemode = self.vm_fence
ET.SubElement(configuration, 'FenceMode').text = fencemode
class VCloud_1_5_NodeDriver(VCloudNodeDriver):
connectionCls = VCloud_1_5_Connection
# Based on
# http://pubs.vmware.com/vcloud-api-1-5/api_prog/
# GUID-843BE3AD-5EF6-4442-B864-BCAE44A51867.html
NODE_STATE_MAP = {'-1': NodeState.UNKNOWN,
'0': NodeState.PENDING,
'1': NodeState.PENDING,
'2': NodeState.PENDING,
'3': NodeState.PENDING,
'4': NodeState.RUNNING,
'5': NodeState.RUNNING,
'6': NodeState.UNKNOWN,
'7': NodeState.UNKNOWN,
'8': NodeState.STOPPED,
'9': NodeState.UNKNOWN,
'10': NodeState.UNKNOWN}
def list_locations(self):
return [NodeLocation(id=self.connection.host,
name=self.connection.host, country="N/A", driver=self)]
def ex_find_node(self, node_name, vdcs=None):
"""
        Searches for node across specified vDCs. This is more efficient than
querying all nodes to get a single instance.
:param node_name: The name of the node to search for
:type node_name: ``str``
:param vdcs: None, vDC or a list of vDCs to search in. If None all vDCs
will be searched.
:type vdcs: :class:`Vdc`
:return: node instance or None if not found
:rtype: :class:`Node` or ``None``
"""
if not vdcs:
vdcs = self.vdcs
if not getattr(vdcs, '__iter__', False):
vdcs = [vdcs]
for vdc in vdcs:
res = self.connection.request(get_url_path(vdc.id))
xpath = fixxpath(res.object, "ResourceEntities/ResourceEntity")
entity_elems = res.object.findall(xpath)
for entity_elem in entity_elems:
if entity_elem.get('type') == \
'application/vnd.vmware.vcloud.vApp+xml' and \
entity_elem.get('name') == node_name:
path = get_url_path(entity_elem.get('href'))
headers = {'Content-Type':
'application/vnd.vmware.vcloud.vApp+xml'}
res = self.connection.request(path,
headers=headers)
return self._to_node(res.object)
return None
def destroy_node(self, node):
try:
self.ex_undeploy_node(node)
except Exception:
# Some vendors don't implement undeploy at all yet,
# so catch this and move on.
pass
res = self.connection.request(get_url_path(node.id), method='DELETE')
return res.status == httplib.ACCEPTED
def reboot_node(self, node):
res = self.connection.request('%s/power/action/reset'
% get_url_path(node.id),
method='POST')
if res.status in [httplib.ACCEPTED, httplib.NO_CONTENT]:
self._wait_for_task_completion(res.object.get('href'))
return True
else:
return False
def ex_deploy_node(self, node):
"""
Deploys existing node. Equal to vApp "start" operation.
:param node: The node to be deployed
:type node: :class:`Node`
:rtype: :class:`Node`
"""
data = {'powerOn': 'true',
'xmlns': 'http://www.vmware.com/vcloud/v1.5'}
deploy_xml = ET.Element('DeployVAppParams', data)
path = get_url_path(node.id)
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.deployVAppParams+xml'
}
res = self.connection.request('%s/action/deploy' % path,
data=ET.tostring(deploy_xml),
method='POST',
headers=headers)
self._wait_for_task_completion(res.object.get('href'))
res = self.connection.request(get_url_path(node.id))
return self._to_node(res.object)
def ex_undeploy_node(self, node):
"""
Undeploys existing node. Equal to vApp "stop" operation.
        :param node: The node to be undeployed
:type node: :class:`Node`
:rtype: :class:`Node`
"""
data = {'xmlns': 'http://www.vmware.com/vcloud/v1.5'}
undeploy_xml = ET.Element('UndeployVAppParams', data)
undeploy_power_action_xml = ET.SubElement(undeploy_xml,
'UndeployPowerAction')
undeploy_power_action_xml.text = 'shutdown'
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.undeployVAppParams+xml'
}
try:
res = self.connection.request(
'%s/action/undeploy' % get_url_path(node.id),
data=ET.tostring(undeploy_xml),
method='POST',
headers=headers)
self._wait_for_task_completion(res.object.get('href'))
except Exception:
undeploy_power_action_xml.text = 'powerOff'
res = self.connection.request(
'%s/action/undeploy' % get_url_path(node.id),
data=ET.tostring(undeploy_xml),
method='POST',
headers=headers)
self._wait_for_task_completion(res.object.get('href'))
res = self.connection.request(get_url_path(node.id))
return self._to_node(res.object)
def ex_power_off_node(self, node):
"""
        Powers off all VMs under specified node. This operation
        is allowed only when the vApp/VM is powered on.
:param node: The node to be powered off
:type node: :class:`Node`
:rtype: :class:`Node`
"""
return self._perform_power_operation(node, 'powerOff')
def ex_power_on_node(self, node):
"""
Powers on all VMs under specified node. This operation is allowed
only when the vApp/VM is powered off or suspended.
:param node: The node to be powered on
:type node: :class:`Node`
:rtype: :class:`Node`
"""
return self._perform_power_operation(node, 'powerOn')
def ex_shutdown_node(self, node):
"""
        Shuts down all VMs under specified node. This operation is allowed only
when the vApp/VM is powered on.
:param node: The node to be shut down
:type node: :class:`Node`
:rtype: :class:`Node`
"""
return self._perform_power_operation(node, 'shutdown')
def ex_suspend_node(self, node):
"""
Suspends all VMs under specified node. This operation is allowed only
when the vApp/VM is powered on.
:param node: The node to be suspended
:type node: :class:`Node`
:rtype: :class:`Node`
"""
return self._perform_power_operation(node, 'suspend')
def _perform_power_operation(self, node, operation):
res = self.connection.request(
'%s/power/action/%s' % (get_url_path(node.id), operation),
method='POST')
self._wait_for_task_completion(res.object.get('href'))
res = self.connection.request(get_url_path(node.id))
return self._to_node(res.object)
def ex_get_control_access(self, node):
"""
Returns the control access settings for specified node.
:param node: node to get the control access for
:type node: :class:`Node`
:rtype: :class:`ControlAccess`
"""
res = self.connection.request(
'%s/controlAccess' % get_url_path(node.id))
everyone_access_level = None
is_shared_elem = res.object.find(
fixxpath(res.object, "IsSharedToEveryone"))
if is_shared_elem is not None and is_shared_elem.text == 'true':
everyone_access_level = res.object.find(
fixxpath(res.object, "EveryoneAccessLevel")).text
# Parse all subjects
subjects = []
xpath = fixxpath(res.object, "AccessSettings/AccessSetting")
for elem in res.object.findall(xpath):
access_level = elem.find(fixxpath(res.object, "AccessLevel")).text
subject_elem = elem.find(fixxpath(res.object, "Subject"))
if subject_elem.get('type') == \
'application/vnd.vmware.admin.group+xml':
subj_type = 'group'
else:
subj_type = 'user'
path = get_url_path(subject_elem.get('href'))
res = self.connection.request(path)
name = res.object.get('name')
subject = Subject(type=subj_type,
name=name,
access_level=access_level,
id=subject_elem.get('href'))
subjects.append(subject)
return ControlAccess(node, everyone_access_level, subjects)
def ex_set_control_access(self, node, control_access):
"""
Sets control access for the specified node.
:param node: node
:type node: :class:`Node`
:param control_access: control access settings
:type control_access: :class:`ControlAccess`
:rtype: ``None``
"""
xml = ET.Element('ControlAccessParams',
{'xmlns': 'http://www.vmware.com/vcloud/v1.5'})
shared_to_everyone = ET.SubElement(xml, 'IsSharedToEveryone')
if control_access.everyone_access_level:
shared_to_everyone.text = 'true'
everyone_access_level = ET.SubElement(xml, 'EveryoneAccessLevel')
everyone_access_level.text = control_access.everyone_access_level
else:
shared_to_everyone.text = 'false'
# Set subjects
if control_access.subjects:
access_settings_elem = ET.SubElement(xml, 'AccessSettings')
for subject in control_access.subjects:
setting = ET.SubElement(access_settings_elem, 'AccessSetting')
if subject.id:
href = subject.id
else:
res = self.ex_query(type=subject.type, filter='name==' +
subject.name)
if not res:
raise LibcloudError('Specified subject "%s %s" not found '
% (subject.type, subject.name))
href = res[0]['href']
ET.SubElement(setting, 'Subject', {'href': href})
ET.SubElement(setting, 'AccessLevel').text = subject.access_level
headers = {
'Content-Type': 'application/vnd.vmware.vcloud.controlAccess+xml'
}
self.connection.request(
'%s/action/controlAccess' % get_url_path(node.id),
data=ET.tostring(xml),
headers=headers,
method='POST')
def ex_get_metadata(self, node):
"""
:param node: node
:type node: :class:`Node`
:return: dictionary mapping metadata keys to metadata values
:rtype: dictionary mapping ``str`` to ``str``
"""
res = self.connection.request('%s/metadata' % (get_url_path(node.id)))
xpath = fixxpath(res.object, 'MetadataEntry')
metadata_entries = res.object.findall(xpath)
res_dict = {}
for entry in metadata_entries:
key = entry.findtext(fixxpath(res.object, 'Key'))
value = entry.findtext(fixxpath(res.object, 'Value'))
res_dict[key] = value
return res_dict
def ex_set_metadata_entry(self, node, key, value):
"""
:param node: node
:type node: :class:`Node`
:param key: metadata key to be set
:type key: ``str``
:param value: metadata value to be set
:type value: ``str``
:rtype: ``None``
"""
metadata_elem = ET.Element(
'Metadata',
{'xmlns': "http://www.vmware.com/vcloud/v1.5",
'xmlns:xsi': "http://www.w3.org/2001/XMLSchema-instance"}
)
entry = ET.SubElement(metadata_elem, 'MetadataEntry')
key_elem = ET.SubElement(entry, 'Key')
key_elem.text = key
value_elem = ET.SubElement(entry, 'Value')
value_elem.text = value
# send it back to the server
res = self.connection.request(
'%s/metadata' % get_url_path(node.id),
data=ET.tostring(metadata_elem),
headers={
'Content-Type': 'application/vnd.vmware.vcloud.metadata+xml'
},
method='POST')
self._wait_for_task_completion(res.object.get('href'))
def ex_query(self, type, filter=None, page=1, page_size=100, sort_asc=None,
sort_desc=None):
"""
Queries vCloud for specified type. See
http://www.vmware.com/pdf/vcd_15_api_guide.pdf for details. Each
element of the returned list is a dictionary with all attributes from
the record.
        :param type: type to query (e.g. user, group, vApp etc.)
:type type: ``str``
:param filter: filter expression (see documentation for syntax)
:type filter: ``str``
:param page: page number
:type page: ``int``
:param page_size: page size
:type page_size: ``int``
:param sort_asc: sort in ascending order by specified field
:type sort_asc: ``str``
:param sort_desc: sort in descending order by specified field
:type sort_desc: ``str``
:rtype: ``list`` of dict
"""
# This is a workaround for filter parameter encoding
# the urllib encodes (name==Developers%20Only) into
        # %28name%3D%3DDevelopers%20Only%29 which is not accepted by vCloud
params = {
'type': type,
'pageSize': page_size,
'page': page,
}
if sort_asc:
params['sortAsc'] = sort_asc
if sort_desc:
params['sortDesc'] = sort_desc
url = '/api/query?' + urlencode(params)
if filter:
if not filter.startswith('('):
filter = '(' + filter + ')'
url += '&filter=' + filter.replace(' ', '+')
results = []
res = self.connection.request(url)
for elem in res.object:
if not elem.tag.endswith('Link'):
result = elem.attrib
result['type'] = elem.tag.split('}')[1]
results.append(result)
return results
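    # A hedged usage sketch (the filter value is illustrative):
    #   records = driver.ex_query('user', filter='name==admin')
    # Each record is a dict of the result element's attributes plus a 'type'
    # key taken from its tag, e.g. {'type': 'UserRecord', 'name': 'admin'}.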
def create_node(self, **kwargs):
"""
Creates and returns node. If the source image is:
- vApp template - a new vApp is instantiated from template
        - existing vApp - a new vApp is cloned from the source vApp. Cannot
                          clone more vApps in parallel, otherwise a
                          resource busy error is raised.
@inherits: :class:`NodeDriver.create_node`
:keyword image: OS Image to boot on node. (required). Can be a
NodeImage or existing Node that will be cloned.
:type image: :class:`NodeImage` or :class:`Node`
:keyword ex_network: Organisation's network name for attaching vApp
VMs to.
:type ex_network: ``str``
:keyword ex_vdc: Name of organisation's virtual data center where
vApp VMs will be deployed.
:type ex_vdc: ``str``
:keyword ex_vm_names: list of names to be used as a VM and computer
name. The name must be max. 15 characters
long and follow the host name requirements.
:type ex_vm_names: ``list`` of ``str``
:keyword ex_vm_cpu: number of virtual CPUs/cores to allocate for
each vApp VM.
:type ex_vm_cpu: ``int``
:keyword ex_vm_memory: amount of memory in MB to allocate for each
vApp VM.
:type ex_vm_memory: ``int``
:keyword ex_vm_script: full path to file containing guest
customisation script for each vApp VM.
Useful for creating users & pushing out
public SSH keys etc.
:type ex_vm_script: ``str``
:keyword ex_vm_network: Override default vApp VM network name.
Useful for when you've imported an OVF
originating from outside of the vCloud.
:type ex_vm_network: ``str``
:keyword ex_vm_fence: Fence mode for connecting the vApp VM network
(ex_vm_network) to the parent
organisation network (ex_network).
:type ex_vm_fence: ``str``
:keyword ex_vm_ipmode: IP address allocation mode for all vApp VM
network connections.
:type ex_vm_ipmode: ``str``
:keyword ex_deploy: set to False if the node shouldn't be deployed
(started) after creation
:type ex_deploy: ``bool``
:keyword ex_clone_timeout: timeout in seconds for clone/instantiate
VM operation.
Cloning might be a time consuming
operation especially when linked clones
are disabled or VMs are created on
different datastores.
Overrides the default task completion
value.
:type ex_clone_timeout: ``int``
"""
name = kwargs['name']
image = kwargs['image']
ex_vm_names = kwargs.get('ex_vm_names')
ex_vm_cpu = kwargs.get('ex_vm_cpu')
ex_vm_memory = kwargs.get('ex_vm_memory')
ex_vm_script = kwargs.get('ex_vm_script')
ex_vm_fence = kwargs.get('ex_vm_fence', None)
ex_network = kwargs.get('ex_network', None)
ex_vm_network = kwargs.get('ex_vm_network', None)
ex_vm_ipmode = kwargs.get('ex_vm_ipmode', None)
ex_deploy = kwargs.get('ex_deploy', True)
ex_vdc = kwargs.get('ex_vdc', None)
ex_clone_timeout = kwargs.get('ex_clone_timeout',
DEFAULT_TASK_COMPLETION_TIMEOUT)
self._validate_vm_names(ex_vm_names)
self._validate_vm_cpu(ex_vm_cpu)
self._validate_vm_memory(ex_vm_memory)
self._validate_vm_fence(ex_vm_fence)
self._validate_vm_ipmode(ex_vm_ipmode)
ex_vm_script = self._validate_vm_script(ex_vm_script)
# Some providers don't require a network link
if ex_network:
network_href = self._get_network_href(ex_network)
network_elem = self.connection.request(
get_url_path(network_href)).object
else:
network_elem = None
vdc = self._get_vdc(ex_vdc)
if self._is_node(image):
vapp_name, vapp_href = self._clone_node(name,
image,
vdc,
ex_clone_timeout)
else:
vapp_name, vapp_href = self._instantiate_node(name, image,
network_elem,
vdc, ex_vm_network,
ex_vm_fence,
ex_clone_timeout)
self._change_vm_names(vapp_href, ex_vm_names)
self._change_vm_cpu(vapp_href, ex_vm_cpu)
self._change_vm_memory(vapp_href, ex_vm_memory)
self._change_vm_script(vapp_href, ex_vm_script)
self._change_vm_ipmode(vapp_href, ex_vm_ipmode)
# Power on the VM.
if ex_deploy:
            # Retry 3 times: when instantiating a large number of VMs at the
            # same time, some may fail on resource allocation
retry = 3
while True:
try:
res = self.connection.request(
'%s/power/action/powerOn' % get_url_path(vapp_href),
method='POST')
self._wait_for_task_completion(res.object.get('href'))
break
except Exception:
if retry <= 0:
raise
retry -= 1
time.sleep(10)
res = self.connection.request(get_url_path(vapp_href))
node = self._to_node(res.object)
return node
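    # Illustrative usage sketch (editor's addition, not part of libcloud):
    # instantiating a vApp from a template image with two renamed VMs.
    # All literal values below are hypothetical.
    #
    #     node = driver.create_node(
    #         name='web-vapp',
    #         image=image,                   # NodeImage or existing Node
    #         ex_network='Org-External-Net',
    #         ex_vm_names=['web-a', 'web-b'],
    #         ex_vm_cpu=2,
    #         ex_vm_memory=4096,
    #         ex_vm_ipmode='POOL',
    #     )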
def _instantiate_node(self, name, image, network_elem, vdc, vm_network,
vm_fence, instantiate_timeout):
instantiate_xml = Instantiate_1_5_VAppXML(
name=name,
template=image.id,
network=network_elem,
vm_network=vm_network,
vm_fence=vm_fence
)
# Instantiate VM and get identifier.
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.instantiateVAppTemplateParams+xml'
}
res = self.connection.request(
'%s/action/instantiateVAppTemplate' % get_url_path(vdc.id),
data=instantiate_xml.tostring(),
method='POST',
headers=headers
)
vapp_name = res.object.get('name')
vapp_href = res.object.get('href')
task_href = res.object.find(fixxpath(res.object, "Tasks/Task")).get(
'href')
self._wait_for_task_completion(task_href, instantiate_timeout)
return vapp_name, vapp_href
def _clone_node(self, name, sourceNode, vdc, clone_timeout):
clone_xml = ET.Element(
"CloneVAppParams",
{'name': name, 'deploy': 'false', 'powerOn': 'false',
'xmlns': "http://www.vmware.com/vcloud/v1.5",
'xmlns:xsi': "http://www.w3.org/2001/XMLSchema-instance"}
)
ET.SubElement(clone_xml,
'Description').text = 'Clone of ' + sourceNode.name
ET.SubElement(clone_xml, 'Source', {'href': sourceNode.id})
headers = {
'Content-Type': 'application/vnd.vmware.vcloud.cloneVAppParams+xml'
}
res = self.connection.request(
'%s/action/cloneVApp' % get_url_path(vdc.id),
data=ET.tostring(clone_xml),
method='POST',
headers=headers
)
vapp_name = res.object.get('name')
vapp_href = res.object.get('href')
task_href = res.object.find(
fixxpath(res.object, "Tasks/Task")).get('href')
self._wait_for_task_completion(task_href, clone_timeout)
res = self.connection.request(get_url_path(vapp_href))
vms = res.object.findall(fixxpath(res.object, "Children/Vm"))
# Fix the networking for VMs
for i, vm in enumerate(vms):
# Remove network
network_xml = ET.Element("NetworkConnectionSection", {
'ovf:required': 'false',
'xmlns': "http://www.vmware.com/vcloud/v1.5",
'xmlns:ovf': 'http://schemas.dmtf.org/ovf/envelope/1'})
ET.SubElement(network_xml, "ovf:Info").text = \
'Specifies the available VM network connections'
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.networkConnectionSection+xml'
}
res = self.connection.request(
'%s/networkConnectionSection' % get_url_path(vm.get('href')),
data=ET.tostring(network_xml),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
# Re-add network
network_xml = vm.find(fixxpath(vm, 'NetworkConnectionSection'))
network_conn_xml = network_xml.find(
fixxpath(network_xml, 'NetworkConnection'))
network_conn_xml.set('needsCustomization', 'true')
network_conn_xml.remove(
network_conn_xml.find(fixxpath(network_xml, 'IpAddress')))
network_conn_xml.remove(
network_conn_xml.find(fixxpath(network_xml, 'MACAddress')))
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.networkConnectionSection+xml'
}
res = self.connection.request(
'%s/networkConnectionSection' % get_url_path(vm.get('href')),
data=ET.tostring(network_xml),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
return vapp_name, vapp_href
def ex_set_vm_cpu(self, vapp_or_vm_id, vm_cpu):
"""
        Sets the number of virtual CPUs for the specified VM or VMs under
        the vApp. If the vapp_or_vm_id param represents a link to a vApp,
        all VMs that are attached to this vApp will be modified.
        Please ensure that hot-adding a virtual CPU is enabled for the
        powered-on virtual machines. Otherwise, use this method on an
        undeployed vApp.
:keyword vapp_or_vm_id: vApp or VM ID that will be modified. If
a vApp ID is used here all attached VMs
will be modified
:type vapp_or_vm_id: ``str``
:keyword vm_cpu: number of virtual CPUs/cores to allocate for
specified VMs
:type vm_cpu: ``int``
:rtype: ``None``
"""
self._validate_vm_cpu(vm_cpu)
self._change_vm_cpu(vapp_or_vm_id, vm_cpu)
def ex_set_vm_memory(self, vapp_or_vm_id, vm_memory):
"""
Sets the virtual memory in MB to allocate for the specified VM or
        VMs under the vApp. If the vapp_or_vm_id param represents a link
        to a vApp, all VMs that are attached to this vApp will be modified.
        Please ensure that hot-changing virtual memory is enabled for the
        powered-on virtual machines. Otherwise, use this method on an
        undeployed vApp.
:keyword vapp_or_vm_id: vApp or VM ID that will be modified. If
a vApp ID is used here all attached VMs
will be modified
:type vapp_or_vm_id: ``str``
:keyword vm_memory: virtual memory in MB to allocate for the
specified VM or VMs
:type vm_memory: ``int``
:rtype: ``None``
"""
self._validate_vm_memory(vm_memory)
self._change_vm_memory(vapp_or_vm_id, vm_memory)
def ex_add_vm_disk(self, vapp_or_vm_id, vm_disk_size):
"""
Adds a virtual disk to the specified VM or VMs under the vApp. If the
        vapp_or_vm_id param represents a link to a vApp, all VMs that are
        attached to this vApp will be modified.
:keyword vapp_or_vm_id: vApp or VM ID that will be modified. If a
vApp ID is used here all attached VMs
will be modified
:type vapp_or_vm_id: ``str``
:keyword vm_disk_size: the disk capacity in GB that will be added
to the specified VM or VMs
:type vm_disk_size: ``int``
:rtype: ``None``
"""
self._validate_vm_disk_size(vm_disk_size)
self._add_vm_disk(vapp_or_vm_id, vm_disk_size)
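    # Editor's sketch (hypothetical helper, not part of upstream libcloud):
    # chains the three resize helpers above into one convenience call.
    def ex_resize_vm_sketch(self, vapp_or_vm_id, cpu=None, memory=None,
                            extra_disk_gb=None):
        """Resize only the resources that were given; skip the rest."""
        if cpu is not None:
            self.ex_set_vm_cpu(vapp_or_vm_id, cpu)
        if memory is not None:
            self.ex_set_vm_memory(vapp_or_vm_id, memory)
        if extra_disk_gb is not None:
            self.ex_add_vm_disk(vapp_or_vm_id, extra_disk_gb)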
@staticmethod
def _validate_vm_names(names):
if names is None:
return
hname_re = re.compile(
'^(([a-zA-Z]|[a-zA-Z][a-zA-Z0-9]*)[\-])*([A-Za-z]|[A-Za-z][A-Za-z0-9]*[A-Za-z0-9])$') # NOQA
for name in names:
if len(name) > 15:
raise ValueError(
'The VM name "' + name + '" is too long for the computer '
'name (max 15 chars allowed).')
if not hname_re.match(name):
raise ValueError('The VM name "' + name + '" can not be '
'used. "' + name + '" is not a valid '
'computer name for the VM.')
@staticmethod
def _validate_vm_memory(vm_memory):
if vm_memory is None:
return
elif vm_memory not in VIRTUAL_MEMORY_VALS:
raise ValueError(
'%s is not a valid vApp VM memory value' % vm_memory)
@staticmethod
def _validate_vm_cpu(vm_cpu):
if vm_cpu is None:
return
elif vm_cpu not in VIRTUAL_CPU_VALS_1_5:
raise ValueError('%s is not a valid vApp VM CPU value' % vm_cpu)
@staticmethod
def _validate_vm_disk_size(vm_disk):
if vm_disk is None:
return
elif int(vm_disk) < 0:
            raise ValueError(
                '%s is not a valid vApp VM disk space value' % vm_disk)
@staticmethod
def _validate_vm_script(vm_script):
if vm_script is None:
return
# Try to locate the script file
if not os.path.isabs(vm_script):
vm_script = os.path.expanduser(vm_script)
vm_script = os.path.abspath(vm_script)
if not os.path.isfile(vm_script):
raise LibcloudError(
"%s the VM script file does not exist" % vm_script)
        # Read the script now so that permission or I/O errors surface early
        open(vm_script).read()
return vm_script
@staticmethod
def _validate_vm_fence(vm_fence):
if vm_fence is None:
return
elif vm_fence not in FENCE_MODE_VALS_1_5:
raise ValueError('%s is not a valid fencing mode value' % vm_fence)
@staticmethod
def _validate_vm_ipmode(vm_ipmode):
if vm_ipmode is None:
return
elif vm_ipmode == 'MANUAL':
raise NotImplementedError(
'MANUAL IP mode: The interface for supplying '
'IPAddress does not exist yet')
elif vm_ipmode not in IP_MODE_VALS_1_5:
raise ValueError(
'%s is not a valid IP address allocation mode value'
% vm_ipmode)
def _change_vm_names(self, vapp_or_vm_id, vm_names):
if vm_names is None:
return
vms = self._get_vm_elements(vapp_or_vm_id)
for i, vm in enumerate(vms):
if len(vm_names) <= i:
return
# Get GuestCustomizationSection
res = self.connection.request(
'%s/guestCustomizationSection' % get_url_path(vm.get('href')))
# Update GuestCustomizationSection
res.object.find(
fixxpath(res.object, 'ComputerName')).text = vm_names[i]
# Remove AdminPassword from customization section
admin_pass = res.object.find(fixxpath(res.object, 'AdminPassword'))
if admin_pass is not None:
res.object.remove(admin_pass)
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.guestCustomizationSection+xml'
}
res = self.connection.request(
'%s/guestCustomizationSection' % get_url_path(vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
# Update Vm name
req_xml = ET.Element("Vm", {
'name': vm_names[i],
'xmlns': "http://www.vmware.com/vcloud/v1.5"})
res = self.connection.request(
get_url_path(vm.get('href')),
data=ET.tostring(req_xml),
method='PUT',
headers={
'Content-Type': 'application/vnd.vmware.vcloud.vm+xml'}
)
self._wait_for_task_completion(res.object.get('href'))
def _change_vm_cpu(self, vapp_or_vm_id, vm_cpu):
if vm_cpu is None:
return
vms = self._get_vm_elements(vapp_or_vm_id)
for vm in vms:
# Get virtualHardwareSection/cpu section
res = self.connection.request(
'%s/virtualHardwareSection/cpu' % get_url_path(vm.get('href')))
# Update VirtualQuantity field
xpath = ('{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData}VirtualQuantity')
res.object.find(xpath).text = str(vm_cpu)
headers = {
'Content-Type': 'application/vnd.vmware.vcloud.rasdItem+xml'
}
res = self.connection.request(
'%s/virtualHardwareSection/cpu' % get_url_path(vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
def _change_vm_memory(self, vapp_or_vm_id, vm_memory):
if vm_memory is None:
return
vms = self._get_vm_elements(vapp_or_vm_id)
for vm in vms:
# Get virtualHardwareSection/memory section
res = self.connection.request(
'%s/virtualHardwareSection/memory' %
get_url_path(vm.get('href')))
# Update VirtualQuantity field
xpath = ('{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData}VirtualQuantity')
res.object.find(xpath).text = str(vm_memory)
headers = {
'Content-Type': 'application/vnd.vmware.vcloud.rasdItem+xml'
}
res = self.connection.request(
'%s/virtualHardwareSection/memory' % get_url_path(
vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
def _add_vm_disk(self, vapp_or_vm_id, vm_disk):
if vm_disk is None:
return
rasd_ns = ('{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData}')
vms = self._get_vm_elements(vapp_or_vm_id)
for vm in vms:
# Get virtualHardwareSection/disks section
res = self.connection.request(
'%s/virtualHardwareSection/disks' %
get_url_path(vm.get('href')))
existing_ids = []
new_disk = None
for item in res.object.findall(fixxpath(res.object, 'Item')):
                # Record existing instance IDs and strip elements that must
                # not be sent back to the server
for elem in item:
if elem.tag == '%sInstanceID' % rasd_ns:
existing_ids.append(int(elem.text))
if elem.tag in ['%sAddressOnParent' % rasd_ns,
'%sParent' % rasd_ns]:
item.remove(elem)
if item.find('%sHostResource' % rasd_ns) is not None:
new_disk = item
new_disk = copy.deepcopy(new_disk)
disk_id = max(existing_ids) + 1
new_disk.find('%sInstanceID' % rasd_ns).text = str(disk_id)
new_disk.find('%sElementName' %
rasd_ns).text = 'Hard Disk ' + str(disk_id)
new_disk.find('%sHostResource' % rasd_ns).set(
fixxpath(new_disk, 'capacity'), str(int(vm_disk) * 1024))
res.object.append(new_disk)
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.rasditemslist+xml'
}
res = self.connection.request(
'%s/virtualHardwareSection/disks' % get_url_path(
vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
def _change_vm_script(self, vapp_or_vm_id, vm_script):
if vm_script is None:
return
vms = self._get_vm_elements(vapp_or_vm_id)
try:
script = open(vm_script).read()
        except EnvironmentError:
            # The script path was validated earlier; if the file can no
            # longer be read, skip the customisation quietly.
            return
# ElementTree escapes script characters automatically. Escape
# requirements:
# http://www.vmware.com/support/vcd/doc/rest-api-doc-1.5-html/types/
# GuestCustomizationSectionType.html
for vm in vms:
# Get GuestCustomizationSection
res = self.connection.request(
'%s/guestCustomizationSection' % get_url_path(vm.get('href')))
# Attempt to update any existing CustomizationScript element
try:
res.object.find(
fixxpath(res.object, 'CustomizationScript')).text = script
            except AttributeError:
# CustomizationScript section does not exist, insert it just
# before ComputerName
for i, e in enumerate(res.object):
if e.tag == \
'{http://www.vmware.com/vcloud/v1.5}ComputerName':
break
e = ET.Element(
'{http://www.vmware.com/vcloud/v1.5}CustomizationScript')
e.text = script
res.object.insert(i, e)
# Remove AdminPassword from customization section due to an API
# quirk
admin_pass = res.object.find(fixxpath(res.object, 'AdminPassword'))
if admin_pass is not None:
res.object.remove(admin_pass)
# Update VM's GuestCustomizationSection
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.guestCustomizationSection+xml'
}
res = self.connection.request(
'%s/guestCustomizationSection' % get_url_path(vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
def _change_vm_ipmode(self, vapp_or_vm_id, vm_ipmode):
if vm_ipmode is None:
return
vms = self._get_vm_elements(vapp_or_vm_id)
for vm in vms:
res = self.connection.request(
'%s/networkConnectionSection' % get_url_path(vm.get('href')))
net_conns = res.object.findall(
fixxpath(res.object, 'NetworkConnection'))
for c in net_conns:
c.find(fixxpath(c, 'IpAddressAllocationMode')).text = vm_ipmode
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.networkConnectionSection+xml'
}
res = self.connection.request(
'%s/networkConnectionSection' % get_url_path(vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
def _get_network_href(self, network_name):
network_href = None
# Find the organisation's network href
res = self.connection.request(self.org)
links = res.object.findall(fixxpath(res.object, 'Link'))
for l in links:
if l.attrib['type'] == \
'application/vnd.vmware.vcloud.orgNetwork+xml' \
and l.attrib['name'] == network_name:
network_href = l.attrib['href']
if network_href is None:
raise ValueError(
'%s is not a valid organisation network name' % network_name)
else:
return network_href
def _get_vm_elements(self, vapp_or_vm_id):
res = self.connection.request(get_url_path(vapp_or_vm_id))
if res.object.tag.endswith('VApp'):
vms = res.object.findall(fixxpath(res.object, 'Children/Vm'))
elif res.object.tag.endswith('Vm'):
vms = [res.object]
else:
raise ValueError(
'Specified ID value is not a valid VApp or Vm identifier.')
return vms
def _is_node(self, node_or_image):
return isinstance(node_or_image, Node)
def _to_node(self, node_elm):
# Parse VMs as extra field
vms = []
for vm_elem in node_elm.findall(fixxpath(node_elm, 'Children/Vm')):
public_ips = []
private_ips = []
xpath = fixxpath(vm_elem,
'NetworkConnectionSection/NetworkConnection')
for connection in vm_elem.findall(xpath):
ip = connection.find(fixxpath(connection, "IpAddress"))
if ip is not None:
private_ips.append(ip.text)
external_ip = connection.find(
fixxpath(connection, "ExternalIpAddress"))
if external_ip is not None:
public_ips.append(external_ip.text)
elif ip is not None:
public_ips.append(ip.text)
xpath = ('{http://schemas.dmtf.org/ovf/envelope/1}'
'OperatingSystemSection')
os_type_elem = vm_elem.find(xpath)
if os_type_elem is not None:
os_type = os_type_elem.get(
'{http://www.vmware.com/schema/ovf}osType')
else:
os_type = None
vm = {
'id': vm_elem.get('href'),
'name': vm_elem.get('name'),
'state': self.NODE_STATE_MAP[vm_elem.get('status')],
'public_ips': public_ips,
'private_ips': private_ips,
'os_type': os_type
}
vms.append(vm)
# Take the node IP addresses from all VMs
public_ips = []
private_ips = []
for vm in vms:
public_ips.extend(vm['public_ips'])
private_ips.extend(vm['private_ips'])
# Find vDC
vdc_id = next(link.get('href') for link
in node_elm.findall(fixxpath(node_elm, 'Link'))
if link.get('type') ==
'application/vnd.vmware.vcloud.vdc+xml')
vdc = next(vdc for vdc in self.vdcs if vdc.id == vdc_id)
node = Node(id=node_elm.get('href'),
name=node_elm.get('name'),
state=self.NODE_STATE_MAP[node_elm.get('status')],
public_ips=public_ips,
private_ips=private_ips,
driver=self.connection.driver,
extra={'vdc': vdc.name, 'vms': vms})
return node
def _to_vdc(self, vdc_elm):
def get_capacity_values(capacity_elm):
if capacity_elm is None:
return None
limit = int(capacity_elm.findtext(fixxpath(capacity_elm, 'Limit')))
used = int(capacity_elm.findtext(fixxpath(capacity_elm, 'Used')))
units = capacity_elm.findtext(fixxpath(capacity_elm, 'Units'))
return Capacity(limit, used, units)
cpu = get_capacity_values(
vdc_elm.find(fixxpath(vdc_elm, 'ComputeCapacity/Cpu')))
memory = get_capacity_values(
vdc_elm.find(fixxpath(vdc_elm, 'ComputeCapacity/Memory')))
storage = get_capacity_values(
vdc_elm.find(fixxpath(vdc_elm, 'StorageCapacity')))
return Vdc(id=vdc_elm.get('href'),
name=vdc_elm.get('name'),
driver=self,
allocation_model=vdc_elm.findtext(
fixxpath(vdc_elm, 'AllocationModel')),
cpu=cpu,
memory=memory,
storage=storage)
class VCloud_5_1_NodeDriver(VCloud_1_5_NodeDriver):
@staticmethod
def _validate_vm_memory(vm_memory):
if vm_memory is None:
return None
elif (vm_memory % 4) != 0:
            # The vCD 5.1 virtual machine memory size must be a multiple
            # of 4 MB
raise ValueError(
'%s is not a valid vApp VM memory value' % (vm_memory))
|
VisTrails/VisTrails
|
refs/heads/v2.2
|
vistrails/gui/debug.py
|
2
|
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
import cgi
import logging
from PyQt4 import QtCore, QtGui
from vistrails.core.configuration import get_vistrails_configuration
import vistrails.core.debug
from vistrails.gui.application import get_vistrails_application
from vistrails.gui.common_widgets import QDockPushButton
from vistrails.gui.theme import CurrentTheme
import vistrails.gui.utils
from vistrails.gui.vistrails_palette import QVistrailsPaletteInterface
################################################################################
class DebugView(QtGui.QWidget, QVistrailsPaletteInterface):
""" Class used for showing error messages and
debugging QT signals.
    Example of usage:
        import gui.debug
        gui.debug.watch_signal(my_object, my_signal)
"""
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
ui = logging.StreamHandler(debugStream(self.write))
ui.setFormatter(logging.Formatter(
'%(levelname)s\n%(asctime)s\n%(message)s'))
ui.setLevel(logging.DEBUG)
vistrails.core.debug.DebugPrint.getInstance().logger.addHandler(ui)
self.setWindowTitle('VisTrails Messages')
layout = QtGui.QVBoxLayout()
self.setLayout(layout)
# top message filter buttons
filters = QtGui.QHBoxLayout()
layout.addLayout(filters)
filterLabel = QtGui.QLabel('Filter:')
filterLabel.setFixedWidth(40)
filters.addWidget(filterLabel)
self.levels = {}
for i, name in enumerate(('DEBUG', 'INFO', 'WARNING', 'CRITICAL')):
box = QtGui.QCheckBox(name, self)
box.setCheckable(True)
box.setChecked(name != 'DEBUG')
box.setStyleSheet(
'color: %s;\n'
'background-color: %s' % (
CurrentTheme.DEBUG_COLORS[name].name(),
CurrentTheme.DEBUG_FILTER_BACKGROUND_COLOR.name()))
self.connect(box, QtCore.SIGNAL('toggled(bool)'), self.refresh)
filters.addWidget(box)
self.levels[name] = box
filters.addStretch()
# message list
self.list = QtGui.QListWidget()
self.connect(self.list,
QtCore.SIGNAL('currentItemChanged(QListWidgetItem *, QListWidgetItem *)'),
self.showMessage)
layout.addWidget(self.list)
# message details field
self.text = QtGui.QTextEdit()
self.text.setReadOnly(True)
self.text.hide()
layout.addWidget(self.text)
# bottom buttons
buttons = QtGui.QGridLayout()
layout.addLayout(buttons)
leftbuttons = QtGui.QGridLayout()
buttons.addLayout(leftbuttons, 0, 0, QtCore.Qt.AlignLeft)
rightbuttons = QtGui.QGridLayout()
buttons.addLayout(rightbuttons, 0, 1, QtCore.Qt.AlignRight)
copy = QDockPushButton('Copy &Message', self)
copy.setToolTip('Copy selected message to clipboard')
copy.setFixedWidth(125)
rightbuttons.addWidget(copy, 0, 0)
self.connect(copy, QtCore.SIGNAL('clicked()'),
self.copyMessage)
copyAll = QDockPushButton('Copy &All', self)
copyAll.setToolTip('Copy all messages to clipboard (Can be a lot)')
copyAll.setFixedWidth(125)
rightbuttons.addWidget(copyAll, 0, 1)
self.connect(copyAll, QtCore.SIGNAL('clicked()'),
self.copyAll)
self.msg_box = None
self.itemQueue = []
self.resize(700, 400)
def refresh(self):
for i in xrange(self.list.count()):
item = self.list.item(i)
level = item.data(32).split('\n')[0]
self.list.setItemHidden(item, not self.levels[level].isChecked())
def copyMessage(self):
""" copy selected message to clipboard """
items = self.list.selectedItems()
if len(items)>0:
text = items[0].data(32)
get_vistrails_application().clipboard().setText(text)
def copyAll(self):
""" copy all messages to clipboard """
texts = []
for i in range(self.list.count()):
texts.append(self.list.item(i).data(32))
text = '\n'.join(texts)
get_vistrails_application().clipboard().setText(text)
def showMessage(self, item, olditem):
""" show item data in a messagebox """
s = item.data(32)
msgs = s.split('\n')
msgs = [cgi.escape(i) for i in msgs]
format = {'INFO': 'Message:',
'WARNING': 'Warning message:',
'CRITICAL': 'Critical message:'}
text = '<HTML><BODY BGCOLOR="#FFFFFF">'
text += '<H4>%s</H4>' % format.get(msgs[0], 'Message:')
text += '<H4>%s<br></H4>' % msgs[3]
text += '<table border="0">'
if len(msgs)>4:
text += '<tr><td> </td><td align=left>%s</td></tr>' % '<br>'.join(msgs[4:])
text += '<tr><td> </td><td> </td></tr>'
text += '<tr><td align=right><b>Time:</b></td><td>%s</td></tr>' % msgs[1]
text += '<tr><td align=right><b>Location:</b></td><td>%s</td></tr>' % msgs[2]
text += '</table></BODY></HTML>'
self.text.setHtml(text)
self.text.show()
def watch_signal(self, obj, sig):
"""self.watch_signal(QObject, QSignal) -> None. Connects a debugging
call to a signal so that every time signal is emitted, it gets
registered on the log.
"""
self.connect(obj, sig, self.__debugSignal)
def __debugSignal(self, *args):
""" Receives debug signal """
debug(str(args))
def updateMessageBox(self, item):
self.currentItem = item
msg_box = self.msg_box
# update messagebox with data from item
s = item.data(32)
msgs = s.split('\n')
if msgs[0] == "INFO":
msg_box.setIcon(QtGui.QMessageBox.Information)
msg_box.setWindowTitle("Information")
elif msgs[0] == "WARNING":
msg_box.setIcon(QtGui.QMessageBox.Warning)
msg_box.setWindowTitle("Warning")
elif msgs[0] == "CRITICAL":
msg_box.setIcon(QtGui.QMessageBox.Critical)
msg_box.setWindowTitle("Critical error")
msg_box.setText(msgs[3])
def showMessageBox(self, item):
""" Displays the current message in a messagebox
if a message is already shown the same message is shown again
but with a "next message"-button
"""
msg_box = self.msg_box
if not msg_box or not msg_box.isVisible():
# create messagebox
# app segfaults if the handle to the old messagebox is removed
self.old_msg_box = msg_box
msg_box = QtGui.QMessageBox(self.parent())
self.msg_box = msg_box
msg_box.setStandardButtons(QtGui.QMessageBox.Ok)
msg_box.setDefaultButton(QtGui.QMessageBox.Ok)
msg_box.setEscapeButton(QtGui.QMessageBox.Ok)
msg_box.addButton('&Show Messages', msg_box.RejectRole)
self.manyButton = None
self.connect(msg_box,
QtCore.SIGNAL('buttonClicked(QAbstractButton *)'),
self.messageButtonClicked)
self.connect(msg_box,
QtCore.SIGNAL('rejected()'),
self.rejectMessage)
self.updateMessageBox(item)
else:
self.itemQueue.append(item)
# check queue
if self.itemQueue:
# need to set nextmessage-button
many = len(self.itemQueue)
text = '&Next Message (%s more)' % many
if not self.manyButton:
# create button
self.manyButton=QtGui.QPushButton(text)
msg_box.addButton(self.manyButton, msg_box.DestructiveRole)
else:
self.manyButton.setText(text)
else:
# remove button if it exist
if self.manyButton:
msg_box.removeButton(self.manyButton)
self.manyButton = None
if not msg_box.isVisible():
msg_box.show()
msg_box.resize(msg_box.sizeHint())
msg_box.updateGeometry()
msg_box.activateWindow()
msg_box.raise_()
def messageButtonClicked(self, button):
role = self.msg_box.buttonRole(button)
if role == self.msg_box.RejectRole:
self.itemQueue = []
self.set_visible(True)
self.list.setCurrentItem(self.currentItem)
self.list.scrollToItem(self.currentItem)
elif role == self.msg_box.DestructiveRole:
# show next message
item = self.itemQueue[0]
del self.itemQueue[0]
self.showMessageBox(item)
else:
self.itemQueue = []
def write(self, s):
"""write(s) -> None
adds the string s to the message list and displays it
"""
        # Add the string s to the message list and display it
s = s.strip()
msgs = s.split('\n')
if len(msgs)<=3:
msgs.append('Error logging message: invalid log format')
s += '\n' + msgs[3]
if not len(msgs[3].strip()):
msgs[3] = "Unknown Error"
s = '\n'.join(msgs)
text = msgs[3]
item = QtGui.QListWidgetItem(text)
item.setData(32, s)
item.setFlags(item.flags()&~QtCore.Qt.ItemIsEditable)
self.list.addItem(item)
item.setForeground(CurrentTheme.DEBUG_COLORS[msgs[0]])
self.list.setItemHidden(item, not self.levels[msgs[0]].isChecked())
alwaysShowDebugPopup = getattr(get_vistrails_configuration(),
'showDebugPopups',
False)
if msgs[0] == 'CRITICAL':
if self.isVisible() and not alwaysShowDebugPopup:
self.raise_()
self.activateWindow()
modal = get_vistrails_application().activeModalWidget()
if modal:
# need to beat modal window
self.showMessageBox(item)
else:
self.showMessageBox(item)
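    # Illustrative sketch (editor's addition): `write` expects the handler
    # format configured in __init__, i.e. level, time and location on the
    # first three lines followed by the message text, for example:
    #
    #     view.write('WARNING\n12:00:01\nmy_module.py:42\nDisk almost full')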
def closeEvent(self, e):
"""closeEvent(e) -> None
Event handler called when the dialog is about to close."""
self.emit(QtCore.SIGNAL("messagesView(bool)"), False)
def showEvent(self, e):
"""closeEvent(e) -> None
Event handler called when the dialog is about to close."""
self.emit(QtCore.SIGNAL("messagesView(bool)"), True)
def reject(self):
""" Captures Escape key and closes window correctly """
self.close()
def rejectMessage(self):
""" Captures Escape key and closes messageBox correctly """
self.itemQueue = []
self.msg_box.close()
class debugStream(object):
def __init__(self, write):
self._write = write
def write(self, *args, **kwargs):
return self._write(*args, **kwargs)
def watch_signal(obj, sig):
DebugView.getInstance().watch_signal(obj, sig)
critical = vistrails.core.debug.critical
warning = vistrails.core.debug.warning
log = vistrails.core.debug.log
debug = vistrails.core.debug.debug
class TestDebugView(vistrails.gui.utils.TestVisTrailsGUI):
def test_messages(self):
debugview = DebugView.instance()
# test message types
examples = ["INFO\ntime\nplace\nShort test message\n"
"Full test message\nmulti-line",
"INFO\ntime\nplace\nShort test message only",
"INFO\ntime\nplace\n", # empty message
"INFO\ntime\nplace" # no message
]
examples += ["%s\ntime\nplace\nShort test message\nFull test message"\
% m for m in ['INFO', 'WARNING', 'CRITICAL', 'DEBUG']]
for m in examples:
debugview.write(m)
item = debugview.list.item(debugview.list.count()-1)
debugview.showMessageBox(item)
# test message copying
debugview.copyMessage()
debugview.copyAll()
# test button toggling
debugview.levels['INFO'].setChecked(False)
debugview.levels['INFO'].setChecked(True)
debugview.levels['WARNING'].setChecked(False)
debugview.levels['WARNING'].setChecked(True)
debugview.levels['CRITICAL'].setChecked(False)
debugview.levels['CRITICAL'].setChecked(True)
|
maas/maas
|
refs/heads/master
|
src/maascli/auth.py
|
1
|
# Copyright 2012-2016 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""MAAS CLI authentication."""
from getpass import getpass
import http.client
import sys
from urllib.parse import urljoin
from macaroonbakery import httpbakery
from apiclient.creds import convert_string_to_tuple
from maascli.api import Action, http_request
class UnexpectedResponse(Exception):
"""Unexpected API response."""
def try_getpass(prompt):
"""Call `getpass`, ignoring EOF errors."""
try:
return getpass(prompt)
except EOFError:
return None
def get_apikey_via_macaroon(url):
"""Try to get an API key using a macaroon.
httpbakery is used to create a new API token. If the MAAS server supports
macaroons, it will reply that a macaroon discharge is required, and bakery
will send the user to Candid for authentication, and then call the API
again with the acquired macaroon.
If the MAAS server doesn't support macaroons, None is returned.
"""
url = url.strip("/")
client = httpbakery.Client()
resp = client.request(
"POST", "{}/account/?op=create_authorisation_token".format(url)
)
if resp.status_code != 200:
# Most likely the MAAS server doesn't support macaroons.
return None
result = resp.json()
return "{consumer_key}:{token_key}:{token_secret}".format(**result)
def obtain_credentials(url, credentials):
"""Prompt for credentials if possible.
If the credentials are "-" then read from stdin without interactive
prompting.
"""
if credentials == "-":
credentials = sys.stdin.readline().strip()
elif credentials is None:
credentials = get_apikey_via_macaroon(url)
if credentials is None:
credentials = try_getpass(
"API key (leave empty for anonymous access): "
)
# Ensure that the credentials have a valid form.
if credentials and not credentials.isspace():
return convert_string_to_tuple(credentials)
else:
return None
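# Illustrative flow (editor's addition, not part of maascli): resolving and
# verifying credentials for a profile; `url` is a hypothetical MAAS API URL.
#
#     credentials = obtain_credentials(url, None)  # macaroon, then prompt
#     if credentials is not None and check_valid_apikey(url, credentials):
#         pass  # proceed with the authenticated profile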
def check_valid_apikey(url, credentials, insecure=False):
"""Check for valid apikey.
:param credentials: A 3-tuple of credentials.
"""
if "/api/1.0" in url:
check_url = urljoin(url, "nodegroups/")
uri, body, headers = Action.prepare_payload(
op="list", method="GET", uri=check_url, data=[]
)
else:
check_url = urljoin(url, "users/")
uri, body, headers = Action.prepare_payload(
op="whoami", method="GET", uri=check_url, data=[]
)
# Headers are returned as a list, but they must be a dict for
# the signing machinery.
headers = dict(headers)
Action.sign(uri, headers, credentials)
response, content = http_request(
uri, method="GET", body=body, headers=headers, insecure=insecure
)
status = int(response["status"])
if status == http.client.UNAUTHORIZED:
return False
elif status == http.client.OK:
return True
else:
raise UnexpectedResponse(
"The MAAS server gave an unexpected response: %s" % status
)
|
chengjf/database-interface-doc-management
|
refs/heads/master
|
flask-demo/flask/Lib/encodings/gb18030.py
|
816
|
#
# gb18030.py: Python Unicode Codec for GB18030
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_cn, codecs
import _multibytecodec as mbc
codec = _codecs_cn.getcodec('gb18030')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='gb18030',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
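# Editor's illustrative sketch (not part of the original codec module):
# round-trip a string through the gb18030 codec registered above.
if __name__ == '__main__':
    sample = u'\u4e2d\u6587'            # the two characters of "Chinese"
    encoded = sample.encode('gb18030')  # -> '\xd6\xd0\xce\xc4'
    assert encoded.decode('gb18030') == sample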
|
ryfeus/lambda-packs
|
refs/heads/master
|
Selenium_Chromium/source/setuptools/command/py36compat.py
|
286
|
import os
from glob import glob
from distutils.util import convert_path
from distutils.command import sdist
from setuptools.extern.six.moves import filter
class sdist_add_defaults:
"""
Mix-in providing forward-compatibility for functionality as found in
distutils on Python 3.7.
Do not edit the code in this class except to update functionality
as implemented in distutils. Instead, override in the subclass.
"""
def add_defaults(self):
"""Add all the default files to self.filelist:
- README or README.txt
- setup.py
- test/test*.py
- all pure Python modules mentioned in setup script
- all files pointed by package_data (build_py)
- all files defined in data_files.
- all files defined as scripts.
- all C sources listed as part of extensions or C libraries
in the setup script (doesn't catch C headers!)
Warns if (README or README.txt) or setup.py are missing; everything
else is optional.
"""
self._add_defaults_standards()
self._add_defaults_optional()
self._add_defaults_python()
self._add_defaults_data_files()
self._add_defaults_ext()
self._add_defaults_c_libs()
self._add_defaults_scripts()
@staticmethod
def _cs_path_exists(fspath):
"""
Case-sensitive path existence check
>>> sdist_add_defaults._cs_path_exists(__file__)
True
>>> sdist_add_defaults._cs_path_exists(__file__.upper())
False
"""
if not os.path.exists(fspath):
return False
# make absolute so we always have a directory
abspath = os.path.abspath(fspath)
directory, filename = os.path.split(abspath)
return filename in os.listdir(directory)
def _add_defaults_standards(self):
standards = [self.READMES, self.distribution.script_name]
for fn in standards:
if isinstance(fn, tuple):
alts = fn
got_it = False
for fn in alts:
if self._cs_path_exists(fn):
got_it = True
self.filelist.append(fn)
break
if not got_it:
self.warn("standard file not found: should have one of " +
', '.join(alts))
else:
if self._cs_path_exists(fn):
self.filelist.append(fn)
else:
self.warn("standard file '%s' not found" % fn)
def _add_defaults_optional(self):
optional = ['test/test*.py', 'setup.cfg']
for pattern in optional:
files = filter(os.path.isfile, glob(pattern))
self.filelist.extend(files)
def _add_defaults_python(self):
# build_py is used to get:
# - python modules
# - files defined in package_data
build_py = self.get_finalized_command('build_py')
# getting python files
if self.distribution.has_pure_modules():
self.filelist.extend(build_py.get_source_files())
# getting package_data files
# (computed in build_py.data_files by build_py.finalize_options)
for pkg, src_dir, build_dir, filenames in build_py.data_files:
for filename in filenames:
self.filelist.append(os.path.join(src_dir, filename))
def _add_defaults_data_files(self):
# getting distribution.data_files
if self.distribution.has_data_files():
for item in self.distribution.data_files:
if isinstance(item, str):
# plain file
item = convert_path(item)
if os.path.isfile(item):
self.filelist.append(item)
else:
# a (dirname, filenames) tuple
dirname, filenames = item
for f in filenames:
f = convert_path(f)
if os.path.isfile(f):
self.filelist.append(f)
def _add_defaults_ext(self):
if self.distribution.has_ext_modules():
build_ext = self.get_finalized_command('build_ext')
self.filelist.extend(build_ext.get_source_files())
def _add_defaults_c_libs(self):
if self.distribution.has_c_libraries():
build_clib = self.get_finalized_command('build_clib')
self.filelist.extend(build_clib.get_source_files())
def _add_defaults_scripts(self):
if self.distribution.has_scripts():
build_scripts = self.get_finalized_command('build_scripts')
self.filelist.extend(build_scripts.get_source_files())
if hasattr(sdist.sdist, '_add_defaults_standards'):
# disable the functionality already available upstream
class sdist_add_defaults:
pass
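# Illustrative usage sketch (editor's addition, not part of setuptools):
# the mix-in is consumed by listing it ahead of distutils' sdist, e.g.
#
#     from distutils.command.sdist import sdist as _sdist
#
#     class sdist(sdist_add_defaults, _sdist):
#         pass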
|
felixbade/ip
|
refs/heads/master
|
app/views.py
|
1
|
from flask import request, Response
from app import app
@app.route('/')
def index():
resp = request.headers.environ['HTTP_X_REAL_IP'] + '\n'
return Response(resp, mimetype='text/plain')
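# Editor's note (not part of the app): the view echoes the client address
# that a fronting proxy placed in the X-Real-IP header, so it must run
# behind a proxy (e.g. nginx) that sets that header. A hypothetical check:
#
#     $ curl -H 'X-Real-IP: 203.0.113.7' http://localhost:5000/
#     203.0.113.7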
|
edx/edx-platform
|
refs/heads/master
|
lms/djangoapps/courseware/module_render.py
|
3
|
"""
Module rendering
"""
import json
import logging
import textwrap
from collections import OrderedDict
from functools import partial
from completion.waffle import ENABLE_COMPLETION_TRACKING_SWITCH
from completion.models import BlockCompletion
from django.conf import settings
from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user
from django.core.cache import cache
from django.db import transaction
from django.http import Http404, HttpResponse, HttpResponseForbidden
from django.middleware.csrf import CsrfViewMiddleware
from django.template.context_processors import csrf
from django.urls import reverse
from django.utils.text import slugify
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.decorators.csrf import csrf_exempt
from edx_django_utils.cache import RequestCache
from edx_django_utils.monitoring import set_custom_attributes_for_course_key, set_monitoring_transaction_name
from edx_proctoring.api import get_attempt_status_summary
from edx_proctoring.services import ProctoringService
from edx_rest_framework_extensions.auth.jwt.authentication import JwtAuthentication
from edx_when.field_data import DateLookupFieldData
from eventtracking import tracker
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from requests.auth import HTTPBasicAuth
from rest_framework.decorators import api_view
from rest_framework.exceptions import APIException
from web_fragments.fragment import Fragment
from xblock.core import XBlock
from xblock.django.request import django_to_webob_request, webob_to_django_response
from xblock.exceptions import NoSuchHandlerError, NoSuchViewError
from xblock.reference.plugins import FSService
from xblock.runtime import KvsFieldData
from common.djangoapps import static_replace
from capa.xqueue_interface import XQueueInterface
from lms.djangoapps.courseware.access import get_user_role, has_access
from lms.djangoapps.courseware.entrance_exams import user_can_skip_entrance_exam, user_has_passed_entrance_exam
from lms.djangoapps.courseware.masquerade import (
MasqueradingKeyValueStore,
filter_displayed_blocks,
is_masquerading_as_specific_student,
setup_masquerade
)
from lms.djangoapps.courseware.model_data import DjangoKeyValueStore, FieldDataCache
from common.djangoapps.edxmako.shortcuts import render_to_string
from lms.djangoapps.courseware.field_overrides import OverrideFieldData
from lms.djangoapps.courseware.services import UserStateService
from lms.djangoapps.grades.api import GradesUtilService
from lms.djangoapps.grades.api import signals as grades_signals
from lms.djangoapps.lms_xblock.field_data import LmsFieldData
from lms.djangoapps.lms_xblock.models import XBlockAsidesConfig
from lms.djangoapps.lms_xblock.runtime import LmsModuleSystem
from lms.djangoapps.verify_student.services import XBlockVerificationService
from openedx.core.djangoapps.bookmarks.services import BookmarksService
from openedx.core.djangoapps.crawlers.models import CrawlersConfig
from openedx.core.djangoapps.credit.services import CreditService
from openedx.core.djangoapps.util.user_utils import SystemUser
from openedx.core.djangolib.markup import HTML
from openedx.core.lib.api.authentication import BearerAuthenticationAllowInactiveUser
from openedx.core.lib.api.view_utils import view_auth_classes
from openedx.core.lib.gating.services import GatingService
from openedx.core.lib.license import wrap_with_license
from openedx.core.lib.url_utils import quote_slashes, unquote_slashes
from openedx.core.lib.xblock_utils import (
add_staff_markup,
get_aside_from_xblock,
hash_resource,
is_xblock_aside,
replace_course_urls,
replace_jump_to_id_urls,
replace_static_urls
)
from openedx.core.lib.xblock_utils import request_token as xblock_request_token
from openedx.core.lib.xblock_utils import wrap_xblock
from openedx.features.course_duration_limits.access import course_expiration_wrapper
from openedx.features.discounts.utils import offer_banner_wrapper
from openedx.features.content_type_gating.services import ContentTypeGatingService
from common.djangoapps.student.models import anonymous_id_for_user, user_by_anonymous_id
from common.djangoapps.student.roles import CourseBetaTesterRole
from common.djangoapps.track import contexts
from common.djangoapps.util import milestones_helpers
from common.djangoapps.util.json_request import JsonResponse
from common.djangoapps.xblock_django.user_service import DjangoXBlockUserService
from xmodule.contentstore.django import contentstore
from xmodule.error_module import ErrorBlock, NonStaffErrorBlock
from xmodule.exceptions import NotFoundError, ProcessingError
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.util.sandboxing import can_execute_unsafe_code, get_python_lib_zip
from xmodule.x_module import XModuleDescriptor
log = logging.getLogger(__name__)
if settings.XQUEUE_INTERFACE.get('basic_auth') is not None:
REQUESTS_AUTH = HTTPBasicAuth(*settings.XQUEUE_INTERFACE['basic_auth'])
else:
REQUESTS_AUTH = None
XQUEUE_INTERFACE = XQueueInterface(
settings.XQUEUE_INTERFACE['url'],
settings.XQUEUE_INTERFACE['django_auth'],
REQUESTS_AUTH,
)
# TODO: course_id and course_key are used interchangeably in this file, which is wrong.
# Some brave person should make the variable names consistent someday, but the code's
# coupled enough that it's kind of tricky--you've been warned!
class LmsModuleRenderError(Exception):
"""
An exception class for exceptions thrown by module_render that don't fit well elsewhere
"""
pass # lint-amnesty, pylint: disable=unnecessary-pass
def make_track_function(request):
'''
Make a tracking function that logs what happened.
For use in ModuleSystem.
'''
from common.djangoapps.track import views as track_views
def function(event_type, event):
return track_views.server_track(request, event_type, event, page='x_module')
return function
def toc_for_course(user, request, course, active_chapter, active_section, field_data_cache):
'''
Create a table of contents from the module store
Return format:
{ 'chapters': [
{'display_name': name, 'url_name': url_name, 'sections': SECTIONS, 'active': bool},
],
'previous_of_active_section': {..},
'next_of_active_section': {..}
}
where SECTIONS is a list
[ {'display_name': name, 'url_name': url_name,
'format': format, 'due': due, 'active' : bool, 'graded': bool}, ...]
where previous_of_active_section and next_of_active_section have information on the
next/previous sections of the active section.
active is set for the section and chapter corresponding to the passed
parameters, which are expected to be url_names of the chapter+section.
Everything else comes from the xml, or defaults to "".
chapters with name 'hidden' are skipped.
NOTE: assumes that if we got this far, user has access to course. Returns
None if this is not the case.
field_data_cache must include data from the course module and 2 levels of its descendants
'''
with modulestore().bulk_operations(course.id):
course_module = get_module_for_descriptor(
user, request, course, field_data_cache, course.id, course=course
)
if course_module is None:
return None, None, None
toc_chapters = list()
chapters = course_module.get_display_items()
# Check for content which needs to be completed
# before the rest of the content is made available
required_content = milestones_helpers.get_required_content(course.id, user)
# The user may not actually have to complete the entrance exam, if one is required
if user_can_skip_entrance_exam(user, course):
            required_content = [content for content in required_content if content != course.entrance_exam_id]
previous_of_active_section, next_of_active_section = None, None
last_processed_section, last_processed_chapter = None, None
found_active_section = False
for chapter in chapters:
# Only show required content, if there is required content
# chapter.hide_from_toc is read-only (bool)
# xss-lint: disable=python-deprecated-display-name
display_id = slugify(chapter.display_name_with_default_escaped)
local_hide_from_toc = False
if required_content:
if str(chapter.location) not in required_content:
local_hide_from_toc = True
# Skip the current chapter if a hide flag is tripped
if chapter.hide_from_toc or local_hide_from_toc:
continue
sections = list()
for section in chapter.get_display_items():
# skip the section if it is hidden from the user
if section.hide_from_toc:
continue
is_section_active = (chapter.url_name == active_chapter and section.url_name == active_section)
if is_section_active:
found_active_section = True
section_context = {
# xss-lint: disable=python-deprecated-display-name
'display_name': section.display_name_with_default_escaped,
'url_name': section.url_name,
'format': section.format if section.format is not None else '',
'due': section.due,
'active': is_section_active,
'graded': section.graded,
}
_add_timed_exam_info(user, course, section, section_context)
# update next and previous of active section, if applicable
if is_section_active:
if last_processed_section:
previous_of_active_section = last_processed_section.copy()
previous_of_active_section['chapter_url_name'] = last_processed_chapter.url_name
elif found_active_section and not next_of_active_section:
next_of_active_section = section_context.copy()
next_of_active_section['chapter_url_name'] = chapter.url_name
sections.append(section_context)
last_processed_section = section_context
last_processed_chapter = chapter
toc_chapters.append({
# xss-lint: disable=python-deprecated-display-name
'display_name': chapter.display_name_with_default_escaped,
'display_id': display_id,
'url_name': chapter.url_name,
'sections': sections,
'active': chapter.url_name == active_chapter
})
return {
'chapters': toc_chapters,
'previous_of_active_section': previous_of_active_section,
'next_of_active_section': next_of_active_section,
}
def _add_timed_exam_info(user, course, section, section_context):
"""
Add in rendering context if exam is a timed exam (which includes proctored)
"""
section_is_time_limited = (
getattr(section, 'is_time_limited', False) and
settings.FEATURES.get('ENABLE_SPECIAL_EXAMS', False)
)
if section_is_time_limited:
# call into edx_proctoring subsystem
# to get relevant proctoring information regarding this
# level of the courseware
#
# This will return None, if (user, course_id, content_id)
# is not applicable
timed_exam_attempt_context = None
try:
timed_exam_attempt_context = get_attempt_status_summary(
user.id,
str(course.id),
str(section.location)
)
except Exception as ex: # pylint: disable=broad-except
# safety net in case something blows up in edx_proctoring
                # as this only provides informational detail, it is better
                # to log and continue (which is safe) than to let it become
                # an unhandled exception
log.exception(ex)
if timed_exam_attempt_context:
# yes, user has proctoring context about
# this level of the courseware
# so add to the accordion data context
section_context.update({
'proctoring': timed_exam_attempt_context,
})
def get_module(user, request, usage_key, field_data_cache,
position=None, log_if_not_found=True, wrap_xmodule_display=True,
grade_bucket_type=None, depth=0,
static_asset_path='', course=None, will_recheck_access=False):
"""
Get an instance of the xmodule class identified by location,
setting the state based on an existing StudentModule, or creating one if none
exists.
Arguments:
- user : User for whom we're getting the module
      - request : current django HttpRequest. Note: request.user isn't used for anything--all auth
        and such works based on user.
- usage_key : A UsageKey object identifying the module to load
- field_data_cache : a FieldDataCache
- position : extra information from URL for user-specified
position within module
- log_if_not_found : If this is True, we log a debug message if we cannot find the requested xmodule.
- wrap_xmodule_display : If this is True, wrap the output display in a single div to allow for the
XModule javascript to be bound correctly
      - depth : number of levels of descendants to cache when loading this module.
        None means cache all descendants
- static_asset_path : static asset path to use (overrides descriptor's value); needed
by get_course_info_section, because info section modules
do not have a course as the parent module, and thus do not
inherit this lms key value.
- will_recheck_access : If True, the caller commits to re-checking access on each child XBlock
before rendering the content in order to display access error messages
to the user.
Returns: xmodule instance, or None if the user does not have access to the
module. If there's an error, will try to return an instance of ErrorBlock
if possible. If not possible, return None.
"""
try:
descriptor = modulestore().get_item(usage_key, depth=depth)
return get_module_for_descriptor(user, request, descriptor, field_data_cache, usage_key.course_key,
position=position,
wrap_xmodule_display=wrap_xmodule_display,
grade_bucket_type=grade_bucket_type,
static_asset_path=static_asset_path,
course=course, will_recheck_access=will_recheck_access)
except ItemNotFoundError:
if log_if_not_found:
log.debug("Error in get_module: ItemNotFoundError")
return None
except: # pylint: disable=W0702
# Something has gone terribly wrong, but still not letting it turn into a 500.
log.exception("Error in get_module")
return None
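# Illustrative usage sketch (editor's addition, not part of edx-platform):
# loading a block for rendering; `request`, `usage_key` and `cache` are
# hypothetical, pre-built objects.
#
#     block = get_module(request.user, request, usage_key, cache,
#                        course=course)
#     if block is None:
#         pass  # no access, or the block could not be loaded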
def display_access_messages(user, block, view, frag, context): # pylint: disable=W0613
"""
An XBlock wrapper that replaces the content fragment with a fragment or message determined by
the has_access check.
"""
blocked_prior_sibling = RequestCache('display_access_messages_prior_sibling')
load_access = has_access(user, 'load', block, block.scope_ids.usage_id.course_key)
if load_access:
blocked_prior_sibling.delete(block.parent)
return frag
prior_sibling = blocked_prior_sibling.get_cached_response(block.parent)
if prior_sibling.is_found and prior_sibling.value.error_code == load_access.error_code:
return Fragment("")
else:
blocked_prior_sibling.set(block.parent, load_access)
if load_access.user_fragment:
msg_fragment = load_access.user_fragment
elif load_access.user_message:
msg_fragment = Fragment(textwrap.dedent(HTML("""\
<div>{}</div>
""").format(load_access.user_message)))
else:
msg_fragment = Fragment("")
if load_access.developer_message and has_access(user, 'staff', block, block.scope_ids.usage_id.course_key):
msg_fragment.content += textwrap.dedent(HTML("""\
<div>{}</div>
""").format(load_access.developer_message))
return msg_fragment
def get_xqueue_callback_url_prefix(request):
"""
Calculates default prefix based on request, but allows override via settings
This is separated from get_module_for_descriptor so that it can be called
by the LMS before submitting background tasks to run. The xqueue callbacks
should go back to the LMS, not to the worker.
"""
prefix = '{proto}://{host}'.format(
proto=request.META.get('HTTP_X_FORWARDED_PROTO', 'https' if request.is_secure() else 'http'),
host=request.get_host()
)
return settings.XQUEUE_INTERFACE.get('callback_url', prefix)
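# Editor's note: for a request proxied with 'X-Forwarded-Proto: https' to
# host lms.example.com, the function above yields 'https://lms.example.com'
# unless settings.XQUEUE_INTERFACE['callback_url'] overrides it.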
# pylint: disable=too-many-statements
def get_module_for_descriptor(user, request, descriptor, field_data_cache, course_key,
position=None, wrap_xmodule_display=True, grade_bucket_type=None,
static_asset_path='', disable_staff_debug_info=False,
course=None, will_recheck_access=False):
"""
Implements get_module, extracting out the request-specific functionality.
disable_staff_debug_info : If this is True, exclude staff debug information in the rendering of the module.
See get_module() docstring for further details.
"""
track_function = make_track_function(request)
xqueue_callback_url_prefix = get_xqueue_callback_url_prefix(request)
user_location = getattr(request, 'session', {}).get('country_code')
student_kvs = DjangoKeyValueStore(field_data_cache)
if is_masquerading_as_specific_student(user, course_key):
student_kvs = MasqueradingKeyValueStore(student_kvs, request.session)
student_data = KvsFieldData(student_kvs)
return get_module_for_descriptor_internal(
user=user,
descriptor=descriptor,
student_data=student_data,
course_id=course_key,
track_function=track_function,
xqueue_callback_url_prefix=xqueue_callback_url_prefix,
position=position,
wrap_xmodule_display=wrap_xmodule_display,
grade_bucket_type=grade_bucket_type,
static_asset_path=static_asset_path,
user_location=user_location,
request_token=xblock_request_token(request),
disable_staff_debug_info=disable_staff_debug_info,
course=course,
will_recheck_access=will_recheck_access,
)
def get_module_system_for_user(
user,
student_data, # TODO
# Arguments preceding this comment have user binding, those following don't
descriptor,
course_id,
track_function,
xqueue_callback_url_prefix,
request_token,
position=None,
wrap_xmodule_display=True,
grade_bucket_type=None,
static_asset_path='',
user_location=None,
disable_staff_debug_info=False,
course=None,
will_recheck_access=False,
):
"""
Helper function that returns a module system and student_data bound to a user and a descriptor.
The purpose of this function is to factor out everywhere a user is implicitly bound when creating a module,
to allow an existing module to be re-bound to a user. Most of the user bindings happen when creating the
closures that feed the instantiation of ModuleSystem.
    The arguments fall into two categories: those that have explicit or implicit user binding, which are user
    and student_data, and those that don't and are just present so that ModuleSystem can be instantiated, which
    are all the other arguments. Ultimately, this isn't too different from how get_module_for_descriptor_internal
    worked before refactoring.
Arguments:
see arguments for get_module()
        request_token (str): A token unique to the request, used by xblock initialization
Returns:
(LmsModuleSystem, KvsFieldData): (module system, student_data) bound to, primarily, the user and descriptor
"""
def make_xqueue_callback(dispatch='score_update'):
"""
Returns fully qualified callback URL for external queueing system
"""
relative_xqueue_callback_url = reverse(
'xqueue_callback',
kwargs=dict(
course_id=str(course_id),
userid=str(user.id),
mod_id=str(descriptor.location),
dispatch=dispatch
),
)
return xqueue_callback_url_prefix + relative_xqueue_callback_url
# Default queuename is course-specific and is derived from the course that
# contains the current module.
# TODO: Queuename should be derived from 'course_settings.json' of each course
xqueue_default_queuename = descriptor.location.org + '-' + descriptor.location.course
xqueue = {
'interface': XQUEUE_INTERFACE,
'construct_callback': make_xqueue_callback,
'default_queuename': xqueue_default_queuename.replace(' ', '_'),
'waittime': settings.XQUEUE_WAITTIME_BETWEEN_REQUESTS
}
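    # Illustrative sketch (added; hypothetical ids): make_xqueue_callback('score_update')
    # reverses the 'xqueue_callback' URL pattern and prepends the prefix computed by
    # get_xqueue_callback_url_prefix, yielding something like
    #   https://lms.example.com/xqueue_callback/<course_id>/<user_id>/<usage_id>/score_update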
def inner_get_module(descriptor):
"""
Delegate to get_module_for_descriptor_internal() with all values except `descriptor` set.
Because it does an access check, it may return None.
"""
# TODO: fix this so that make_xqueue_callback uses the descriptor passed into
# inner_get_module, not the parent's callback. Add it as an argument....
return get_module_for_descriptor_internal(
user=user,
descriptor=descriptor,
student_data=student_data,
course_id=course_id,
track_function=track_function,
xqueue_callback_url_prefix=xqueue_callback_url_prefix,
position=position,
wrap_xmodule_display=wrap_xmodule_display,
grade_bucket_type=grade_bucket_type,
static_asset_path=static_asset_path,
user_location=user_location,
request_token=request_token,
course=course,
will_recheck_access=will_recheck_access,
)
def get_event_handler(event_type):
"""
Return an appropriate function to handle the event.
Returns None if no special processing is required.
"""
handlers = {
'grade': handle_grade_event,
}
if ENABLE_COMPLETION_TRACKING_SWITCH.is_enabled():
handlers.update({
'completion': handle_completion_event,
'progress': handle_deprecated_progress_event,
})
return handlers.get(event_type)
def publish(block, event_type, event):
"""
A function that allows XModules to publish events.
"""
handle_event = get_event_handler(event_type)
if handle_event and not is_masquerading_as_specific_student(user, course_id):
handle_event(block, event)
else:
context = contexts.course_context_from_course_id(course_id)
if block.runtime.user_id:
context['user_id'] = block.runtime.user_id
context['asides'] = {}
for aside in block.runtime.get_asides(block):
if hasattr(aside, 'get_event_context'):
aside_event_info = aside.get_event_context(event_type, event)
if aside_event_info is not None:
context['asides'][aside.scope_ids.block_type] = aside_event_info
with tracker.get_tracker().context(event_type, context):
track_function(event_type, event)
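    # Illustrative sketch (added): a block scoring a submission publishes a 'grade'
    # event carrying the keys consumed by handle_grade_event below, e.g.
    #   self.runtime.publish(self, 'grade', {'value': 8, 'max_value': 10, 'only_if_higher': True})
    # Event types without a registered handler fall through to the tracking branch above.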
def handle_completion_event(block, event):
"""
Submit a completion object for the block.
"""
if not ENABLE_COMPLETION_TRACKING_SWITCH.is_enabled(): # lint-amnesty, pylint: disable=no-else-raise
raise Http404
else:
BlockCompletion.objects.submit_completion(
user=user,
block_key=block.scope_ids.usage_id,
completion=event['completion'],
)
def handle_grade_event(block, event):
"""
Submit a grade for the block.
"""
if not user.is_anonymous:
grades_signals.SCORE_PUBLISHED.send(
sender=None,
block=block,
user=user,
raw_earned=event['value'],
raw_possible=event['max_value'],
only_if_higher=event.get('only_if_higher'),
score_deleted=event.get('score_deleted'),
grader_response=event.get('grader_response')
)
def handle_deprecated_progress_event(block, event):
"""
DEPRECATED: Submit a completion for the block represented by the
progress event.
This exists to support the legacy progress extension used by
edx-solutions. New XBlocks should not emit these events, but instead
emit completion events directly.
"""
if not ENABLE_COMPLETION_TRACKING_SWITCH.is_enabled(): # lint-amnesty, pylint: disable=no-else-raise
raise Http404
else:
requested_user_id = event.get('user_id', user.id)
if requested_user_id != user.id:
log.warning(f"{user} tried to submit a completion on behalf of {requested_user_id}")
return
# If blocks explicitly declare support for the new completion API,
# we expect them to emit 'completion' events,
# and we ignore the deprecated 'progress' events
# in order to avoid duplicate work and possibly conflicting semantics.
if not getattr(block, 'has_custom_completion', False):
BlockCompletion.objects.submit_completion(
user=user,
block_key=block.scope_ids.usage_id,
completion=1.0,
)
def rebind_noauth_module_to_user(module, real_user):
"""
A function that allows a module to get re-bound to a real user if it was previously bound to an AnonymousUser.
Will only work within a module bound to an AnonymousUser, e.g. one that's instantiated by the noauth_handler.
Arguments:
module (any xblock type): the module to rebind
real_user (django.contrib.auth.models.User): the user to bind to
Returns:
nothing (but the side effect is that module is re-bound to real_user)
"""
if user.is_authenticated:
err_msg = ("rebind_noauth_module_to_user can only be called from a module bound to "
"an anonymous user")
log.error(err_msg)
raise LmsModuleRenderError(err_msg)
field_data_cache_real_user = FieldDataCache.cache_for_descriptor_descendents(
course_id,
real_user,
module,
asides=XBlockAsidesConfig.possible_asides(),
)
student_data_real_user = KvsFieldData(DjangoKeyValueStore(field_data_cache_real_user))
(inner_system, inner_student_data) = get_module_system_for_user(
user=real_user,
student_data=student_data_real_user, # These have implicit user bindings, rest of args considered not to
descriptor=module,
course_id=course_id,
track_function=track_function,
xqueue_callback_url_prefix=xqueue_callback_url_prefix,
position=position,
wrap_xmodule_display=wrap_xmodule_display,
grade_bucket_type=grade_bucket_type,
static_asset_path=static_asset_path,
user_location=user_location,
request_token=request_token,
course=course,
will_recheck_access=will_recheck_access,
)
module.bind_for_student(
inner_system,
real_user.id,
[
partial(DateLookupFieldData, course_id=course_id, user=user),
partial(OverrideFieldData.wrap, real_user, course),
partial(LmsFieldData, student_data=inner_student_data),
],
)
module.scope_ids = (
module.scope_ids._replace(user_id=real_user.id)
)
# now bind the module to the new ModuleSystem instance and vice-versa
module.runtime = inner_system
inner_system.xmodule_instance = module
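    # Illustrative usage (added; hypothetical handler code): a module served to an
    # AnonymousUser by a noauth handler can be re-bound once the real user is known:
    #   real_user = User.objects.get(username=posted_username)
    #   rebind_noauth_module_to_user(module, real_user)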
# Build a list of wrapping functions that will be applied in order
# to the Fragment content coming out of the xblocks that are about to be rendered.
block_wrappers = []
if is_masquerading_as_specific_student(user, course_id):
block_wrappers.append(filter_displayed_blocks)
if settings.FEATURES.get("LICENSING", False):
block_wrappers.append(wrap_with_license)
# Wrap the output display in a single div to allow for the XModule
# javascript to be bound correctly
if wrap_xmodule_display is True:
block_wrappers.append(partial(
wrap_xblock,
'LmsRuntime',
extra_data={'course-id': str(course_id)},
usage_id_serializer=lambda usage_id: quote_slashes(str(usage_id)),
request_token=request_token,
))
# TODO (cpennington): When modules are shared between courses, the static
# prefix is going to have to be specific to the module, not the directory
# that the xml was loaded from
# Rewrite urls beginning in /static to point to course-specific content
block_wrappers.append(partial(
replace_static_urls,
getattr(descriptor, 'data_dir', None),
course_id=course_id,
static_asset_path=static_asset_path or descriptor.static_asset_path
))
    # Allow URLs of the form '/course/' to refer to the root of the multicourse
    # directory hierarchy of this course
block_wrappers.append(partial(replace_course_urls, course_id))
    # This will rewrite intra-courseware links (/jump_to_id/<id>). This format
    # is an improvement over the /course/... format for studio-authored courses,
    # because it is agnostic to course hierarchy.
    # NOTE: module_id is an empty string here. The 'module_id' will get assigned in the
    # replacement function; we just need to specify something for reverse() to work.
block_wrappers.append(partial(
replace_jump_to_id_urls,
course_id,
reverse('jump_to_id', kwargs={'course_id': str(course_id), 'module_id': ''}),
))
block_wrappers.append(partial(display_access_messages, user))
block_wrappers.append(partial(course_expiration_wrapper, user))
block_wrappers.append(partial(offer_banner_wrapper, user))
if settings.FEATURES.get('DISPLAY_DEBUG_INFO_TO_STAFF'):
if is_masquerading_as_specific_student(user, course_id):
# When masquerading as a specific student, we want to show the debug button
# unconditionally to enable resetting the state of the student we are masquerading as.
# We already know the user has staff access when masquerading is active.
staff_access = True
# To figure out whether the user has instructor access, we temporarily remove the
# masquerade_settings from the real_user. With the masquerading settings in place,
# the result would always be "False".
            masquerade_settings = user.real_user.masquerade_settings
            del user.real_user.masquerade_settings
            instructor_access = bool(has_access(user.real_user, 'instructor', descriptor, course_id))
            user.real_user.masquerade_settings = masquerade_settings
        else:
            staff_access = has_access(user, 'staff', descriptor, course_id)
            instructor_access = bool(has_access(user, 'instructor', descriptor, course_id))
        if staff_access:
            block_wrappers.append(partial(add_staff_markup, user, instructor_access, disable_staff_debug_info))
# These modules store data using the anonymous_student_id as a key.
# To prevent loss of data, we will continue to provide old modules with
# the per-student anonymized id (as we have in the past),
# while giving selected modules a per-course anonymized id.
# As we have the time to manually test more modules, we can add to the list
# of modules that get the per-course anonymized id.
is_pure_xblock = isinstance(descriptor, XBlock) and not isinstance(descriptor, XModuleDescriptor)
if (is_pure_xblock and not getattr(descriptor, 'requires_per_student_anonymous_id', False)):
anonymous_student_id = anonymous_id_for_user(user, course_id)
else:
anonymous_student_id = anonymous_id_for_user(user, None)
field_data = DateLookupFieldData(descriptor._field_data, course_id, user) # pylint: disable=protected-access
field_data = LmsFieldData(field_data, student_data)
user_is_staff = bool(has_access(user, 'staff', descriptor.location, course_id))
system = LmsModuleSystem(
track_function=track_function,
render_template=render_to_string,
static_url=settings.STATIC_URL,
xqueue=xqueue,
# TODO (cpennington): Figure out how to share info between systems
filestore=descriptor.runtime.resources_fs,
get_module=inner_get_module,
user=user,
debug=settings.DEBUG,
hostname=settings.SITE_NAME,
# TODO (cpennington): This should be removed when all html from
# a module is coming through get_html and is therefore covered
# by the replace_static_urls code below
replace_urls=partial(
static_replace.replace_static_urls,
data_directory=getattr(descriptor, 'data_dir', None),
course_id=course_id,
static_asset_path=static_asset_path or descriptor.static_asset_path,
),
replace_course_urls=partial(
static_replace.replace_course_urls,
course_key=course_id
),
replace_jump_to_id_urls=partial(
static_replace.replace_jump_to_id_urls,
course_id=course_id,
jump_to_id_base_url=reverse('jump_to_id', kwargs={'course_id': str(course_id), 'module_id': ''})
),
node_path=settings.NODE_PATH,
publish=publish,
anonymous_student_id=anonymous_student_id,
course_id=course_id,
cache=cache,
can_execute_unsafe_code=(lambda: can_execute_unsafe_code(course_id)),
get_python_lib_zip=(lambda: get_python_lib_zip(contentstore, course_id)),
# TODO: When we merge the descriptor and module systems, we can stop reaching into the mixologist (cpennington)
mixins=descriptor.runtime.mixologist._mixins, # pylint: disable=protected-access
wrappers=block_wrappers,
get_real_user=user_by_anonymous_id,
services={
'fs': FSService(),
'field-data': field_data,
'user': DjangoXBlockUserService(user, user_is_staff=user_is_staff),
'verification': XBlockVerificationService(),
'proctoring': ProctoringService(),
'milestones': milestones_helpers.get_service(),
'credit': CreditService(),
'bookmarks': BookmarksService(user=user),
'gating': GatingService(),
'grade_utils': GradesUtilService(course_id=course_id),
'user_state': UserStateService(),
'content_type_gating': ContentTypeGatingService(),
},
get_user_role=lambda: get_user_role(user, course_id),
descriptor_runtime=descriptor._runtime, # pylint: disable=protected-access
rebind_noauth_module_to_user=rebind_noauth_module_to_user,
user_location=user_location,
request_token=request_token,
)
# pass position specified in URL to module through ModuleSystem
if position is not None:
try:
position = int(position)
except (ValueError, TypeError):
log.exception('Non-integer %r passed as position.', position)
position = None
system.set('position', position)
system.set('user_is_staff', user_is_staff)
system.set('user_is_admin', bool(has_access(user, 'staff', 'global')))
system.set('user_is_beta_tester', CourseBetaTesterRole(course_id).has_user(user))
system.set('days_early_for_beta', descriptor.days_early_for_beta)
# make an ErrorBlock -- assuming that the descriptor's system is ok
if has_access(user, 'staff', descriptor.location, course_id):
system.error_descriptor_class = ErrorBlock
else:
system.error_descriptor_class = NonStaffErrorBlock
return system, field_data
# TODO: Find all the places that this method is called and figure out how to
# get a loaded course passed into it
def get_module_for_descriptor_internal(user, descriptor, student_data, course_id,
track_function, xqueue_callback_url_prefix, request_token,
position=None, wrap_xmodule_display=True, grade_bucket_type=None,
static_asset_path='', user_location=None, disable_staff_debug_info=False,
course=None, will_recheck_access=False):
"""
Actually implement get_module, without requiring a request.
See get_module() docstring for further details.
Arguments:
request_token (str): A unique token for this request, used to isolate xblock rendering
"""
(system, student_data) = get_module_system_for_user(
user=user,
student_data=student_data, # These have implicit user bindings, the rest of args are considered not to
descriptor=descriptor,
course_id=course_id,
track_function=track_function,
xqueue_callback_url_prefix=xqueue_callback_url_prefix,
position=position,
wrap_xmodule_display=wrap_xmodule_display,
grade_bucket_type=grade_bucket_type,
static_asset_path=static_asset_path,
user_location=user_location,
request_token=request_token,
disable_staff_debug_info=disable_staff_debug_info,
course=course,
will_recheck_access=will_recheck_access,
)
descriptor.bind_for_student(
system,
user.id,
[
partial(DateLookupFieldData, course_id=course_id, user=user),
partial(OverrideFieldData.wrap, user, course),
partial(LmsFieldData, student_data=student_data),
],
)
descriptor.scope_ids = descriptor.scope_ids._replace(user_id=user.id)
# Do not check access when it's a noauth request.
    # Note that the access check needs to happen after the descriptor is bound
# for the student, since there may be field override data for the student
# that affects xblock visibility.
user_needs_access_check = getattr(user, 'known', True) and not isinstance(user, SystemUser)
if user_needs_access_check:
access = has_access(user, 'load', descriptor, course_id)
        # A descriptor should only be returned if the user has access, or if the user lacks access but
        # the denial carries a message for the user and the caller of this function has declared (via
        # will_recheck_access) that it will check access again. This allows blocks to show specific
        # error messages or upsells when access is denied.
caller_will_handle_access_error = (
not access
and will_recheck_access
and (access.user_message or access.user_fragment)
)
if access or caller_will_handle_access_error:
descriptor.has_access_error = bool(caller_will_handle_access_error)
return descriptor
return None
return descriptor
def load_single_xblock(request, user_id, course_id, usage_key_string, course=None, will_recheck_access=False):
"""
Load a single XBlock identified by usage_key_string.
"""
usage_key = UsageKey.from_string(usage_key_string)
course_key = CourseKey.from_string(course_id)
usage_key = usage_key.map_into_course(course_key)
user = User.objects.get(id=user_id)
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
course_key,
user,
modulestore().get_item(usage_key),
depth=0,
)
instance = get_module(
user,
request,
usage_key,
field_data_cache,
grade_bucket_type='xqueue',
course=course,
will_recheck_access=will_recheck_access
)
if instance is None:
msg = f"No module {usage_key_string} for user {user}--access denied?"
log.debug(msg)
raise Http404
return instance
@csrf_exempt
def xqueue_callback(request, course_id, userid, mod_id, dispatch):
'''
Entry point for graded results from the queueing system.
'''
data = request.POST.copy()
# Test xqueue package, which we expect to be:
# xpackage = {'xqueue_header': json.dumps({'lms_key':'secretkey',...}),
# 'xqueue_body' : 'Message from grader'}
for key in ['xqueue_header', 'xqueue_body']:
if key not in data:
raise Http404
header = json.loads(data['xqueue_header'])
if not isinstance(header, dict) or 'lms_key' not in header:
raise Http404
course_key = CourseKey.from_string(course_id)
with modulestore().bulk_operations(course_key):
course = modulestore().get_course(course_key, depth=0)
instance = load_single_xblock(request, userid, course_id, mod_id, course=course)
# Transfer 'queuekey' from xqueue response header to the data.
# This is required to use the interface defined by 'handle_ajax'
data.update({'queuekey': header['lms_key']})
# We go through the "AJAX" path
# So far, the only dispatch from xqueue will be 'score_update'
try:
# Can ignore the return value--not used for xqueue_callback
instance.handle_ajax(dispatch, data)
# Save any state that has changed to the underlying KeyValueStore
instance.save()
except:
log.exception("error processing ajax call")
raise
return HttpResponse("")
@csrf_exempt
@xframe_options_exempt
@transaction.non_atomic_requests
def handle_xblock_callback_noauth(request, course_id, usage_id, handler, suffix=None):
"""
Entry point for unauthenticated XBlock handlers.
"""
request.user.known = False
course_key = CourseKey.from_string(course_id)
with modulestore().bulk_operations(course_key):
course = modulestore().get_course(course_key, depth=0)
return _invoke_xblock_handler(request, course_id, usage_id, handler, suffix, course=course)
@csrf_exempt
@xframe_options_exempt
@transaction.non_atomic_requests
def handle_xblock_callback(request, course_id, usage_id, handler, suffix=None):
"""
Generic view for extensions. This is where AJAX calls go.
Arguments:
request (Request): Django request.
course_id (str): Course containing the block
usage_id (str)
handler (str)
suffix (str)
Raises:
HttpResponseForbidden: If the request method is not `GET` and user is not authenticated.
Http404: If the course is not found in the modulestore.
"""
# In this case, we are using Session based authentication, so we need to check CSRF token.
if request.user.is_authenticated:
error = CsrfViewMiddleware().process_view(request, None, (), {})
if error:
return error
# We are reusing DRF logic to provide support for JWT and Oauth2. We abandoned the idea of using DRF view here
# to avoid introducing backwards-incompatible changes.
# You can see https://github.com/edx/XBlock/pull/383 for more details.
else:
authentication_classes = (JwtAuthentication, BearerAuthenticationAllowInactiveUser)
authenticators = [auth() for auth in authentication_classes]
for authenticator in authenticators:
try:
user_auth_tuple = authenticator.authenticate(request)
except APIException:
log.exception(
"XBlock handler %r failed to authenticate with %s", handler, authenticator.__class__.__name__
)
else:
if user_auth_tuple is not None:
request.user, _ = user_auth_tuple
break
# NOTE (CCB): Allow anonymous GET calls (e.g. for transcripts). Modifying this view is simpler than updating
# the XBlocks to use `handle_xblock_callback_noauth`, which is practically identical to this view.
if request.method != 'GET' and not (request.user and request.user.is_authenticated):
return HttpResponseForbidden('Unauthenticated')
request.user.known = request.user.is_authenticated
try:
course_key = CourseKey.from_string(course_id)
except InvalidKeyError:
raise Http404(f'{course_id} is not a valid course key') # lint-amnesty, pylint: disable=raise-missing-from
with modulestore().bulk_operations(course_key):
try:
course = modulestore().get_course(course_key)
except ItemNotFoundError:
raise Http404(f'{course_id} does not exist in the modulestore') # lint-amnesty, pylint: disable=raise-missing-from
return _invoke_xblock_handler(request, course_id, usage_id, handler, suffix, course=course)
def _get_usage_key_for_course(course_key, usage_id) -> UsageKey:
"""
Returns UsageKey mapped into the course for a given usage_id string
"""
try:
return UsageKey.from_string(unquote_slashes(usage_id)).map_into_course(course_key)
except InvalidKeyError as exc:
raise Http404("Invalid location") from exc
def _get_descriptor_by_usage_key(usage_key):
"""
Gets a descriptor instance based on a mapped-to-course usage_key
Returns (instance, tracking_context)
"""
try:
descriptor = modulestore().get_item(usage_key)
descriptor_orig_usage_key, descriptor_orig_version = modulestore().get_block_original_usage(usage_key)
except ItemNotFoundError as exc:
log.warning(
"Invalid location for course id %s: %s",
usage_key.course_key,
usage_key
)
raise Http404 from exc
tracking_context = {
'module': {
# xss-lint: disable=python-deprecated-display-name
'display_name': descriptor.display_name_with_default_escaped,
'usage_key': str(descriptor.location),
}
}
# For blocks that are inherited from a content library, we add some additional metadata:
if descriptor_orig_usage_key is not None:
tracking_context['module']['original_usage_key'] = str(descriptor_orig_usage_key)
tracking_context['module']['original_usage_version'] = str(descriptor_orig_version)
return descriptor, tracking_context
def get_module_by_usage_id(request, course_id, usage_id, disable_staff_debug_info=False, course=None,
will_recheck_access=False):
"""
Gets a module instance based on its `usage_id` in a course, for a given request/user
Returns (instance, tracking_context)
"""
course_key = CourseKey.from_string(course_id)
usage_key = _get_usage_key_for_course(course_key, usage_id)
descriptor, tracking_context = _get_descriptor_by_usage_key(usage_key)
_, user = setup_masquerade(request, course_key, has_access(request.user, 'staff', descriptor, course_key))
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
course_key,
user,
descriptor,
read_only=CrawlersConfig.is_crawler(request),
)
instance = get_module_for_descriptor(
user,
request,
descriptor,
field_data_cache,
usage_key.course_key,
disable_staff_debug_info=disable_staff_debug_info,
course=course,
will_recheck_access=will_recheck_access,
)
if instance is None:
# Either permissions just changed, or someone is trying to be clever
# and load something they shouldn't have access to.
log.debug("No module %s for user %s -- access denied?", usage_key, user)
raise Http404
return instance, tracking_context
def _invoke_xblock_handler(request, course_id, usage_id, handler, suffix, course=None):
"""
Invoke an XBlock handler, either authenticated or not.
Arguments:
request (HttpRequest): the current request
course_id (str): A string of the form org/course/run
usage_id (str): A string of the form i4x://org/course/category/name@revision
handler (str): The name of the handler to invoke
suffix (str): The suffix to pass to the handler when invoked
"""
# Check submitted files
files = request.FILES or {}
error_msg = _check_files_limits(files)
if error_msg:
return JsonResponse({'success': error_msg}, status=413)
# Make a CourseKey from the course_id, raising a 404 upon parse error.
try:
course_key = CourseKey.from_string(course_id)
except InvalidKeyError as exc:
raise Http404 from exc
set_custom_attributes_for_course_key(course_key)
with modulestore().bulk_operations(course_key):
usage_key = _get_usage_key_for_course(course_key, usage_id)
if is_xblock_aside(usage_key):
# Get the usage key for the block being wrapped by the aside (not the aside itself)
block_usage_key = usage_key.usage_key
else:
block_usage_key = usage_key
# Peek at the handler method to see if it actually wants to check access itself. (The handler may not want
# inaccessible blocks stripped from the tree.) This ends up doing two modulestore lookups for the descriptor,
# but the blocks should be available in the request cache the second time.
# At the time of writing, this is only used by one handler. If this usage grows, we may want to re-evaluate
# how we do this to something more elegant. If you are the author of a third party block that decides it wants
# to set this too, please let us know so we can consider making this easier / better-documented.
descriptor, _ = _get_descriptor_by_usage_key(block_usage_key)
handler_method = getattr(descriptor, handler, False)
will_recheck_access = handler_method and getattr(handler_method, 'will_recheck_access', False)
instance, tracking_context = get_module_by_usage_id(
request, course_id, str(block_usage_key), course=course, will_recheck_access=will_recheck_access,
)
# Name the transaction so that we can view XBlock handlers separately in
# New Relic. The suffix is necessary for XModule handlers because the
# "handler" in those cases is always just "xmodule_handler".
nr_tx_name = f"{instance.__class__.__name__}.{handler}"
nr_tx_name += f"/{suffix}" if (suffix and handler == "xmodule_handler") else ""
set_monitoring_transaction_name(nr_tx_name, group="Python/XBlock/Handler")
tracking_context_name = 'module_callback_handler'
req = django_to_webob_request(request)
try:
with tracker.get_tracker().context(tracking_context_name, tracking_context):
if is_xblock_aside(usage_key):
# In this case, 'instance' is the XBlock being wrapped by the aside, so
# the actual aside instance needs to be retrieved in order to invoke its
# handler method.
handler_instance = get_aside_from_xblock(instance, usage_key.aside_type)
else:
handler_instance = instance
resp = handler_instance.handle(handler, req, suffix)
if suffix == 'problem_check' \
and course \
and getattr(course, 'entrance_exam_enabled', False) \
and getattr(instance, 'in_entrance_exam', False):
ee_data = {'entrance_exam_passed': user_has_passed_entrance_exam(request.user, course)}
resp = append_data_to_webob_response(resp, ee_data)
except NoSuchHandlerError:
log.exception("XBlock %s attempted to access missing handler %r", instance, handler)
raise Http404 # lint-amnesty, pylint: disable=raise-missing-from
# If we can't find the module, respond with a 404
except NotFoundError:
log.exception("Module indicating to user that request doesn't exist")
raise Http404 # lint-amnesty, pylint: disable=raise-missing-from
# For XModule-specific errors, we log the error and respond with an error message
except ProcessingError as err:
log.warning("Module encountered an error while processing AJAX call",
exc_info=True)
return JsonResponse({'success': err.args[0]}, status=200)
# If any other error occurred, re-raise it to trigger a 500 response
except Exception:
log.exception("error executing xblock handler")
raise
return webob_to_django_response(resp)
@api_view(['GET'])
@view_auth_classes(is_authenticated=True)
def xblock_view(request, course_id, usage_id, view_name):
"""
Returns the rendered view of a given XBlock, with related resources
Returns a json object containing two keys:
html: The rendered html of the view
resources: A list of tuples where the first element is the resource hash, and
the second is the resource description
"""
if not settings.FEATURES.get('ENABLE_XBLOCK_VIEW_ENDPOINT', False):
log.warning("Attempt to use deactivated XBlock view endpoint -"
" see FEATURES['ENABLE_XBLOCK_VIEW_ENDPOINT']")
raise Http404
try:
course_key = CourseKey.from_string(course_id)
except InvalidKeyError:
raise Http404("Invalid location") # lint-amnesty, pylint: disable=raise-missing-from
with modulestore().bulk_operations(course_key):
course = modulestore().get_course(course_key)
instance, _ = get_module_by_usage_id(request, course_id, usage_id, course=course)
try:
fragment = instance.render(view_name, context=request.GET)
except NoSuchViewError:
log.exception("Attempt to render missing view on %s: %s", instance, view_name)
raise Http404 # lint-amnesty, pylint: disable=raise-missing-from
hashed_resources = OrderedDict()
for resource in fragment.resources:
hashed_resources[hash_resource(resource)] = resource
return JsonResponse({
'html': fragment.content,
'resources': list(hashed_resources.items()),
'csrf_token': str(csrf(request)['csrf_token']),
})
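# Example response shape (illustrative values) produced by xblock_view:
#   {
#       "html": "<div class=\"xblock\">...</div>",
#       "resources": [["<sha1-of-resource>", <resource description>], ...],
#       "csrf_token": "..."
#   }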
def _check_files_limits(files):
"""
Check if the files in a request are under the limits defined by
`settings.MAX_FILEUPLOADS_PER_INPUT` and
`settings.STUDENT_FILEUPLOAD_MAX_SIZE`.
    Returns None if the files are within limits, or an error message otherwise.
"""
for fileinput_id in files.keys():
inputfiles = files.getlist(fileinput_id)
# Check number of files submitted
if len(inputfiles) > settings.MAX_FILEUPLOADS_PER_INPUT:
msg = 'Submission aborted! Maximum %d files may be submitted at once' % \
settings.MAX_FILEUPLOADS_PER_INPUT
return msg
# Check file sizes
for inputfile in inputfiles:
if inputfile.size > settings.STUDENT_FILEUPLOAD_MAX_SIZE: # Bytes
msg = 'Submission aborted! Your file "%s" is too large (max size: %d MB)' % \
(inputfile.name, settings.STUDENT_FILEUPLOAD_MAX_SIZE / (1000 ** 2))
return msg
return None
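# Illustrative usage (added), mirroring the call in _invoke_xblock_handler above:
#   error_msg = _check_files_limits(request.FILES or {})
#   if error_msg:
#       return JsonResponse({'success': error_msg}, status=413)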
def append_data_to_webob_response(response, data):
"""
Appends data to a JSON webob response.
Arguments:
response (webob response object): the webob response object that needs to be modified
data (dict): dictionary containing data that needs to be appended to response body
Returns:
(webob response object): webob response with updated body.
"""
if getattr(response, 'content_type', None) == 'application/json':
json_input = response.body.decode('utf-8') if isinstance(response.body, bytes) else response.body
response_data = json.loads(json_input)
response_data.update(data)
response.body = json.dumps(response_data).encode('utf-8')
return response
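# Worked example (illustrative): given a JSON webob response whose body is
# '{"success": "correct"}' and data={'entrance_exam_passed': True}, the body
# becomes '{"success": "correct", "entrance_exam_passed": true}'.
# Responses without content_type 'application/json' pass through unchanged.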
|
Krossom/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/distutils/log.py
|
163
|
"""A simple log mechanism styled after PEP 282."""
# The class here is styled after PEP 282 so that it could later be
# replaced with a standard Python logging implementation.
DEBUG = 1
INFO = 2
WARN = 3
ERROR = 4
FATAL = 5
import sys
class Log:
def __init__(self, threshold=WARN):
self.threshold = threshold
def _log(self, level, msg, args):
if level not in (DEBUG, INFO, WARN, ERROR, FATAL):
raise ValueError('%s wrong log level' % str(level))
if level >= self.threshold:
if args:
msg = msg % args
if level in (WARN, ERROR, FATAL):
stream = sys.stderr
else:
stream = sys.stdout
if stream.errors == 'strict':
# emulate backslashreplace error handler
encoding = stream.encoding
msg = msg.encode(encoding, "backslashreplace").decode(encoding)
stream.write('%s\n' % msg)
stream.flush()
def log(self, level, msg, *args):
self._log(level, msg, args)
def debug(self, msg, *args):
self._log(DEBUG, msg, args)
def info(self, msg, *args):
self._log(INFO, msg, args)
def warn(self, msg, *args):
self._log(WARN, msg, args)
def error(self, msg, *args):
self._log(ERROR, msg, args)
def fatal(self, msg, *args):
self._log(FATAL, msg, args)
_global_log = Log()
log = _global_log.log
debug = _global_log.debug
info = _global_log.info
warn = _global_log.warn
error = _global_log.error
fatal = _global_log.fatal
def set_threshold(level):
# return the old threshold for use from tests
old = _global_log.threshold
_global_log.threshold = level
return old
def set_verbosity(v):
if v <= 0:
set_threshold(WARN)
elif v == 1:
set_threshold(INFO)
elif v >= 2:
set_threshold(DEBUG)
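# Minimal usage sketch (added for illustration; not part of the distutils module):
if __name__ == '__main__':
    set_verbosity(2)            # threshold becomes DEBUG, so everything prints
    debug("debug goes to stdout: %s", "ok")
    warn("warnings go to stderr")
    old = set_threshold(ERROR)  # temporarily silence anything below ERROR
    info("suppressed")          # below threshold, not printed
    set_threshold(old)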
|
MariaPet/sqlparse
|
refs/heads/master
|
extras/buildbot/googlecode_atom.py
|
12
|
# GoogleCode Atom Feed Poller
# Author: Srivats P. <pstavirs>
# Based on Mozilla's HgPoller
# http://bonsai.mozilla.org/cvsblame.cgi?file=/mozilla/tools/buildbot/buildbot/changes/Attic/hgpoller.py&revision=1.1.4.2
#
# Description:
# Use this ChangeSource for projects hosted on http://code.google.com/
#
# This ChangeSource uses the project's commit Atom feed. Depending upon the
# frequency of commits, you can tune the polling interval for the feed
# (default is 1 hour)
#
# Parameters:
# feedurl (MANDATORY): The Atom feed URL of the GoogleCode repo
# pollinterval (OPTIONAL): Polling frequency for the feed (in seconds)
#
# Example:
# To poll the Ostinato project's commit feed every 3 hours, use -
# from googlecode_atom import GoogleCodeAtomPoller
# poller = GoogleCodeAtomPoller(
# feedurl="http://code.google.com/feeds/p/ostinato/hgchanges/basic",
# pollinterval=10800)
# c['change_source'] = [ poller ]
#
from time import strptime
from calendar import timegm
from xml.dom import minidom, Node
from twisted.python import log, failure
from twisted.internet import defer, reactor
from twisted.internet.task import LoopingCall
from twisted.web.client import getPage
from buildbot.changes import base, changes
def googleCodePollerForProject(project, vcs, pollinterval=3600):
return GoogleCodeAtomPoller(
'http://code.google.com/feeds/p/%s/%schanges/basic' % (project, vcs),
pollinterval=pollinterval)
class GoogleCodeAtomPoller(base.ChangeSource):
"""This source will poll a GoogleCode Atom feed for changes and
submit them to the change master. Works for both Svn and Hg repos.
TODO: branch processing
"""
compare_attrs = ['feedurl', 'pollinterval']
parent = None
loop = None
volatile = ['loop']
working = False
def __init__(self, feedurl, pollinterval=3600):
"""
@type feedurl: string
@param feedurl: The Atom feed URL of the GoogleCode repo
(e.g. http://code.google.com/feeds/p/ostinato/hgchanges/basic)
@type pollinterval: int
@param pollinterval: The time (in seconds) between queries for
changes (default is 1 hour)
"""
self.feedurl = feedurl
self.branch = None
self.pollinterval = pollinterval
self.lastChange = None
self.loop = LoopingCall(self.poll)
def startService(self):
log.msg("GoogleCodeAtomPoller starting")
base.ChangeSource.startService(self)
reactor.callLater(0, self.loop.start, self.pollinterval)
def stopService(self):
log.msg("GoogleCodeAtomPoller stoppping")
self.loop.stop()
return base.ChangeSource.stopService(self)
def describe(self):
return ("Getting changes from the GoogleCode repo changes feed %s" %
self._make_url())
def poll(self):
if self.working:
log.msg("Not polling because last poll is still working")
else:
self.working = True
d = self._get_changes()
d.addCallback(self._process_changes)
d.addCallbacks(self._finished_ok, self._finished_failure)
def _finished_ok(self, res):
assert self.working
self.working = False
log.msg("GoogleCodeAtomPoller poll success")
return res
def _finished_failure(self, res):
log.msg("GoogleCodeAtomPoller poll failed: %s" % res)
assert self.working
self.working = False
return None
def _make_url(self):
return "%s" % (self.feedurl)
def _get_changes(self):
url = self._make_url()
log.msg("GoogleCodeAtomPoller polling %s" % url)
return getPage(url, timeout=self.pollinterval)
def _parse_changes(self, query):
dom = minidom.parseString(query)
entries = dom.getElementsByTagName("entry")
changes = []
# Entries come in reverse chronological order
for i in entries:
d = {}
# revision is the last part of the 'id' url
d["revision"] = i.getElementsByTagName(
"id")[0].firstChild.data.split('/')[-1]
if d["revision"] == self.lastChange:
break # no more new changes
d["when"] = timegm(strptime(
i.getElementsByTagName("updated")[0].firstChild.data,
"%Y-%m-%dT%H:%M:%SZ"))
d["author"] = i.getElementsByTagName(
"author")[0].getElementsByTagName("name")[0].firstChild.data
# files and commit msg are separated by 2 consecutive <br/>
content = i.getElementsByTagName(
"content")[0].firstChild.data.split("<br/>\n <br/>")
# Remove the action keywords from the file list
fl = content[0].replace(
u' \xa0\xa0\xa0\xa0Add\xa0\xa0\xa0\xa0', '').replace(
u' \xa0\xa0\xa0\xa0Delete\xa0\xa0\xa0\xa0', '').replace(
u' \xa0\xa0\xa0\xa0Modify\xa0\xa0\xa0\xa0', '')
# Get individual files and remove the 'header'
d["files"] = fl.encode("ascii", "replace").split("<br/>")[1:]
d["files"] = [f.strip() for f in d["files"]]
try:
d["comments"] = content[1].encode("ascii", "replace")
            except IndexError:
                # No second part after the <br/> split means no commit message
d["comments"] = "No commit message provided"
changes.append(d)
changes.reverse() # want them in chronological order
return changes
def _process_changes(self, query):
change_list = self._parse_changes(query)
# Skip calling addChange() if this is the first successful poll.
if self.lastChange is not None:
for change in change_list:
c = changes.Change(revision = change["revision"],
who = change["author"],
files = change["files"],
comments = change["comments"],
when = change["when"],
branch = self.branch)
self.parent.addChange(c)
if change_list:
self.lastChange = change_list[-1]["revision"]
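    # Illustrative sketch (added; hypothetical values): each feed <entry> is reduced
    # by _parse_changes to a dict such as
    #   {'revision': '3a9f2c1d77ab', 'when': 1295519400, 'author': 'pstavirs',
    #    'files': ['rpc/pbrpc.py'], 'comments': 'Fix reconnect logic'}
    # which _process_changes wraps in a buildbot changes.Change.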
|
TheTimmy/spack
|
refs/heads/develop
|
var/spack/repos/builtin/packages/font-misc-cyrillic/package.py
|
3
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class FontMiscCyrillic(Package):
"""X.org misc-cyrillic font."""
homepage = "http://cgit.freedesktop.org/xorg/font/misc-cyrillic"
url = "https://www.x.org/archive/individual/font/font-misc-cyrillic-1.0.3.tar.gz"
version('1.0.3', 'e7b13da5325f62dd3f630beade6d2656')
depends_on('font-util')
depends_on('fontconfig', type='build')
depends_on('mkfontdir', type='build')
depends_on('bdftopcf', type='build')
depends_on('pkg-config@0.9.0:', type='build')
depends_on('util-macros', type='build')
def install(self, spec, prefix):
configure('--prefix={0}'.format(prefix))
make()
make('install')
# `make install` copies the files to the font-util installation.
# Create a fake directory to convince Spack that we actually
# installed something.
mkdir(prefix.lib)
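    # Illustrative usage (added): with this package file on Spack's builtin repo
    # path, the font installs via the standard CLI:
    #   spack install font-misc-cyrillic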
|
yuanzhao/gpdb
|
refs/heads/master
|
src/test/tinc/ext/unittest2/runner.py
|
164
|
"""Running tests"""
import sys
import time
import unittest
from unittest2 import result
try:
from unittest2.signals import registerResult
except ImportError:
def registerResult(_):
pass
__unittest = True
class _WritelnDecorator(object):
"""Used to decorate file-like objects with a handy 'writeln' method"""
def __init__(self,stream):
self.stream = stream
def __getattr__(self, attr):
if attr in ('stream', '__getstate__'):
raise AttributeError(attr)
return getattr(self.stream,attr)
def writeln(self, arg=None):
if arg:
self.write(arg)
self.write('\n') # text-mode streams translate to \r\n if needed
class TextTestResult(result.TestResult):
"""A test result class that can print formatted text results to a stream.
Used by TextTestRunner.
"""
separator1 = '=' * 70
separator2 = '-' * 70
def __init__(self, stream, descriptions, verbosity):
super(TextTestResult, self).__init__()
self.stream = stream
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.descriptions = descriptions
def getDescription(self, test):
doc_first_line = test.shortDescription()
if self.descriptions and doc_first_line:
return '\n'.join((str(test), doc_first_line))
else:
return str(test)
def startTest(self, test):
super(TextTestResult, self).startTest(test)
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
self.stream.flush()
def addSuccess(self, test):
super(TextTestResult, self).addSuccess(test)
if self.showAll:
self.stream.writeln("ok")
elif self.dots:
self.stream.write('.')
self.stream.flush()
def addError(self, test, err):
super(TextTestResult, self).addError(test, err)
if self.showAll:
self.stream.writeln("ERROR")
elif self.dots:
self.stream.write('E')
self.stream.flush()
def addFailure(self, test, err):
super(TextTestResult, self).addFailure(test, err)
if self.showAll:
self.stream.writeln("FAIL")
elif self.dots:
self.stream.write('F')
self.stream.flush()
def addSkip(self, test, reason):
super(TextTestResult, self).addSkip(test, reason)
if self.showAll:
self.stream.writeln("skipped %r" % (reason,))
elif self.dots:
self.stream.write("s")
self.stream.flush()
def addExpectedFailure(self, test, err):
super(TextTestResult, self).addExpectedFailure(test, err)
if self.showAll:
self.stream.writeln("expected failure")
elif self.dots:
self.stream.write("x")
self.stream.flush()
def addUnexpectedSuccess(self, test):
super(TextTestResult, self).addUnexpectedSuccess(test)
if self.showAll:
self.stream.writeln("unexpected success")
elif self.dots:
self.stream.write("u")
self.stream.flush()
def printErrors(self):
if self.dots or self.showAll:
self.stream.writeln()
self.printErrorList('ERROR', self.errors)
self.printErrorList('FAIL', self.failures)
def printErrorList(self, flavour, errors):
for test, err in errors:
self.stream.writeln(self.separator1)
self.stream.writeln("%s: %s" % (flavour, self.getDescription(test)))
self.stream.writeln(self.separator2)
self.stream.writeln("%s" % err)
def stopTestRun(self):
super(TextTestResult, self).stopTestRun()
self.printErrors()
class TextTestRunner(unittest.TextTestRunner):
"""A test runner class that displays results in textual form.
It prints out the names of tests as they are run, errors as they
occur, and a summary of the results at the end of the test run.
"""
resultclass = TextTestResult
def __init__(self, stream=sys.stderr, descriptions=True, verbosity=1,
failfast=False, buffer=False, resultclass=None):
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
self.failfast = failfast
self.buffer = buffer
if resultclass is not None:
self.resultclass = resultclass
def _makeResult(self):
return self.resultclass(self.stream, self.descriptions, self.verbosity)
def run(self, test):
"Run the given test case or test suite."
result = self._makeResult()
result.failfast = self.failfast
result.buffer = self.buffer
registerResult(result)
startTime = time.time()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
try:
test(result)
finally:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
else:
result.printErrors()
stopTime = time.time()
timeTaken = stopTime - startTime
if hasattr(result, 'separator2'):
self.stream.writeln(result.separator2)
run = result.testsRun
self.stream.writeln("Ran %d test%s in %.3fs" %
(run, run != 1 and "s" or "", timeTaken))
self.stream.writeln()
expectedFails = unexpectedSuccesses = skipped = 0
try:
results = map(len, (result.expectedFailures,
result.unexpectedSuccesses,
result.skipped))
expectedFails, unexpectedSuccesses, skipped = results
except AttributeError:
pass
infos = []
if not result.wasSuccessful():
self.stream.write("FAILED")
failed, errored = map(len, (result.failures, result.errors))
if failed:
infos.append("failures=%d" % failed)
if errored:
infos.append("errors=%d" % errored)
else:
self.stream.write("OK")
if skipped:
infos.append("skipped=%d" % skipped)
if expectedFails:
infos.append("expected failures=%d" % expectedFails)
if unexpectedSuccesses:
infos.append("unexpected successes=%d" % unexpectedSuccesses)
if infos:
self.stream.writeln(" (%s)" % (", ".join(infos),))
else:
self.stream.write("\n")
return result
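# Minimal usage sketch (added for illustration; not part of unittest2):
if __name__ == '__main__':
    class _DemoTest(unittest.TestCase):
        def test_passes(self):
            self.assertTrue(True)
    demo_suite = unittest.TestLoader().loadTestsFromTestCase(_DemoTest)
    TextTestRunner(verbosity=2).run(demo_suite)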
|
JioCloud/horizon
|
refs/heads/master
|
openstack_dashboard/dashboards/project/instances/tables.py
|
1
|
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.conf import settings
from django.core import urlresolvers
from django.http import HttpResponse # noqa
from django import shortcuts
from django import template
from django.template.defaultfilters import title # noqa
from django.utils.http import urlencode
from django.utils.translation import string_concat # noqa
from django.utils.translation import ugettext_lazy as _
from horizon import conf
from horizon import exceptions
from horizon import messages
from horizon import tables
from horizon.templatetags import sizeformat
from horizon.utils import filters
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.access_and_security.floating_ips \
import workflows
from openstack_dashboard.dashboards.project.instances import tabs
LOG = logging.getLogger(__name__)
ACTIVE_STATES = ("ACTIVE",)
VOLUME_ATTACH_READY_STATES = ("ACTIVE", "SHUTOFF")
SNAPSHOT_READY_STATES = ("ACTIVE", "SHUTOFF", "PAUSED", "SUSPENDED")
POWER_STATES = {
0: "NO STATE",
1: "RUNNING",
2: "BLOCKED",
3: "PAUSED",
4: "SHUTDOWN",
5: "SHUTOFF",
6: "CRASHED",
7: "SUSPENDED",
8: "FAILED",
9: "BUILDING",
}
PAUSE = 0
UNPAUSE = 1
SUSPEND = 0
RESUME = 1
def is_deleting(instance):
task_state = getattr(instance, "OS-EXT-STS:task_state", None)
if not task_state:
return False
return task_state.lower() == "deleting"
class TerminateInstance(tables.BatchAction):
name = "terminate"
action_present = _("Terminate")
action_past = _("Scheduled termination of %(data_type)s")
data_type_singular = _("Instance")
data_type_plural = _("Instances")
classes = ("ajax-modal", "btn-danger",)
icon = "off"
policy_rules = (("compute", "compute:delete"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance=None):
"""Allow terminate action if instance not currently being deleted."""
return not is_deleting(instance)
def action(self, request, obj_id):
api.nova.server_delete(request, obj_id)
class RebootInstance(tables.BatchAction):
name = "reboot"
action_present = _("Hard Reboot")
action_past = _("Hard Rebooted")
data_type_singular = _("Instance")
data_type_plural = _("Instances")
classes = ('btn-danger', 'btn-reboot')
policy_rules = (("compute", "compute:reboot"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance=None):
if instance is not None:
return ((instance.status in ACTIVE_STATES
or instance.status == 'SHUTOFF')
and not is_deleting(instance))
else:
return True
def action(self, request, obj_id):
api.nova.server_reboot(request, obj_id, soft_reboot=False)
class SoftRebootInstance(RebootInstance):
name = "soft_reboot"
action_present = _("Soft Reboot")
action_past = _("Soft Rebooted")
def action(self, request, obj_id):
api.nova.server_reboot(request, obj_id, soft_reboot=True)
class TogglePause(tables.BatchAction):
name = "pause"
action_present = (_("Pause"), _("Resume"))
action_past = (_("Paused"), _("Resumed"))
data_type_singular = _("Instance")
data_type_plural = _("Instances")
icon = "pause"
def allowed(self, request, instance=None):
if not api.nova.extension_supported('AdminActions',
request):
return False
if not instance:
return False
self.paused = instance.status == "PAUSED"
if self.paused:
self.current_present_action = UNPAUSE
policy = (("compute", "compute_extension:admin_actions:unpause"),)
else:
self.current_present_action = PAUSE
policy = (("compute", "compute_extension:admin_actions:pause"),)
has_permission = True
policy_check = getattr(settings, "POLICY_CHECK_FUNCTION", None)
if policy_check:
has_permission = policy_check(policy, request,
target={'project_id': getattr(instance, 'tenant_id', None)})
return (has_permission
and (instance.status in ACTIVE_STATES or self.paused)
and not is_deleting(instance))
def action(self, request, obj_id):
if self.paused:
api.nova.server_unpause(request, obj_id)
self.current_past_action = UNPAUSE
else:
api.nova.server_pause(request, obj_id)
self.current_past_action = PAUSE
class ToggleSuspend(tables.BatchAction):
name = "suspend"
action_present = (_("Suspend"), _("Resume"))
action_past = (_("Suspended"), _("Resumed"))
data_type_singular = _("Instance")
data_type_plural = _("Instances")
classes = ("btn-suspend",)
def allowed(self, request, instance=None):
if not api.nova.extension_supported('AdminActions',
request):
return False
if not instance:
return False
self.suspended = instance.status == "SUSPENDED"
if self.suspended:
self.current_present_action = RESUME
policy = (("compute", "compute_extension:admin_actions:resume"),)
else:
self.current_present_action = SUSPEND
policy = (("compute", "compute_extension:admin_actions:suspend"),)
has_permission = True
policy_check = getattr(settings, "POLICY_CHECK_FUNCTION", None)
if policy_check:
has_permission = policy_check(policy, request,
target={'project_id': getattr(instance, 'tenant_id', None)})
return (has_permission
and (instance.status in ACTIVE_STATES or self.suspended)
and not is_deleting(instance))
def action(self, request, obj_id):
if self.suspended:
api.nova.server_resume(request, obj_id)
self.current_past_action = RESUME
else:
api.nova.server_suspend(request, obj_id)
self.current_past_action = SUSPEND
class LaunchLink(tables.LinkAction):
name = "launch"
verbose_name = _("Launch Instance")
url = "horizon:project:instances:launch"
classes = ("ajax-modal", "btn-launch")
icon = "cloud-upload"
policy_rules = (("compute", "compute:create"),)
ajax = True
def __init__(self, attrs=None, **kwargs):
kwargs['preempt'] = True
super(LaunchLink, self).__init__(attrs, **kwargs)
def allowed(self, request, datum):
try:
limits = api.nova.tenant_absolute_limits(request, reserved=True)
instances_available = limits['maxTotalInstances'] \
- limits['totalInstancesUsed']
cores_available = limits['maxTotalCores'] \
- limits['totalCoresUsed']
ram_available = limits['maxTotalRAMSize'] - limits['totalRAMUsed']
if instances_available <= 0 or cores_available <= 0 \
or ram_available <= 0:
if "disabled" not in self.classes:
self.classes = [c for c in self.classes] + ['disabled']
self.verbose_name = string_concat(self.verbose_name, ' ',
_("(Quota exceeded)"))
else:
self.verbose_name = _("Launch Instance")
classes = [c for c in self.classes if c != "disabled"]
self.classes = classes
except Exception:
LOG.exception("Failed to retrieve quota information")
# If we can't get the quota information, leave it to the
# API to check when launching
return True # The action should always be displayed
def single(self, table, request, object_id=None):
self.allowed(request, None)
return HttpResponse(self.render())
class EditInstance(tables.LinkAction):
name = "edit"
verbose_name = _("Edit Instance")
url = "horizon:project:instances:update"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (("compute", "compute:update"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def get_link_url(self, project):
return self._get_link_url(project, 'instance_info')
def _get_link_url(self, project, step_slug):
base_url = urlresolvers.reverse(self.url, args=[project.id])
param = urlencode({"step": step_slug})
return "?".join([base_url, param])
def allowed(self, request, instance):
return not is_deleting(instance)
class EditInstanceSecurityGroups(EditInstance):
name = "edit_secgroups"
verbose_name = _("Edit Security Groups")
def get_link_url(self, project):
return self._get_link_url(project, 'update_security_groups')
def allowed(self, request, instance=None):
return (instance.status in ACTIVE_STATES and
not is_deleting(instance) and
request.user.tenant_id == instance.tenant_id)
class CreateSnapshot(tables.LinkAction):
name = "snapshot"
verbose_name = _("Create Snapshot")
url = "horizon:project:images:snapshots:create"
classes = ("ajax-modal",)
icon = "camera"
policy_rules = (("compute", "compute:snapshot"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance=None):
return instance.status in SNAPSHOT_READY_STATES \
and not is_deleting(instance)
class ConsoleLink(tables.LinkAction):
name = "console"
verbose_name = _("Console")
url = "horizon:project:instances:detail"
classes = ("btn-console",)
policy_rules = (("compute", "compute_extension:consoles"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance=None):
return instance.status in ACTIVE_STATES and not is_deleting(instance)
def get_link_url(self, datum):
base_url = super(ConsoleLink, self).get_link_url(datum)
tab_query_string = tabs.ConsoleTab(
tabs.InstanceDetailTabs).get_query_string()
return "?".join([base_url, tab_query_string])
class LogLink(tables.LinkAction):
name = "log"
verbose_name = _("View Log")
url = "horizon:project:instances:detail"
classes = ("btn-log",)
policy_rules = (("compute", "compute_extension:console_output"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance=None):
return instance.status in ACTIVE_STATES and not is_deleting(instance)
def get_link_url(self, datum):
base_url = super(LogLink, self).get_link_url(datum)
tab_query_string = tabs.LogTab(
tabs.InstanceDetailTabs).get_query_string()
return "?".join([base_url, tab_query_string])
class ResizeLink(tables.LinkAction):
name = "resize"
verbose_name = _("Resize Instance")
url = "horizon:project:instances:resize"
classes = ("ajax-modal", "btn-resize")
policy_rules = (("compute", "compute:resize"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def get_link_url(self, project):
return self._get_link_url(project, 'flavor_choice')
def _get_link_url(self, project, step_slug):
base_url = urlresolvers.reverse(self.url, args=[project.id])
param = urlencode({"step": step_slug})
return "?".join([base_url, param])
def allowed(self, request, instance):
return ((instance.status in ACTIVE_STATES
or instance.status == 'SHUTOFF')
and not is_deleting(instance))
class ConfirmResize(tables.Action):
name = "confirm"
verbose_name = _("Confirm Resize/Migrate")
classes = ("btn-confirm", "btn-action-required")
policy_rules = (("compute", "compute:confirm_resize"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance):
return instance.status == 'VERIFY_RESIZE'
def single(self, table, request, instance):
api.nova.server_confirm_resize(request, instance)
class RevertResize(tables.Action):
name = "revert"
verbose_name = _("Revert Resize/Migrate")
classes = ("btn-revert", "btn-action-required")
policy_rules = (("compute", "compute:revert_resize"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance):
return instance.status == 'VERIFY_RESIZE'
def single(self, table, request, instance):
api.nova.server_revert_resize(request, instance)
class RebuildInstance(tables.LinkAction):
name = "rebuild"
verbose_name = _("Rebuild Instance")
classes = ("btn-rebuild", "ajax-modal")
url = "horizon:project:instances:rebuild"
policy_rules = (("compute", "compute:rebuild"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance):
return ((instance.status in ACTIVE_STATES
or instance.status == 'SHUTOFF')
and not is_deleting(instance))
def get_link_url(self, datum):
instance_id = self.table.get_object_id(datum)
return urlresolvers.reverse(self.url, args=[instance_id])
class DecryptInstancePassword(tables.LinkAction):
name = "decryptpassword"
verbose_name = _("Retrieve Password")
classes = ("btn-decrypt", "ajax-modal")
url = "horizon:project:instances:decryptpassword"
def allowed(self, request, instance):
enable = getattr(settings,
'OPENSTACK_ENABLE_PASSWORD_RETRIEVE',
False)
return (enable
and (instance.status in ACTIVE_STATES
or instance.status == 'SHUTOFF')
and not is_deleting(instance)
and get_keyname(instance) is not None)
def get_link_url(self, datum):
instance_id = self.table.get_object_id(datum)
keypair_name = get_keyname(datum)
return urlresolvers.reverse(self.url, args=[instance_id,
keypair_name])
class AssociateIP(tables.LinkAction):
name = "associate"
verbose_name = _("Associate Floating IP")
url = "horizon:project:access_and_security:floating_ips:associate"
classes = ("ajax-modal",)
icon = "link"
policy_rules = (("compute", "network:associate_floating_ip"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance):
if api.network.floating_ip_simple_associate_supported(request):
return False
return not is_deleting(instance)
def get_link_url(self, datum):
base_url = urlresolvers.reverse(self.url)
next = urlresolvers.reverse("horizon:project:instances:index")
params = {"instance_id": self.table.get_object_id(datum),
workflows.IPAssociationWorkflow.redirect_param_name: next}
params = urlencode(params)
return "?".join([base_url, params])
class SimpleAssociateIP(tables.Action):
name = "associate-simple"
verbose_name = _("Associate Floating IP")
icon = "link"
policy_rules = (("compute", "network:associate_floating_ip"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance):
if not api.network.floating_ip_simple_associate_supported(request):
return False
return not is_deleting(instance)
def single(self, table, request, instance_id):
try:
# target_id is port_id for Neutron and instance_id for Nova Network
# (Neutron API wrapper returns a 'portid_fixedip' string)
target_id = api.network.floating_ip_target_get_by_instance(
request, instance_id).split('_')[0]
fip = api.network.tenant_floating_ip_allocate(request)
api.network.floating_ip_associate(request, fip.id, target_id)
messages.success(request,
_("Successfully associated floating IP: %s")
% fip.ip)
except Exception:
exceptions.handle(request,
_("Unable to associate floating IP."))
return shortcuts.redirect("horizon:project:instances:index")
class SimpleDisassociateIP(tables.Action):
name = "disassociate"
verbose_name = _("Disassociate Floating IP")
classes = ("btn-danger", "btn-disassociate",)
policy_rules = (("compute", "network:disassociate_floating_ip"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance):
if not conf.HORIZON_CONFIG["simple_ip_management"]:
return False
return not is_deleting(instance)
def single(self, table, request, instance_id):
try:
# target_id is port_id for Neutron and instance_id for Nova Network
# (Neutron API wrapper returns a 'portid_fixedip' string)
targets = api.network.floating_ip_target_list_by_instance(
request, instance_id)
target_ids = [t.split('_')[0] for t in targets]
fips = [fip for fip in api.network.tenant_floating_ip_list(request)
if fip.port_id in target_ids]
# Removing multiple floating IPs at once doesn't work, so this pops
# off the first one.
if fips:
fip = fips.pop()
api.network.floating_ip_disassociate(request,
fip.id, fip.port_id)
messages.success(request,
_("Successfully disassociated "
"floating IP: %s") % fip.ip)
else:
messages.info(request, _("No floating IPs to disassociate."))
except Exception:
exceptions.handle(request,
_("Unable to disassociate floating IP."))
return shortcuts.redirect("horizon:project:instances:index")
def instance_fault_to_friendly_message(instance):
fault = getattr(instance, 'fault', {})
message = fault.get('message', _("Unknown"))
default_message = _("Please try again later [Error: %s].") % message
fault_map = {
'NoValidHost': _("There is not enough capacity for this "
"flavor in the selected availability zone. "
"Try again later or select a different availability "
"zone.")
}
return fault_map.get(message, default_message)
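# Example: a fault payload of {'message': 'NoValidHost'} maps to the capacity
# hint above, while any unrecognized message falls back to the generic
# "Please try again later" text with the raw message interpolated.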
def get_instance_error(instance):
if instance.status.lower() != 'error':
return None
message = instance_fault_to_friendly_message(instance)
    preamble = _('Failed to launch instance "%s"'
                 ) % (instance.name or instance.id)
message = string_concat(preamble, ': ', message)
return message
class UpdateRow(tables.Row):
ajax = True
def get_data(self, request, instance_id):
instance = api.nova.server_get(request, instance_id)
instance.full_flavor = api.nova.flavor_get(request,
instance.flavor["id"])
error = get_instance_error(instance)
if error:
messages.error(request, error)
return instance
class StartInstance(tables.BatchAction):
name = "start"
action_present = _("Start")
action_past = _("Started")
data_type_singular = _("Instance")
data_type_plural = _("Instances")
policy_rules = (("compute", "compute:start"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance):
return instance.status in ("SHUTDOWN", "SHUTOFF", "CRASHED")
def action(self, request, obj_id):
api.nova.server_start(request, obj_id)
class StopInstance(tables.BatchAction):
name = "stop"
action_present = _("Shut Off")
action_past = _("Shut Off")
data_type_singular = _("Instance")
data_type_plural = _("Instances")
classes = ('btn-danger',)
policy_rules = (("compute", "compute:stop"),)
def get_policy_target(self, request, datum=None):
project_id = None
if datum:
project_id = getattr(datum, 'tenant_id', None)
return {"project_id": project_id}
def allowed(self, request, instance):
return ((get_power_state(instance)
in ("RUNNING", "PAUSED", "SUSPENDED"))
and not is_deleting(instance))
def action(self, request, obj_id):
api.nova.server_stop(request, obj_id)
def get_ips(instance):
template_name = 'project/instances/_instance_ips.html'
context = {"instance": instance}
return template.loader.render_to_string(template_name, context)
def get_size(instance):
if hasattr(instance, "full_flavor"):
template_name = 'project/instances/_instance_flavor.html'
size_ram = sizeformat.mb_float_format(instance.full_flavor.ram)
if instance.full_flavor.disk > 0:
size_disk = sizeformat.diskgbformat(instance.full_flavor.disk)
else:
size_disk = _("%s GB") % "0"
context = {
"name": instance.full_flavor.name,
"id": instance.id,
"size_disk": size_disk,
"size_ram": size_ram,
"vcpus": instance.full_flavor.vcpus
}
return template.loader.render_to_string(template_name, context)
return _("Not available")
def get_keyname(instance):
if hasattr(instance, "key_name"):
keyname = instance.key_name
return keyname
return _("Not available")
def get_power_state(instance):
return POWER_STATES.get(getattr(instance, "OS-EXT-STS:power_state", 0), '')
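# get_power_state relies on POWER_STATES (assumed to be defined earlier in
# this module) to translate Nova's integer "OS-EXT-STS:power_state" codes into
# the labels used by POWER_DISPLAY_CHOICES below; a missing attribute falls
# back to code 0 ("NO STATE").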
STATUS_DISPLAY_CHOICES = (
("deleted", _("Deleted")),
("active", _("Active")),
("shutoff", _("Shutoff")),
("suspended", _("Suspended")),
("paused", _("Paused")),
("error", _("Error")),
("resize", _("Resize/Migrate")),
("verify_resize", _("Confirm or Revert Resize/Migrate")),
("revert_resize", _("Revert Resize/Migrate")),
("reboot", _("Reboot")),
("hard_reboot", _("Hard Reboot")),
("password", _("Password")),
("rebuild", _("Rebuild")),
("migrating", _("Migrating")),
("build", _("Build")),
("rescue", _("Rescue")),
("deleted", _("Deleted")),
("soft_deleted", _("Soft Deleted")),
("shelved", _("Shelved")),
("shelved_offloaded", _("Shelved Offloaded")),
)
TASK_DISPLAY_CHOICES = (
("scheduling", _("Scheduling")),
("block_device_mapping", _("Block Device Mapping")),
("networking", _("Networking")),
("spawning", _("Spawning")),
("image_snapshot", _("Snapshotting")),
("image_snapshot_pending", _("Image Snapshot Pending")),
("image_pending_upload", _("Image Pending Upload")),
("image_uploading", _("Image Uploading")),
("image_backup", _("Image Backup")),
("updating_password", _("Updating Password")),
("resize_prep", _("Preparing Resize or Migrate")),
("resize_migrating", _("Resizing or Migrating")),
("resize_migrated", _("Resized or Migrated")),
("resize_finish", _("Finishing Resize or Migrate")),
("resize_reverting", _("Reverting Resize or Migrate")),
("resize_confirming", _("Confirming Resize or Migrate")),
("rebooting", _("Rebooting")),
("rebooting_hard", _("Rebooting Hard")),
("pausing", _("Pausing")),
("unpausing", _("Resuming")),
("suspending", _("Suspending")),
("resuming", _("Resuming")),
("powering-off", _("Powering Off")),
("powering-on", _("Powering On")),
("rescuing", _("Rescuing")),
("unrescuing", _("Unrescuing")),
("rebuilding", _("Rebuilding")),
("rebuild_block_device_mapping", _("Rebuild Block Device Mapping")),
("rebuild_spawning", _("Rebuild Spawning")),
("migrating", _("Migrating")),
("deleting", _("Deleting")),
("soft-deleting", _("Soft Deleting")),
("restoring", _("Restoring")),
("shelving", _("Shelving")),
("shelving_image_pending_upload", _("Shelving Image Pending Upload")),
("shelving_image_uploading", _("Shelving Image Uploading")),
("shelving_offloading", _("Shelving Offloading")),
("unshelving", _("Unshelving")),
)
POWER_DISPLAY_CHOICES = (
("NO STATE", _("No State")),
("RUNNING", _("Running")),
("BLOCKED", _("Blocked")),
("PAUSED", _("Paused")),
("SHUTDOWN", _("Shut Down")),
("SHUTOFF", _("Shut Off")),
("CRASHED", _("Crashed")),
("SUSPENDED", _("Suspended")),
("FAILED", _("Failed")),
("BUILDING", _("Building")),
)
class InstancesFilterAction(tables.FilterAction):
def filter(self, table, instances, filter_string):
"""Naive case-insensitive search."""
q = filter_string.lower()
return [instance for instance in instances
if q in instance.name.lower()]
class InstancesTable(tables.DataTable):
TASK_STATUS_CHOICES = (
(None, True),
("none", True)
)
STATUS_CHOICES = (
("active", True),
("shutoff", True),
("suspended", True),
("paused", True),
("error", False),
("rescue", True),
("shelved offloaded", True),
)
name = tables.Column("name",
link=("horizon:project:instances:detail"),
verbose_name=_("Instance Name"))
image_name = tables.Column("image_name",
verbose_name=_("Image Name"))
ip = tables.Column(get_ips,
verbose_name=_("IP Address"),
attrs={'data-type': "ip"})
size = tables.Column(get_size,
verbose_name=_("Size"),
attrs={'data-type': 'size'})
keypair = tables.Column(get_keyname, verbose_name=_("Key Pair"))
status = tables.Column("status",
filters=(title, filters.replace_underscores),
verbose_name=_("Status"),
status=True,
status_choices=STATUS_CHOICES,
display_choices=STATUS_DISPLAY_CHOICES)
az = tables.Column("availability_zone",
verbose_name=_("Availability Zone"))
task = tables.Column("OS-EXT-STS:task_state",
verbose_name=_("Task"),
filters=(title, filters.replace_underscores),
status=True,
status_choices=TASK_STATUS_CHOICES,
display_choices=TASK_DISPLAY_CHOICES)
state = tables.Column(get_power_state,
filters=(title, filters.replace_underscores),
verbose_name=_("Power State"),
display_choices=POWER_DISPLAY_CHOICES)
created = tables.Column("created",
verbose_name=_("Uptime"),
filters=(filters.parse_isotime,
filters.timesince_sortable),
attrs={'data-type': 'timesince'})
class Meta:
name = "instances"
verbose_name = _("Instances")
status_columns = ["status", "task"]
row_class = UpdateRow
table_actions = (LaunchLink, SoftRebootInstance, TerminateInstance,
InstancesFilterAction)
row_actions = (StartInstance, ConfirmResize, RevertResize,
CreateSnapshot, SimpleAssociateIP, AssociateIP,
SimpleDisassociateIP, EditInstance,
DecryptInstancePassword, EditInstanceSecurityGroups,
ConsoleLink, LogLink, TogglePause, ToggleSuspend,
SoftRebootInstance, RebootInstance,
StopInstance, RebuildInstance, TerminateInstance)
# Bug 1320173: Removed ResizeLink option
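# Note: each row action's allowed() hook above gates per-instance visibility,
# so, for example, an instance in VERIFY_RESIZE exposes ConfirmResize and
# RevertResize while most other state-dependent actions stay hidden.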
|
zhongjingjogy/SmartQQBot
|
refs/heads/master
|
plugins/plugin_tuling.py
|
1
|
#coding=utf-8
import os
import json
import urllib
import urllib2
import random
from smartqq import (
on_all_message,
on_group_message,
on_private_message,
on_from_uin_message
)
from smartqq import GroupMsg, PrivateMsg
class Chat(object):
key = ""
apiurl = "http://www.tuling123.com/openapi/api?"
def init(self):
pass
    def query(self, info):
        # URL-encode the parameters so spaces and non-ASCII text in the
        # message do not break the request URL.
        url = self.apiurl + urllib.urlencode({'key': self.key, 'info': info})
        reply = urllib2.urlopen(url).read()
        re_dict = json.loads(reply)
        return re_dict['text']
def plugin_tuling(msg, bot, *args, **kwargs):
msg_id = random.randint(1, 10000)
response_list = [2488439125, 1705468594, 3931279346]
# print("got from uin in repeat: %d" % msg.from_uin)
if msg.from_uin not in response_list:
return
response = ""
try:
tulingbot = Chat()
response = tulingbot.query(msg.content)
print("response: %s" % response)
    except Exception:
        print("Failed to query the tuling bot.")
if not response: return
if isinstance(msg, GroupMsg):
bot.send_group_msg(response, msg.from_uin, msg_id)
elif isinstance(msg, PrivateMsg):
bot.send_friend_msg(response, msg.from_uin, msg_id)
if __name__ == "__main__":
chat = Chat()
chat.init()
print chat.query("Hi!")
|
newera912/WeatherTransportationProject
|
refs/heads/master
|
src/main/java/edu/albany/cs/transCorrelation/PICEvent.py
|
1
|
import numpy as np
from math import *
import random, time
from sets import Set
import itertools, sys
from tqdm import *
def calcDistance(Lat_A, Lng_A, Lat_B, Lng_B):
ra = 6378.140
rb = 6356.755
    flatten = (ra - rb) / ra  # flattening of the ellipsoid
rad_lat_A = radians(Lat_A)
rad_lng_A = radians(Lng_A)
rad_lat_B = radians(Lat_B)
rad_lng_B = radians(Lng_B)
pA = atan(rb / ra * tan(rad_lat_A))
pB = atan(rb / ra * tan(rad_lat_B))
xx = acos(sin(pA) * sin(pB) + cos(pA) * cos(pB) * cos(rad_lng_A - rad_lng_B))
c1 = (sin(xx) - xx) * (sin(pA) + sin(pB)) ** 2 / cos(xx / 2) ** 2
try:
c2 = (sin(xx) + xx) * (sin(pA) - sin(pB)) ** 2 / sin(xx / 2) ** 2
    except ZeroDivisionError:
        # sin(xx/2) == 0 when both points coincide; fall back to c2 = 0 so the
        # function returns 0 instead of raising NameError below.
        print Lat_A, Lng_A, Lat_B, Lng_B
        c2 = 0.0
dr = flatten / 8 * (c1 - c2)
distance = ra * (xx + dr)
    distance = distance / 1.609344  # convert kilometres to miles
return distance
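# Usage sketch (assumption: inputs are decimal-degree latitude/longitude).
# The Bessel-style formula above yields kilometres, which the final division
# by 1.609344 converts to miles; coincident points take the ZeroDivisionError
# branch and come back as 0.
def _example_distance():
    # Roughly 165 miles between Pittsburgh and Harrisburg (illustrative only).
    return calcDistance(40.4406, -79.9959, 40.2732, -76.8867)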
def PIC(weatherEvent,trafficEvent,r,timeThreshold,pair_dist):
pic=0.0
locOnly=0.0
timeOnly=0.0
timeAll=0.0
locAll=0.0
nothing=0.0
for wev in weatherEvent:
for tev in trafficEvent:
# if tev[0]==wev[0]:
# continue
#print "\n\n____________________________Start_______________________________________{} {} {} {}".format(pic,locOnly,timeOnly,nothing)
tempPIC=0.0
pairs=str(min(tev[5],wev[5]))+"_"+str(max(tev[5],wev[5]))
if not pair_dist.has_key(pairs):
continue
if pair_dist[pairs]>r:
if wev[3]<=tev[3] and tev[3]<=wev[4]:
# print "Time...."
timeOnly+=1.0
timeAll+=1.0
elif wev[4]<tev[3] and wev[4]+timeThreshold>=tev[3]:
# print "Time...."
timeOnly+=1.0
timeAll+=1.0
else:
# print "Nothing...."
nothing+=1.0
else:
locAll+=1.0
if wev[3]<=tev[3] and tev[3]<=wev[4]:
# print "Loc and Time...."
tempPIC=1.0
timeAll+=1.0
elif wev[4]<tev[3] and wev[4]+timeThreshold>=tev[3]:
# print "Loc and Time...."
tempPIC=1.0
timeAll+=1.0
else:
# print "Loc ...."
locOnly+=1.0
continue
pic+=tempPIC
#print "________________________________End_____________________________________{} {} {} {}\n\n".format(pic,locOnly,timeOnly,nothing)
# if tempPIC>0.0:
# #print "Dist<",round(pair_dist[pairs]),"Station-ID:",wev[5]%100,wev[3]/1000,wev[3]%1000,"~",wev[4]%1000,"| TMC-ID:",tev[5]%100,tev[3]/1000,tev[3]%1000,"~",tev[4]%1000
# print round(pair_dist[pairs]),wev[5]%100,wev[3]/1000,wev[3]%1000,"~",wev[4]%1000,tev[5],tev[3]/1000,tev[3]%1000,"~",tev[4]%1000
# if pic>timeOnly:
# print wev,tev
# print "{} {} : {} {} {} {} {} {}\n\n".format(r,timeThreshold,pic,locOnly,timeOnly,nothing,timeAll,locAll)
# raw_input("Press Enter to continue...")
# print("Pair match counts\nSatisfy the Location-and-Time constrain:{}\nSatisfy the Only Location constrain:{}\nSatisfy the Only Time constrain{}".format(pic,locOnly,timeOnly))
return pic,locOnly,timeOnly,nothing,timeAll,locAll
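# PIC tallies weather/traffic event pairs: 'pic' counts pairs that co-occur in
# both space (pairwise distance <= r) and time (traffic onset inside the
# weather window, or within timeThreshold after it); locOnly/timeOnly/nothing
# cover the remaining combinations, with timeAll/locAll as marginal totals.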
def round(x, a):
    # Round x to a decimal places (note: this shadows the builtin round()).
    return np.round(10.0 ** a * x) / 10.0 ** a
def main():
ite=10
inputFile="RNSimuEvents_Case1.txt"
outputFile="result_"+inputFile
output=open(outputFile,"a+")
timeThresholds=[1,2,3,4,5]
radius=[5,10,15,20,25,30,35,40,45,50,55,60] #5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,
rel_max_dist=np.max(radius)
    eventFileName = inputFile
weatherEvent0=[]
trafficEvent0=[]
timeAll0=[]
locAll0=[]
sta_loc=Set()
tmc_loc=Set()
print "************************************"+evetnFileName+"***************************************"
with open(evetnFileName,"r") as eF:
for line in eF.readlines():
terms=line.strip().split()
#terms=map(int,terms)
if int(terms[0])==0:
weatherEvent0.append((int(terms[0]),float(terms[1]),float(terms[2]),int(terms[3]),int(terms[4]),int(terms[5])))
sta_loc.add((int(terms[5]),float(terms[1]),float(terms[2])))
timeAll0.append((int(terms[3]),int(terms[4])))
locAll0.append((float(terms[1]),float(terms[2])))
else:
trafficEvent0.append((int(terms[0]),float(terms[1]),float(terms[2]),int(terms[3]),int(terms[4]),int(terms[5])))
tmc_loc.add((int(terms[5]),float(terms[1]),float(terms[2])))
timeAll0.append((int(terms[3]),int(terms[4])))
locAll0.append((float(terms[1]),float(terms[2])))
AllEvent=weatherEvent0+trafficEvent0
weatherEventNum=len(weatherEvent0)
trafficEventNum=len(trafficEvent0)
print "Weather Event:",len(weatherEvent0)
print "Traffic Event:",len(trafficEvent0)
pair_dist={}
for (a,b) in itertools.combinations(sta_loc, 2):
if a[1]==b[1] and a[2]==b[2]:
dist=0
else:
dist=calcDistance(a[1], a[2],b[1],b[2])
if dist>rel_max_dist:
continue
pair_dist[str(min(a[0],b[0]))+"_"+str(max(a[0],b[0]))]=dist
for (a,b) in itertools.combinations(tmc_loc, 2):
if a[1]==b[1] and a[2]==b[2]:
dist=0
else:
dist=calcDistance(a[1], a[2],b[1],b[2])
if dist>rel_max_dist:
continue
pair_dist[str(min(a[0],b[0]))+"_"+str(max(a[0],b[0]))]=dist
for (a,b) in list(itertools.product( sta_loc,tmc_loc)):
if a[1]==b[1] and a[2]==b[2]:
dist=0
else:
dist=calcDistance(a[1], a[2],b[1],b[2])
if dist>rel_max_dist:
continue
pair_dist[str(min(a[0],b[0]))+"_"+str(max(a[0],b[0]))]=dist
print "All-pairs",len(pair_dist)
result=np.zeros((len(radius),len(timeThresholds)))
for j,timeThreshold in enumerate(timeThresholds):
for k,r in enumerate(radius):
t0=time.time()
print("r=%d timeRadius=%d "%(r,timeThreshold))
testStatisticsScore,locOnly,timeOnly,nothing,timeAll,locAll=PIC(weatherEvent0,trafficEvent0,r,timeThreshold,pair_dist)
output.write(str(testStatisticsScore)+" "+str(locOnly)+" "+str(locAll)+" "+str(timeOnly)+" "+str(timeAll)+" | ")
output.flush()
above=0.0
for i in tqdm(range(ite)):
                # Copy the lists before shuffling so the originals are not
                # mutated across permutation iterations.
                tempAll = list(AllEvent)
                tempLoc = list(locAll0)
                random.shuffle(tempAll)
                random.shuffle(tempLoc)
weatherEvent=[]
trafficEvent=[]
                # Use a distinct index name here: the outer radius index k
                # must not be clobbered before result[k][j] is written below.
                for idx, event in enumerate(tempAll):
                    if event[0] == 0:
                        weatherEvent.append((event[0], tempLoc[idx][0], tempLoc[idx][1], event[3], event[4], event[5]))
                    else:
                        trafficEvent.append((event[0], tempLoc[idx][0], tempLoc[idx][1], event[3], event[4], event[5]))
score,locOnly,timeOnly,nothing,timeAll,locAll=PIC(weatherEvent,trafficEvent,r,timeThreshold,pair_dist)
output.write("("+str(score)+" "+str(locOnly)+" "+str(locAll)+" "+str(timeOnly)+" "+str(timeAll)+") ")
output.flush()
#score=1.0
if testStatisticsScore<=score:
above+=1.0
if i%100==0:
sys.stdout.write('i='+str(i)+" ")
output.write("\n")
output.flush()
result[k][j]=round((1.0*above/ite),3)
sys.stdout.write("\n%d %f %f \n"%(testStatisticsScore,above,1.0*above/ite))
output.write(str(timeThreshold)+" "+str(r)+" "+str(testStatisticsScore)+" "+str(above)+" "+ str(round((1.0*above/ite),3))+"\n")
output.flush()
    output.write(str(result))
output.close()
print result
for d in result:
print d
if __name__ =='__main__':
main()
|
roderickvd/nzbToMedia
|
refs/heads/master
|
libs/unidecode/x084.py
|
252
|
data = (
'Hu ', # 0x00
'Qi ', # 0x01
'He ', # 0x02
'Cui ', # 0x03
'Tao ', # 0x04
'Chun ', # 0x05
'Bei ', # 0x06
'Chang ', # 0x07
'Huan ', # 0x08
'Fei ', # 0x09
'Lai ', # 0x0a
'Qi ', # 0x0b
'Meng ', # 0x0c
'Ping ', # 0x0d
'Wei ', # 0x0e
'Dan ', # 0x0f
'Sha ', # 0x10
'Huan ', # 0x11
'Yan ', # 0x12
'Yi ', # 0x13
'Tiao ', # 0x14
'Qi ', # 0x15
'Wan ', # 0x16
'Ce ', # 0x17
'Nai ', # 0x18
'Kutabireru ', # 0x19
'Tuo ', # 0x1a
'Jiu ', # 0x1b
'Tie ', # 0x1c
'Luo ', # 0x1d
'[?] ', # 0x1e
'[?] ', # 0x1f
'Meng ', # 0x20
'[?] ', # 0x21
'Yaji ', # 0x22
'[?] ', # 0x23
'Ying ', # 0x24
'Ying ', # 0x25
'Ying ', # 0x26
'Xiao ', # 0x27
'Sa ', # 0x28
'Qiu ', # 0x29
'Ke ', # 0x2a
'Xiang ', # 0x2b
'Wan ', # 0x2c
'Yu ', # 0x2d
'Yu ', # 0x2e
'Fu ', # 0x2f
'Lian ', # 0x30
'Xuan ', # 0x31
'Yuan ', # 0x32
'Nan ', # 0x33
'Ze ', # 0x34
'Wo ', # 0x35
'Chun ', # 0x36
'Xiao ', # 0x37
'Yu ', # 0x38
'Pian ', # 0x39
'Mao ', # 0x3a
'An ', # 0x3b
'E ', # 0x3c
'Luo ', # 0x3d
'Ying ', # 0x3e
'Huo ', # 0x3f
'Gua ', # 0x40
'Jiang ', # 0x41
'Mian ', # 0x42
'Zuo ', # 0x43
'Zuo ', # 0x44
'Ju ', # 0x45
'Bao ', # 0x46
'Rou ', # 0x47
'Xi ', # 0x48
'Xie ', # 0x49
'An ', # 0x4a
'Qu ', # 0x4b
'Jian ', # 0x4c
'Fu ', # 0x4d
'Lu ', # 0x4e
'Jing ', # 0x4f
'Pen ', # 0x50
'Feng ', # 0x51
'Hong ', # 0x52
'Hong ', # 0x53
'Hou ', # 0x54
'Yan ', # 0x55
'Tu ', # 0x56
'Zhu ', # 0x57
'Zi ', # 0x58
'Xiang ', # 0x59
'Shen ', # 0x5a
'Ge ', # 0x5b
'Jie ', # 0x5c
'Jing ', # 0x5d
'Mi ', # 0x5e
'Huang ', # 0x5f
'Shen ', # 0x60
'Pu ', # 0x61
'Gai ', # 0x62
'Dong ', # 0x63
'Zhou ', # 0x64
'Qian ', # 0x65
'Wei ', # 0x66
'Bo ', # 0x67
'Wei ', # 0x68
'Pa ', # 0x69
'Ji ', # 0x6a
'Hu ', # 0x6b
'Zang ', # 0x6c
'Jia ', # 0x6d
'Duan ', # 0x6e
'Yao ', # 0x6f
'Jun ', # 0x70
'Cong ', # 0x71
'Quan ', # 0x72
'Wei ', # 0x73
'Xian ', # 0x74
'Kui ', # 0x75
'Ting ', # 0x76
'Hun ', # 0x77
'Xi ', # 0x78
'Shi ', # 0x79
'Qi ', # 0x7a
'Lan ', # 0x7b
'Zong ', # 0x7c
'Yao ', # 0x7d
'Yuan ', # 0x7e
'Mei ', # 0x7f
'Yun ', # 0x80
'Shu ', # 0x81
'Di ', # 0x82
'Zhuan ', # 0x83
'Guan ', # 0x84
'Sukumo ', # 0x85
'Xue ', # 0x86
'Chan ', # 0x87
'Kai ', # 0x88
'Kui ', # 0x89
'[?] ', # 0x8a
'Jiang ', # 0x8b
'Lou ', # 0x8c
'Wei ', # 0x8d
'Pai ', # 0x8e
'[?] ', # 0x8f
'Sou ', # 0x90
'Yin ', # 0x91
'Shi ', # 0x92
'Chun ', # 0x93
'Shi ', # 0x94
'Yun ', # 0x95
'Zhen ', # 0x96
'Lang ', # 0x97
'Nu ', # 0x98
'Meng ', # 0x99
'He ', # 0x9a
'Que ', # 0x9b
'Suan ', # 0x9c
'Yuan ', # 0x9d
'Li ', # 0x9e
'Ju ', # 0x9f
'Xi ', # 0xa0
'Pang ', # 0xa1
'Chu ', # 0xa2
'Xu ', # 0xa3
'Tu ', # 0xa4
'Liu ', # 0xa5
'Wo ', # 0xa6
'Zhen ', # 0xa7
'Qian ', # 0xa8
'Zu ', # 0xa9
'Po ', # 0xaa
'Cuo ', # 0xab
'Yuan ', # 0xac
'Chu ', # 0xad
'Yu ', # 0xae
'Kuai ', # 0xaf
'Pan ', # 0xb0
'Pu ', # 0xb1
'Pu ', # 0xb2
'Na ', # 0xb3
'Shuo ', # 0xb4
'Xi ', # 0xb5
'Fen ', # 0xb6
'Yun ', # 0xb7
'Zheng ', # 0xb8
'Jian ', # 0xb9
'Ji ', # 0xba
'Ruo ', # 0xbb
'Cang ', # 0xbc
'En ', # 0xbd
'Mi ', # 0xbe
'Hao ', # 0xbf
'Sun ', # 0xc0
'Zhen ', # 0xc1
'Ming ', # 0xc2
'Sou ', # 0xc3
'Xu ', # 0xc4
'Liu ', # 0xc5
'Xi ', # 0xc6
'Gu ', # 0xc7
'Lang ', # 0xc8
'Rong ', # 0xc9
'Weng ', # 0xca
'Gai ', # 0xcb
'Cuo ', # 0xcc
'Shi ', # 0xcd
'Tang ', # 0xce
'Luo ', # 0xcf
'Ru ', # 0xd0
'Suo ', # 0xd1
'Xian ', # 0xd2
'Bei ', # 0xd3
'Yao ', # 0xd4
'Gui ', # 0xd5
'Bi ', # 0xd6
'Zong ', # 0xd7
'Gun ', # 0xd8
'Za ', # 0xd9
'Xiu ', # 0xda
'Ce ', # 0xdb
'Hai ', # 0xdc
'Lan ', # 0xdd
'[?] ', # 0xde
'Ji ', # 0xdf
'Li ', # 0xe0
'Can ', # 0xe1
'Lang ', # 0xe2
'Yu ', # 0xe3
'[?] ', # 0xe4
'Ying ', # 0xe5
'Mo ', # 0xe6
'Diao ', # 0xe7
'Tiao ', # 0xe8
'Mao ', # 0xe9
'Tong ', # 0xea
'Zhu ', # 0xeb
'Peng ', # 0xec
'An ', # 0xed
'Lian ', # 0xee
'Cong ', # 0xef
'Xi ', # 0xf0
'Ping ', # 0xf1
'Qiu ', # 0xf2
'Jin ', # 0xf3
'Chun ', # 0xf4
'Jie ', # 0xf5
'Wei ', # 0xf6
'Tui ', # 0xf7
'Cao ', # 0xf8
'Yu ', # 0xf9
'Yi ', # 0xfa
'Ji ', # 0xfb
'Liao ', # 0xfc
'Bi ', # 0xfd
'Lu ', # 0xfe
'Su ', # 0xff
)
|
liosha2007/temporary-groupdocs-python-sdk
|
refs/heads/master
|
groupdocs/models/GetJobResourcesResponse.py
|
1
|
#!/usr/bin/env python
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class GetJobResourcesResponse:
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
self.swaggerTypes = {
'result': 'GetJobResourcesResult',
'status': 'str',
'error_message': 'str',
'composedOn': 'long'
}
self.result = None # GetJobResourcesResult
self.status = None # str
self.error_message = None # str
self.composedOn = None # long
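# Usage sketch (assumption: the generated swagger client deserializes a JSON
# payload onto this model): each field named in swaggerTypes becomes an
# attribute, so callers read e.g. response.status or response.composedOn.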
|
Existed/kernel-2.6.35.14
|
refs/heads/master
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py
|
802
|
# Core.py - Python extension for perf trace, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
return defaultdict(autodict)
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
# nothing to do, really
pass
def define_symbolic_value(event_name, field_name, value, field_str):
symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
string = ""
if flag_fields[event_name][field_name]:
print_delim = 0
keys = flag_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string += flag_fields[event_name][field_name]['values'][idx]
break
if idx and (value & idx) == idx:
if print_delim and flag_fields[event_name][field_name]['delim']:
string += " " + flag_fields[event_name][field_name]['delim'] + " "
string += flag_fields[event_name][field_name]['values'][idx]
print_delim = 1
value &= ~idx
return string
def symbol_str(event_name, field_name, value):
string = ""
if symbolic_fields[event_name][field_name]:
keys = symbolic_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string = symbolic_fields[event_name][field_name]['values'][idx]
break
if (value == idx):
string = symbolic_fields[event_name][field_name]['values'][idx]
break
return string
trace_flags = { 0x00: "NONE", \
0x01: "IRQS_OFF", \
0x02: "IRQS_NOSUPPORT", \
0x04: "NEED_RESCHED", \
0x08: "HARDIRQ", \
0x10: "SOFTIRQ" }
def trace_flag_str(value):
string = ""
print_delim = 0
keys = trace_flags.keys()
for idx in keys:
if not value and not idx:
string += "NONE"
break
if idx and (value & idx) == idx:
if print_delim:
string += " | ";
string += trace_flags[idx]
print_delim = 1
value &= ~idx
return string
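# Usage sketch: trace_flag_str(0x05) combines the 0x01 and 0x04 entries into
# "IRQS_OFF | NEED_RESCHED" (component order follows dict iteration order, so
# it is not guaranteed).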
|
YzPaul3/h2o-3
|
refs/heads/master
|
h2o-docs/src/booklets/v2_2015/source/DeepLearning_Vignette_code_examples/deeplearning_inspect_model.py
|
9
|
# View specified parameters of the Deep Learning model
model.params
# Examine the performance of the trained model
model # display all performance metrics
model.model_performance(train=True) # training metrics
model.model_performance(valid=True) # validation metrics
# Get MSE only
model.mse(valid=True)
# Cross-validated MSE
model_cv.mse(xval=True)
|
redhat-openstack/django
|
refs/heads/epel7-patches
|
tests/signing/models.py
|
754
|
# models.py file for tests to run.
|
longjon/numpy
|
refs/heads/master
|
numpy/fft/tests/test_helper.py
|
45
|
#!/usr/bin/env python
"""Test functions for fftpack.helper module
Copied from fftpack.helper by Pearu Peterson, October 2005
"""
from __future__ import division, absolute_import, print_function
import numpy as np
from numpy.testing import TestCase, run_module_suite, assert_array_almost_equal
from numpy import fft
from numpy import pi
class TestFFTShift(TestCase):
def test_definition(self):
x = [0, 1, 2, 3, 4, -4, -3, -2, -1]
y = [-4, -3, -2, -1, 0, 1, 2, 3, 4]
assert_array_almost_equal(fft.fftshift(x), y)
assert_array_almost_equal(fft.ifftshift(y), x)
x = [0, 1, 2, 3, 4, -5, -4, -3, -2, -1]
y = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4]
assert_array_almost_equal(fft.fftshift(x), y)
assert_array_almost_equal(fft.ifftshift(y), x)
def test_inverse(self):
for n in [1, 4, 9, 100, 211]:
x = np.random.random((n,))
assert_array_almost_equal(fft.ifftshift(fft.fftshift(x)), x)
def test_axes_keyword(self):
freqs = [[ 0, 1, 2], [ 3, 4, -4], [-3, -2, -1]]
shifted = [[-1, -3, -2], [ 2, 0, 1], [-4, 3, 4]]
assert_array_almost_equal(fft.fftshift(freqs, axes=(0, 1)), shifted)
assert_array_almost_equal(fft.fftshift(freqs, axes=0),
fft.fftshift(freqs, axes=(0,)))
assert_array_almost_equal(fft.ifftshift(shifted, axes=(0, 1)), freqs)
assert_array_almost_equal(fft.ifftshift(shifted, axes=0),
fft.ifftshift(shifted, axes=(0,)))
class TestFFTFreq(TestCase):
def test_definition(self):
x = [0, 1, 2, 3, 4, -4, -3, -2, -1]
assert_array_almost_equal(9*fft.fftfreq(9), x)
assert_array_almost_equal(9*pi*fft.fftfreq(9, pi), x)
x = [0, 1, 2, 3, 4, -5, -4, -3, -2, -1]
assert_array_almost_equal(10*fft.fftfreq(10), x)
assert_array_almost_equal(10*pi*fft.fftfreq(10, pi), x)
class TestRFFTFreq(TestCase):
def test_definition(self):
x = [0, 1, 2, 3, 4]
assert_array_almost_equal(9*fft.rfftfreq(9), x)
assert_array_almost_equal(9*pi*fft.rfftfreq(9, pi), x)
x = [0, 1, 2, 3, 4, 5]
assert_array_almost_equal(10*fft.rfftfreq(10), x)
assert_array_almost_equal(10*pi*fft.rfftfreq(10, pi), x)
class TestIRFFTN(TestCase):
def test_not_last_axis_success(self):
ar, ai = np.random.random((2, 16, 8, 32))
a = ar + 1j*ai
axes = (-2,)
# Should not raise error
fft.irfftn(a, axes=axes)
if __name__ == "__main__":
run_module_suite()
|
vismartltd/edx-platform
|
refs/heads/master
|
lms/djangoapps/instructor_task/tests/test_subtasks.py
|
146
|
"""
Unit tests for instructor_task subtasks.
"""
from uuid import uuid4
from mock import Mock, patch
from student.models import CourseEnrollment
from instructor_task.subtasks import queue_subtasks_for_query
from instructor_task.tests.factories import InstructorTaskFactory
from instructor_task.tests.test_base import InstructorTaskCourseTestCase
class TestSubtasks(InstructorTaskCourseTestCase):
"""Tests for subtasks."""
def setUp(self):
super(TestSubtasks, self).setUp()
self.initialize_course()
def _enroll_students_in_course(self, course_id, num_students):
"""Create and enroll some students in the course."""
for _ in range(num_students):
random_id = uuid4().hex[:8]
self.create_student(username='student{0}'.format(random_id))
def _queue_subtasks(self, create_subtask_fcn, items_per_task, initial_count, extra_count):
"""Queue subtasks while enrolling more students into course in the middle of the process."""
task_id = str(uuid4())
instructor_task = InstructorTaskFactory.create(
course_id=self.course.id,
task_id=task_id,
task_key='dummy_task_key',
task_type='bulk_course_email',
)
self._enroll_students_in_course(self.course.id, initial_count)
task_querysets = [CourseEnrollment.objects.filter(course_id=self.course.id)]
def initialize_subtask_info(*args): # pylint: disable=unused-argument
"""Instead of initializing subtask info enroll some more students into course."""
self._enroll_students_in_course(self.course.id, extra_count)
return {}
with patch('instructor_task.subtasks.initialize_subtask_info') as mock_initialize_subtask_info:
mock_initialize_subtask_info.side_effect = initialize_subtask_info
queue_subtasks_for_query(
entry=instructor_task,
action_name='action_name',
create_subtask_fcn=create_subtask_fcn,
item_querysets=task_querysets,
item_fields=[],
items_per_task=items_per_task,
total_num_items=initial_count,
)
def test_queue_subtasks_for_query1(self):
"""Test queue_subtasks_for_query() if the last subtask only needs to accommodate < items_per_tasks items."""
mock_create_subtask_fcn = Mock()
self._queue_subtasks(mock_create_subtask_fcn, 3, 7, 1)
# Check number of items for each subtask
mock_create_subtask_fcn_args = mock_create_subtask_fcn.call_args_list
self.assertEqual(len(mock_create_subtask_fcn_args[0][0][0]), 3)
self.assertEqual(len(mock_create_subtask_fcn_args[1][0][0]), 3)
self.assertEqual(len(mock_create_subtask_fcn_args[2][0][0]), 2)
def test_queue_subtasks_for_query2(self):
"""Test queue_subtasks_for_query() if the last subtask needs to accommodate > items_per_task items."""
mock_create_subtask_fcn = Mock()
self._queue_subtasks(mock_create_subtask_fcn, 3, 8, 3)
# Check number of items for each subtask
mock_create_subtask_fcn_args = mock_create_subtask_fcn.call_args_list
self.assertEqual(len(mock_create_subtask_fcn_args[0][0][0]), 3)
self.assertEqual(len(mock_create_subtask_fcn_args[1][0][0]), 3)
self.assertEqual(len(mock_create_subtask_fcn_args[2][0][0]), 5)
|
mattiamaestrini/spotipy
|
refs/heads/master
|
examples/simple1.py
|
10
|
import spotipy
birdy_uri = 'spotify:artist:2WX2uTcsvV5OnS0inACecP'
spotify = spotipy.Spotify()
results = spotify.artist_albums(birdy_uri, album_type='album')
albums = results['items']
while results['next']:
results = spotify.next(results)
albums.extend(results['items'])
for album in albums:
print((album['name']))
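# The loop above follows Spotify's pagination: each response's 'next' field
# holds the URL of the following page until it becomes None, so all of the
# artist's albums are accumulated before printing.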
|
varunagrawal/yowsup
|
refs/heads/master
|
yowsup/layers/protocol_media/protocolentities/test_message_media_location.py
|
69
|
from yowsup.layers.protocol_media.protocolentities.test_message_media import MediaMessageProtocolEntityTest
from yowsup.layers.protocol_media.protocolentities import LocationMediaMessageProtocolEntity
from yowsup.structs import ProtocolTreeNode
class LocationMediaMessageProtocolEntityTest(MediaMessageProtocolEntityTest):
def setUp(self):
super(LocationMediaMessageProtocolEntityTest, self).setUp()
self.ProtocolEntity = LocationMediaMessageProtocolEntity
mediaNode = self.node.getChild("media")
mediaNode["type"] = "location"
mediaNode["latitude"] = "52.52393"
mediaNode["longitude"] = "13.41747"
mediaNode["encoding"] = "raw"
|
adelina-t/nova
|
refs/heads/master
|
nova/cmd/novncproxy.py
|
38
|
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Websocket proxy that is compatible with OpenStack Nova
noVNC consoles. Leverages websockify.py by Joel Martin
"""
import sys
from oslo_config import cfg
from nova.cmd import baseproxy
from nova import config
opts = [
cfg.StrOpt('novncproxy_host',
default='0.0.0.0',
help='Host on which to listen for incoming requests'),
cfg.IntOpt('novncproxy_port',
default=6080,
help='Port on which to listen for incoming requests'),
]
CONF = cfg.CONF
CONF.register_cli_opts(opts)
def main():
# set default web flag option
CONF.set_default('web', '/usr/share/novnc')
config.parse_args(sys.argv)
baseproxy.proxy(
host=CONF.novncproxy_host,
port=CONF.novncproxy_port)
|
conorpp/napkis
|
refs/heads/master
|
napkis/deployment/python2.7/django/middleware/http.py
|
225
|
from django.utils.http import http_date, parse_http_date_safe
class ConditionalGetMiddleware(object):
"""
    Handles conditional GET operations. If the response has an ETag or
Last-Modified header, and the request has If-None-Match or
If-Modified-Since, the response is replaced by an HttpNotModified.
Also sets the Date and Content-Length response-headers.
"""
def process_response(self, request, response):
response['Date'] = http_date()
if not response.streaming and not response.has_header('Content-Length'):
response['Content-Length'] = str(len(response.content))
if response.has_header('ETag'):
if_none_match = request.META.get('HTTP_IF_NONE_MATCH')
if if_none_match == response['ETag']:
# Setting the status is enough here. The response handling path
# automatically removes content for this status code (in
# http.conditional_content_removal()).
response.status_code = 304
if response.has_header('Last-Modified'):
if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
if if_modified_since is not None:
if_modified_since = parse_http_date_safe(if_modified_since)
if if_modified_since is not None:
last_modified = parse_http_date_safe(response['Last-Modified'])
if last_modified is not None and last_modified <= if_modified_since:
# Setting the status code is enough here (same reasons as
# above).
response.status_code = 304
return response
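# Usage sketch (assumption: a standard Django settings module); the middleware
# is enabled by listing its dotted path, e.g.:
#
# MIDDLEWARE_CLASSES = (
#     'django.middleware.http.ConditionalGetMiddleware',
#     # ... other middleware ...
# )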
|
KiChjang/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/executors/base.py
|
4
|
import base64
import hashlib
import io
import json
import os
import threading
import traceback
import socket
import sys
from abc import ABCMeta, abstractmethod
from http.client import HTTPConnection
from urllib.parse import urljoin, urlsplit, urlunsplit
from .actions import actions
from .protocol import Protocol, BaseProtocolPart
here = os.path.dirname(__file__)
def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
**kwargs):
timeout_multiplier = kwargs["timeout_multiplier"]
if timeout_multiplier is None:
timeout_multiplier = 1
executor_kwargs = {"server_config": server_config,
"timeout_multiplier": timeout_multiplier,
"debug_info": kwargs["debug_info"]}
if test_type in ("reftest", "print-reftest"):
executor_kwargs["screenshot_cache"] = cache_manager.dict()
if test_type == "wdspec":
executor_kwargs["binary"] = kwargs.get("binary")
executor_kwargs["webdriver_binary"] = kwargs.get("webdriver_binary")
executor_kwargs["webdriver_args"] = kwargs.get("webdriver_args")
# By default the executor may try to cleanup windows after a test (to best
# associate any problems with the test causing them). If the user might
# want to view the results, however, the executor has to skip that cleanup.
if kwargs["pause_after_test"] or kwargs["pause_on_unexpected"]:
executor_kwargs["cleanup_after_test"] = False
executor_kwargs["debug_test"] = kwargs["debug_test"]
return executor_kwargs
def strip_server(url):
"""Remove the scheme and netloc from a url, leaving only the path and any query
or fragment.
url - the url to strip
e.g. http://example.org:8000/tests?id=1#2 becomes /tests?id=1#2"""
url_parts = list(urlsplit(url))
url_parts[0] = ""
url_parts[1] = ""
return urlunsplit(url_parts)
class TestharnessResultConverter(object):
harness_codes = {0: "OK",
1: "ERROR",
2: "TIMEOUT",
3: "PRECONDITION_FAILED"}
test_codes = {0: "PASS",
1: "FAIL",
2: "TIMEOUT",
3: "NOTRUN",
4: "PRECONDITION_FAILED"}
def __call__(self, test, result, extra=None):
"""Convert a JSON result into a (TestResult, [SubtestResult]) tuple"""
result_url, status, message, stack, subtest_results = result
assert result_url == test.url, ("Got results from %s, expected %s" %
(result_url, test.url))
harness_result = test.result_cls(self.harness_codes[status], message, extra=extra, stack=stack)
return (harness_result,
[test.subtest_result_cls(st_name, self.test_codes[st_status], st_message, st_stack)
for st_name, st_status, st_message, st_stack in subtest_results])
testharness_result_converter = TestharnessResultConverter()
def hash_screenshots(screenshots):
"""Computes the sha1 checksum of a list of base64-encoded screenshots."""
return [hashlib.sha1(base64.b64decode(screenshot)).hexdigest()
for screenshot in screenshots]
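# Usage sketch: screenshots arrive base64-encoded from the browser, so the
# digest is taken over the decoded image bytes, e.g.
# hash_screenshots([base64.b64encode(png_bytes).decode()]) -> ["<40-hex sha1>"]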
def _ensure_hash_in_reftest_screenshots(extra):
"""Make sure reftest_screenshots have hashes.
Marionette internal reftest runner does not produce hashes.
"""
log_data = extra.get("reftest_screenshots")
if not log_data:
return
for item in log_data:
if type(item) != dict:
# Skip relation strings.
continue
if "hash" not in item:
item["hash"] = hash_screenshots([item["screenshot"]])[0]
def get_pages(ranges_value, total_pages):
"""Get a set of page numbers to include in a print reftest.
:param ranges_value: Parsed page ranges as a list e.g. [[1,2], [4], [6,None]]
:param total_pages: Integer total number of pages in the paginated output.
:retval: Set containing integer page numbers to include in the comparison e.g.
for the example ranges value and 10 total pages this would be
{1,2,4,6,7,8,9,10}"""
if not ranges_value:
return set(range(1, total_pages + 1))
rv = set()
for range_limits in ranges_value:
if len(range_limits) == 1:
range_limits = [range_limits[0], range_limits[0]]
if range_limits[0] is None:
range_limits[0] = 1
if range_limits[1] is None:
range_limits[1] = total_pages
if range_limits[0] > total_pages:
continue
rv |= set(range(range_limits[0], range_limits[1] + 1))
return rv
def reftest_result_converter(self, test, result):
extra = result.get("extra", {})
_ensure_hash_in_reftest_screenshots(extra)
return (test.result_cls(
result["status"],
result["message"],
extra=extra,
stack=result.get("stack")), [])
def pytest_result_converter(self, test, data):
harness_data, subtest_data = data
if subtest_data is None:
subtest_data = []
harness_result = test.result_cls(*harness_data)
subtest_results = [test.subtest_result_cls(*item) for item in subtest_data]
return (harness_result, subtest_results)
def crashtest_result_converter(self, test, result):
return test.result_cls(**result), []
class ExecutorException(Exception):
def __init__(self, status, message):
self.status = status
self.message = message
class TimedRunner(object):
def __init__(self, logger, func, protocol, url, timeout, extra_timeout):
self.func = func
self.logger = logger
self.result = None
self.protocol = protocol
self.url = url
self.timeout = timeout
self.extra_timeout = extra_timeout
self.result_flag = threading.Event()
def run(self):
for setup_fn in [self.set_timeout, self.before_run]:
err = setup_fn()
if err:
self.result = (False, err)
return self.result
executor = threading.Thread(target=self.run_func)
executor.start()
# Add twice the extra timeout since the called function is expected to
# wait at least self.timeout + self.extra_timeout and this gives some leeway
timeout = self.timeout + 2 * self.extra_timeout if self.timeout else None
finished = self.result_flag.wait(timeout)
if self.result is None:
if finished:
# flag is True unless we timeout; this *shouldn't* happen, but
# it can if self.run_func fails to set self.result due to raising
self.result = False, ("INTERNAL-ERROR", "%s.run_func didn't set a result" %
self.__class__.__name__)
else:
if self.protocol.is_alive():
message = "Executor hit external timeout (this may indicate a hang)\n"
# get a traceback for the current stack of the executor thread
message += "".join(traceback.format_stack(sys._current_frames()[executor.ident]))
self.result = False, ("EXTERNAL-TIMEOUT", message)
else:
self.logger.info("Browser not responding, setting status to CRASH")
self.result = False, ("CRASH", None)
elif self.result[1] is None:
# We didn't get any data back from the test, so check if the
# browser is still responsive
if self.protocol.is_alive():
self.result = False, ("INTERNAL-ERROR", None)
else:
self.logger.info("Browser not responding, setting status to CRASH")
self.result = False, ("CRASH", None)
return self.result
def set_timeout(self):
raise NotImplementedError
def before_run(self):
pass
def run_func(self):
raise NotImplementedError
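# TimedRunner subclasses populate self.result with a (success, data) pair:
# True pairs carry the test payload, while False pairs carry a
# (status, message) tuple such as ("EXTERNAL-TIMEOUT", ...) that the executor
# converts into a test result.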
class TestExecutor(object):
"""Abstract Base class for object that actually executes the tests in a
specific browser. Typically there will be a different TestExecutor
subclass for each test type and method of executing tests.
:param browser: ExecutorBrowser instance providing properties of the
browser that will be tested.
:param server_config: Dictionary of wptserve server configuration of the
form stored in TestEnvironment.config
:param timeout_multiplier: Multiplier relative to base timeout to use
when setting test timeout.
"""
__metaclass__ = ABCMeta
test_type = None
convert_result = None
supports_testdriver = False
supports_jsshell = False
# Extra timeout to use after internal test timeout at which the harness
# should force a timeout
extra_timeout = 5 # seconds
def __init__(self, logger, browser, server_config, timeout_multiplier=1,
debug_info=None, **kwargs):
self.logger = logger
self.runner = None
self.browser = browser
self.server_config = server_config
self.timeout_multiplier = timeout_multiplier
self.debug_info = debug_info
self.last_environment = {"protocol": "http",
"prefs": {}}
self.protocol = None # This must be set in subclasses
def setup(self, runner):
"""Run steps needed before tests can be started e.g. connecting to
browser instance
:param runner: TestRunner instance that is going to run the tests"""
self.runner = runner
if self.protocol is not None:
self.protocol.setup(runner)
def teardown(self):
"""Run cleanup steps after tests have finished"""
if self.protocol is not None:
self.protocol.teardown()
def reset(self):
"""Re-initialize internal state to facilitate repeated test execution
as implemented by the `--rerun` command-line argument."""
pass
def run_test(self, test):
"""Run a particular test.
:param test: The test to run"""
try:
if test.environment != self.last_environment:
self.on_environment_change(test.environment)
result = self.do_test(test)
except Exception as e:
exception_string = traceback.format_exc()
self.logger.warning(exception_string)
result = self.result_from_exception(test, e, exception_string)
# log result of parent test
if result[0].status == "ERROR":
self.logger.debug(result[0].message)
self.last_environment = test.environment
self.runner.send_message("test_ended", test, result)
def server_url(self, protocol, subdomain=False):
scheme = "https" if protocol == "h2" else protocol
host = self.server_config["browser_host"]
if subdomain:
# The only supported subdomain filename flag is "www".
host = "{subdomain}.{host}".format(subdomain="www", host=host)
return "{scheme}://{host}:{port}".format(scheme=scheme, host=host,
port=self.server_config["ports"][protocol][0])
def test_url(self, test):
return urljoin(self.server_url(test.environment["protocol"],
test.subdomain), test.url)
@abstractmethod
def do_test(self, test):
"""Test-type and protocol specific implementation of running a
specific test.
:param test: The test to run."""
pass
def on_environment_change(self, new_environment):
pass
def result_from_exception(self, test, e, exception_string):
if hasattr(e, "status") and e.status in test.result_cls.statuses:
status = e.status
else:
status = "INTERNAL-ERROR"
message = str(getattr(e, "message", ""))
if message:
message += "\n"
message += exception_string
return test.result_cls(status, message), []
def wait(self):
self.protocol.base.wait()
class TestharnessExecutor(TestExecutor):
convert_result = testharness_result_converter
class RefTestExecutor(TestExecutor):
convert_result = reftest_result_converter
is_print = False
def __init__(self, logger, browser, server_config, timeout_multiplier=1, screenshot_cache=None,
debug_info=None, **kwargs):
TestExecutor.__init__(self, logger, browser, server_config,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.screenshot_cache = screenshot_cache
class CrashtestExecutor(TestExecutor):
convert_result = crashtest_result_converter
class PrintRefTestExecutor(TestExecutor):
convert_result = reftest_result_converter
is_print = True
class RefTestImplementation(object):
def __init__(self, executor):
self.timeout_multiplier = executor.timeout_multiplier
self.executor = executor
# Cache of url:(screenshot hash, screenshot). Typically the
# screenshot is None, but we set this value if a test fails
# and the screenshot was taken from the cache so that we may
# retrieve the screenshot from the cache directly in the future
self.screenshot_cache = self.executor.screenshot_cache
self.message = None
def setup(self):
pass
def teardown(self):
pass
@property
def logger(self):
return self.executor.logger
def get_hash(self, test, viewport_size, dpi, page_ranges):
key = (test.url, viewport_size, dpi)
if key not in self.screenshot_cache:
success, data = self.get_screenshot_list(test, viewport_size, dpi, page_ranges)
if not success:
return False, data
screenshots = data
hash_values = hash_screenshots(data)
self.screenshot_cache[key] = (hash_values, screenshots)
rv = (hash_values, screenshots)
else:
rv = self.screenshot_cache[key]
self.message.append("%s %s" % (test.url, rv[0]))
return True, rv
def reset(self):
self.screenshot_cache.clear()
def check_pass(self, hashes, screenshots, urls, relation, fuzzy):
"""Check if a test passes, and return a tuple of (pass, page_idx),
where page_idx is the zero-based index of the first page on which a
difference occurs if any, or None if there are no differences"""
assert relation in ("==", "!=")
lhs_hashes, rhs_hashes = hashes
lhs_screenshots, rhs_screenshots = screenshots
        if len(lhs_hashes) != len(rhs_hashes):
            self.logger.info("Got different number of pages")
            # A page-count mismatch means the renderings differ; return a
            # (pass, page_idx) tuple so the caller can unpack it.
            return relation == "!=", None
assert len(lhs_screenshots) == len(lhs_hashes) == len(rhs_screenshots) == len(rhs_hashes)
for (page_idx, (lhs_hash,
rhs_hash,
lhs_screenshot,
rhs_screenshot)) in enumerate(zip(lhs_hashes,
rhs_hashes,
lhs_screenshots,
rhs_screenshots)):
comparison_screenshots = (lhs_screenshot, rhs_screenshot)
if not fuzzy or fuzzy == ((0, 0), (0, 0)):
equal = lhs_hash == rhs_hash
# sometimes images can have different hashes, but pixels can be identical.
if not equal:
self.logger.info("Image hashes didn't match%s, checking pixel differences" %
("" if len(hashes) == 1 else " on page %i" % (page_idx + 1)))
max_per_channel, pixels_different = self.get_differences(comparison_screenshots,
urls)
equal = pixels_different == 0 and max_per_channel == 0
else:
max_per_channel, pixels_different = self.get_differences(comparison_screenshots,
urls,
page_idx if len(hashes) > 1 else None)
allowed_per_channel, allowed_different = fuzzy
self.logger.info("Allowed %s pixels different, maximum difference per channel %s" %
("-".join(str(item) for item in allowed_different),
"-".join(str(item) for item in allowed_per_channel)))
equal = ((pixels_different == 0 and allowed_different[0] == 0) or
(max_per_channel == 0 and allowed_per_channel[0] == 0) or
(allowed_per_channel[0] <= max_per_channel <= allowed_per_channel[1] and
allowed_different[0] <= pixels_different <= allowed_different[1]))
if not equal:
return (False if relation == "==" else True, page_idx)
# All screenshots were equal within the fuzziness
return (True if relation == "==" else False, None)
def get_differences(self, screenshots, urls, page_idx=None):
from PIL import Image, ImageChops, ImageStat
lhs = Image.open(io.BytesIO(base64.b64decode(screenshots[0]))).convert("RGB")
rhs = Image.open(io.BytesIO(base64.b64decode(screenshots[1]))).convert("RGB")
self.check_if_solid_color(lhs, urls[0])
self.check_if_solid_color(rhs, urls[1])
diff = ImageChops.difference(lhs, rhs)
minimal_diff = diff.crop(diff.getbbox())
mask = minimal_diff.convert("L", dither=None)
stat = ImageStat.Stat(minimal_diff, mask)
per_channel = max(item[1] for item in stat.extrema)
count = stat.count[0]
self.logger.info("Found %s pixels different, maximum difference per channel %s%s" %
(count,
per_channel,
"" if page_idx is None else " on page %i" % (page_idx + 1)))
return per_channel, count
def check_if_solid_color(self, image, url):
extrema = image.getextrema()
if all(min == max for min, max in extrema):
color = ''.join('%02X' % value for value, _ in extrema)
self.message.append("Screenshot is solid color 0x%s for %s\n" % (color, url))
def run_test(self, test):
viewport_size = test.viewport_size
dpi = test.dpi
page_ranges = test.page_ranges
self.message = []
        # Depth-first search of the reference tree, with the goal
        # of reaching a leaf node with only pass results
stack = list(((test, item[0]), item[1]) for item in reversed(test.references))
page_idx = None
while stack:
hashes = [None, None]
screenshots = [None, None]
urls = [None, None]
nodes, relation = stack.pop()
fuzzy = self.get_fuzzy(test, nodes, relation)
for i, node in enumerate(nodes):
success, data = self.get_hash(node, viewport_size, dpi, page_ranges)
if success is False:
return {"status": data[0], "message": data[1]}
hashes[i], screenshots[i] = data
urls[i] = node.url
is_pass, page_idx = self.check_pass(hashes, screenshots, urls, relation, fuzzy)
if is_pass:
fuzzy = self.get_fuzzy(test, nodes, relation)
if nodes[1].references:
stack.extend(list(((nodes[1], item[0]), item[1])
for item in reversed(nodes[1].references)))
else:
# We passed
return {"status": "PASS", "message": None}
# We failed, so construct a failure message
if page_idx is None:
# default to outputting the last page
page_idx = -1
for i, (node, screenshot) in enumerate(zip(nodes, screenshots)):
if screenshot is None:
success, screenshot = self.retake_screenshot(node, viewport_size, dpi, page_ranges)
if success:
screenshots[i] = screenshot
log_data = [
{"url": nodes[0].url,
"screenshot": screenshots[0][page_idx],
"hash": hashes[0][page_idx]},
relation,
{"url": nodes[1].url,
"screenshot": screenshots[1][page_idx],
"hash": hashes[1][page_idx]},
]
return {"status": "FAIL",
"message": "\n".join(self.message),
"extra": {"reftest_screenshots": log_data}}
def get_fuzzy(self, root_test, test_nodes, relation):
full_key = tuple([item.url for item in test_nodes] + [relation])
ref_only_key = test_nodes[1].url
fuzzy_override = root_test.fuzzy_override
fuzzy = test_nodes[0].fuzzy
sources = [fuzzy_override, fuzzy]
keys = [full_key, ref_only_key, None]
value = None
for source in sources:
for key in keys:
if key in source:
value = source[key]
break
if value:
break
return value
def retake_screenshot(self, node, viewport_size, dpi, page_ranges):
success, data = self.get_screenshot_list(node,
viewport_size,
dpi,
page_ranges)
if not success:
return False, data
key = (node.url, viewport_size, dpi)
hash_val, _ = self.screenshot_cache[key]
self.screenshot_cache[key] = hash_val, data
return True, data
def get_screenshot_list(self, node, viewport_size, dpi, page_ranges):
success, data = self.executor.screenshot(node, viewport_size, dpi, page_ranges)
if success and not isinstance(data, list):
return success, [data]
return success, data
class WdspecExecutor(TestExecutor):
convert_result = pytest_result_converter
protocol_cls = None
def __init__(self, logger, browser, server_config, webdriver_binary,
webdriver_args, timeout_multiplier=1, capabilities=None,
debug_info=None, environ=None, **kwargs):
self.do_delayed_imports()
TestExecutor.__init__(self, logger, browser, server_config,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.webdriver_binary = webdriver_binary
self.webdriver_args = webdriver_args
self.timeout_multiplier = timeout_multiplier
self.capabilities = capabilities
self.environ = environ if environ is not None else {}
self.protocol = self.protocol_cls(self, browser)
def is_alive(self):
return self.protocol.is_alive()
def on_environment_change(self, new_environment):
pass
def do_test(self, test):
timeout = test.timeout * self.timeout_multiplier + self.extra_timeout
success, data = WdspecRun(self.do_wdspec,
self.protocol.session_config,
test.abs_path,
timeout).run()
if success:
return self.convert_result(test, data)
return (test.result_cls(*data), [])
def do_wdspec(self, session_config, path, timeout):
return pytestrunner.run(path,
self.server_config,
session_config,
timeout=timeout,
environ=self.environ)
def do_delayed_imports(self):
global pytestrunner
from . import pytestrunner
class WdspecRun(object):
def __init__(self, func, session, path, timeout):
self.func = func
self.result = (None, None)
self.session = session
self.path = path
self.timeout = timeout
self.result_flag = threading.Event()
def run(self):
"""Runs function in a thread and interrupts it if it exceeds the
given timeout. Returns (True, (Result, [SubtestResult ...])) in
case of success, or (False, (status, extra information)) in the
event of failure.
"""
executor = threading.Thread(target=self._run)
executor.start()
self.result_flag.wait(self.timeout)
if self.result[1] is None:
self.result = False, ("EXTERNAL-TIMEOUT", None)
return self.result
def _run(self):
try:
self.result = True, self.func(self.session, self.path, self.timeout)
except (socket.timeout, IOError):
self.result = False, ("CRASH", None)
except Exception as e:
            message = getattr(e, "message", "")
if message:
message += "\n"
message += traceback.format_exc()
self.result = False, ("INTERNAL-ERROR", message)
finally:
self.result_flag.set()
class ConnectionlessBaseProtocolPart(BaseProtocolPart):
def load(self, url):
pass
def execute_script(self, script, asynchronous=False):
pass
def set_timeout(self, timeout):
pass
def wait(self):
pass
def set_window(self, handle):
pass
def window_handles(self):
return []
class ConnectionlessProtocol(Protocol):
implements = [ConnectionlessBaseProtocolPart]
def connect(self):
pass
def after_connect(self):
pass
class WdspecProtocol(Protocol):
server_cls = None
implements = [ConnectionlessBaseProtocolPart]
def __init__(self, executor, browser):
Protocol.__init__(self, executor, browser)
self.webdriver_binary = executor.webdriver_binary
self.webdriver_args = executor.webdriver_args
self.capabilities = self.executor.capabilities
self.session_config = None
self.server = None
def connect(self):
"""Connect to browser via the HTTP server."""
self.server = self.server_cls(
self.logger,
binary=self.webdriver_binary,
args=self.webdriver_args)
self.server.start(block=False)
self.logger.info(
"WebDriver HTTP server listening at %s" % self.server.url)
self.session_config = {"host": self.server.host,
"port": self.server.port,
"capabilities": self.capabilities}
def after_connect(self):
pass
def teardown(self):
if self.server is not None and self.server.is_alive():
self.server.stop()
def is_alive(self):
"""Test that the connection is still alive.
Because the remote communication happens over HTTP we need to
make an explicit request to the remote. It is allowed for
WebDriver spec tests to not have a WebDriver session, since this
may be what is tested.
An HTTP request to an invalid path that results in a 404 is
proof enough to us that the server is alive and kicking.
"""
conn = HTTPConnection(self.server.host, self.server.port)
conn.request("HEAD", self.server.base_path + "invalid")
res = conn.getresponse()
return res.status == 404
class CallbackHandler(object):
"""Handle callbacks from testdriver-using tests.
    The default implementation here makes sense for things that are roughly like
    WebDriver. Things that are less like WebDriver may need to create a
    fully custom implementation."""
unimplemented_exc = (NotImplementedError,)
def __init__(self, logger, protocol, test_window):
self.protocol = protocol
self.test_window = test_window
self.logger = logger
self.callbacks = {
"action": self.process_action,
"complete": self.process_complete
}
self.actions = {cls.name: cls(self.logger, self.protocol) for cls in actions}
def __call__(self, result):
url, command, payload = result
self.logger.debug("Got async callback: %s" % result[1])
try:
callback = self.callbacks[command]
except KeyError:
raise ValueError("Unknown callback type %r" % result[1])
return callback(url, payload)
def process_complete(self, url, payload):
rv = [strip_server(url)] + payload
return True, rv
def process_action(self, url, payload):
action = payload["action"]
cmd_id = payload["id"]
self.logger.debug("Got action: %s" % action)
try:
action_handler = self.actions[action]
except KeyError:
raise ValueError("Unknown action %s" % action)
try:
with ActionContext(self.logger, self.protocol, payload.get("context")):
result = action_handler(payload)
except self.unimplemented_exc:
self.logger.warning("Action %s not implemented" % action)
self._send_message(cmd_id, "complete", "error", "Action %s not implemented" % action)
except Exception:
self.logger.warning("Action %s failed" % action)
self.logger.warning(traceback.format_exc())
self._send_message(cmd_id, "complete", "error")
raise
else:
self.logger.debug("Action %s completed with result %s" % (action, result))
return_message = {"result": result}
self._send_message(cmd_id, "complete", "success", json.dumps(return_message))
return False, None
def _send_message(self, cmd_id, message_type, status, message=None):
self.protocol.testdriver.send_message(cmd_id, message_type, status, message=message)
class ActionContext(object):
def __init__(self, logger, protocol, context):
self.logger = logger
self.protocol = protocol
self.context = context
self.initial_window = None
def __enter__(self):
if self.context is None:
return
self.initial_window = self.protocol.base.current_window
self.logger.debug("Switching to window %s" % self.context)
self.protocol.testdriver.switch_to_window(self.context)
def __exit__(self, *args):
if self.context is None:
return
self.logger.debug("Switching back to initial window")
self.protocol.base.set_window(self.initial_window)
self.protocol.testdriver._switch_to_frame(None)
self.initial_window = None
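# Illustrative sketch (not part of the original module): CallbackHandler
# above looks actions up by ``cls.name`` and constructs each one with
# (logger, protocol), then invokes it with the message payload. A minimal
# custom action therefore only needs the following shape; the action name
# and payload fields here are hypothetical.
class ExampleLogAction(object):
    name = "example-log"

    def __init__(self, logger, protocol):
        self.logger = logger
        self.protocol = protocol

    def __call__(self, payload):
        # payload is the dict carried by the testdriver "action" message
        self.logger.info(payload.get("message", ""))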
|
MFoster/breeze
|
refs/heads/master
|
django/core/mail/backends/console.py
|
137
|
"""
Email backend that writes messages to console instead of sending them.
"""
import sys
import threading
from django.core.mail.backends.base import BaseEmailBackend
class EmailBackend(BaseEmailBackend):
def __init__(self, *args, **kwargs):
self.stream = kwargs.pop('stream', sys.stdout)
self._lock = threading.RLock()
super(EmailBackend, self).__init__(*args, **kwargs)
def send_messages(self, email_messages):
"""Write all messages to the stream in a thread-safe way."""
if not email_messages:
return
with self._lock:
try:
stream_created = self.open()
for message in email_messages:
self.stream.write('%s\n' % message.message().as_string())
self.stream.write('-' * 79)
self.stream.write('\n')
self.stream.flush() # flush after each message
if stream_created:
self.close()
            except Exception:
if not self.fail_silently:
raise
return len(email_messages)
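# Usage sketch (illustrative, not part of the original module): during
# development, route outgoing mail to stdout by selecting this backend in
# settings.py, or capture it by passing a custom stream directly:
#
#   EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
#   from StringIO import StringIO
#   backend = EmailBackend(stream=StringIO())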
|
wilebeast/FireFox-OS
|
refs/heads/master
|
B2G/gecko/xpcom/idl-parser/header.py
|
2
|
#!/usr/bin/env python
# header.py - Generate C++ header files from IDL.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""Print a C++ header file for the IDL files specified on the command line"""
import sys, os.path, re, xpidl, itertools, glob
printdoccomments = False
if printdoccomments:
def printComments(fd, clist, indent):
for c in clist:
fd.write("%s%s\n" % (indent, c))
else:
def printComments(fd, clist, indent):
pass
def firstCap(str):
return str[0].upper() + str[1:]
def attributeParamName(a):
return "a" + firstCap(a.name)
def attributeParamNames(a):
l = [attributeParamName(a)]
if a.implicit_jscontext:
l.insert(0, "cx")
return ", ".join(l)
def attributeNativeName(a, getter):
binaryname = a.binaryname is not None and a.binaryname or firstCap(a.name)
return "%s%s" % (getter and 'Get' or 'Set', binaryname)
def attributeReturnType(a, macro):
"""macro should be NS_IMETHOD or NS_IMETHODIMP"""
    if a.nostdcall:
return macro == "NS_IMETHOD" and "virtual nsresult" or "nsresult"
else:
return macro
def attributeParamlist(a, getter):
l = ["%s%s" % (a.realtype.nativeType(getter and 'out' or 'in'),
attributeParamName(a))]
if a.implicit_jscontext:
l.insert(0, "JSContext* cx")
return ", ".join(l)
def attributeAsNative(a, getter):
deprecated = a.deprecated and "NS_DEPRECATED " or ""
params = {'deprecated': deprecated,
'returntype': attributeReturnType(a, 'NS_IMETHOD'),
'binaryname': attributeNativeName(a, getter),
'paramlist': attributeParamlist(a, getter)}
return "%(deprecated)s%(returntype)s %(binaryname)s(%(paramlist)s)" % params
def methodNativeName(m):
return m.binaryname is not None and m.binaryname or firstCap(m.name)
def methodReturnType(m, macro):
"""macro should be NS_IMETHOD or NS_IMETHODIMP"""
if m.nostdcall and m.notxpcom:
return "%s%s" % (macro == "NS_IMETHOD" and "virtual " or "",
m.realtype.nativeType('in').strip())
elif m.nostdcall:
return "%snsresult" % (macro == "NS_IMETHOD" and "virtual " or "")
elif m.notxpcom:
return "%s_(%s)" % (macro, m.realtype.nativeType('in').strip())
else:
return macro
def methodAsNative(m):
return "%s %s(%s)" % (methodReturnType(m, 'NS_IMETHOD'),
methodNativeName(m),
paramlistAsNative(m))
def paramlistAsNative(m, empty='void'):
l = [paramAsNative(p) for p in m.params]
if m.implicit_jscontext:
l.append("JSContext* cx")
if m.optional_argc:
l.append('uint8_t _argc')
if not m.notxpcom and m.realtype.name != 'void':
l.append(paramAsNative(xpidl.Param(paramtype='out',
type=None,
name='_retval',
attlist=[],
location=None,
realtype=m.realtype)))
if len(l) == 0:
return empty
return ", ".join(l)
def paramAsNative(p):
return "%s%s" % (p.nativeType(),
p.name)
def paramlistNames(m):
names = [p.name for p in m.params]
if m.implicit_jscontext:
names.append('cx')
if m.optional_argc:
names.append('_argc')
if not m.notxpcom and m.realtype.name != 'void':
names.append('_retval')
if len(names) == 0:
return ''
return ', '.join(names)
header = """/*
* DO NOT EDIT. THIS FILE IS GENERATED FROM %(filename)s
*/
#ifndef __gen_%(basename)s_h__
#define __gen_%(basename)s_h__
"""
include = """
#ifndef __gen_%(basename)s_h__
#include "%(basename)s.h"
#endif
"""
jspubtd_include = """
#include "jspubtd.h"
"""
infallible_includes = """
#include "mozilla/Assertions.h"
#include "mozilla/Util.h"
"""
header_end = """/* For IDL files that don't want to include root IDL files. */
#ifndef NS_NO_VTABLE
#define NS_NO_VTABLE
#endif
"""
footer = """
#endif /* __gen_%(basename)s_h__ */
"""
forward_decl = """class %(name)s; /* forward declaration */
"""
def idl_basename(f):
"""returns the base name of a file with the last extension stripped"""
return os.path.basename(f).rpartition('.')[0]
def print_header(idl, fd, filename):
fd.write(header % {'filename': filename,
'basename': idl_basename(filename)})
foundinc = False
for inc in idl.includes():
if not foundinc:
foundinc = True
fd.write('\n')
fd.write(include % {'basename': idl_basename(inc.filename)})
if idl.needsJSTypes():
fd.write(jspubtd_include)
# Include some extra files if any attributes are infallible.
for iface in [p for p in idl.productions if p.kind == 'interface']:
for attr in [m for m in iface.members if isinstance(m, xpidl.Attribute)]:
if attr.infallible:
fd.write(infallible_includes)
break
fd.write('\n')
fd.write(header_end)
for p in idl.productions:
if p.kind == 'include': continue
if p.kind == 'cdata':
fd.write(p.data)
continue
if p.kind == 'forward':
fd.write(forward_decl % {'name': p.name})
continue
if p.kind == 'interface':
write_interface(p, fd)
continue
if p.kind == 'typedef':
printComments(fd, p.doccomments, '')
fd.write("typedef %s %s;\n\n" % (p.realtype.nativeType('in'),
p.name))
fd.write(footer % {'basename': idl_basename(filename)})
iface_header = r"""
/* starting interface: %(name)s */
#define %(defname)s_IID_STR "%(iid)s"
#define %(defname)s_IID \
{0x%(m0)s, 0x%(m1)s, 0x%(m2)s, \
{ %(m3joined)s }}
"""
uuid_decoder = re.compile(r"""(?P<m0>[a-f0-9]{8})-
(?P<m1>[a-f0-9]{4})-
(?P<m2>[a-f0-9]{4})-
(?P<m3>[a-f0-9]{4})-
(?P<m4>[a-f0-9]{12})$""", re.X)
iface_prolog = """ {
public:
NS_DECLARE_STATIC_IID_ACCESSOR(%(defname)s_IID)
"""
iface_epilog = """};
NS_DEFINE_STATIC_IID_ACCESSOR(%(name)s, %(defname)s_IID)
/* Use this macro when declaring classes that implement this interface. */
#define NS_DECL_%(macroname)s """
iface_forward = """
/* Use this macro to declare functions that forward the behavior of this interface to another object. */
#define NS_FORWARD_%(macroname)s(_to) """
iface_forward_safe = """
/* Use this macro to declare functions that forward the behavior of this interface to another object in a safe way. */
#define NS_FORWARD_SAFE_%(macroname)s(_to) """
iface_template_prolog = """
#if 0
/* Use the code below as a template for the implementation class for this interface. */
/* Header file */
class %(implclass)s : public %(name)s
{
public:
NS_DECL_ISUPPORTS
NS_DECL_%(macroname)s
%(implclass)s();
private:
~%(implclass)s();
protected:
/* additional members */
};
/* Implementation file */
NS_IMPL_ISUPPORTS1(%(implclass)s, %(name)s)
%(implclass)s::%(implclass)s()
{
/* member initializers and constructor code */
}
%(implclass)s::~%(implclass)s()
{
/* destructor code */
}
"""
example_tmpl = """%(returntype)s %(implclass)s::%(nativeName)s(%(paramList)s)
{
return NS_ERROR_NOT_IMPLEMENTED;
}
"""
iface_template_epilog = """/* End of implementation class template. */
#endif
"""
attr_infallible_tmpl = """\
inline %(realtype)s%(nativename)s(%(args)s)
{
%(realtype)sresult;
mozilla::DebugOnly<nsresult> rv = %(nativename)s(%(argnames)s&result);
MOZ_ASSERT(NS_SUCCEEDED(rv));
return result;
}
"""
def write_interface(iface, fd):
if iface.namemap is None:
raise Exception("Interface was not resolved.")
def write_const_decls(g):
fd.write(" enum {\n")
enums = []
for c in g:
printComments(fd, c.doccomments, ' ')
basetype = c.basetype
value = c.getValue()
enums.append(" %(name)s = %(value)s%(signed)s" % {
'name': c.name,
'value': value,
'signed': (not basetype.signed) and 'U' or ''})
fd.write(",\n".join(enums))
fd.write("\n };\n\n")
def write_method_decl(m):
printComments(fd, m.doccomments, ' ')
fd.write(" /* %s */\n" % m.toIDL())
fd.write(" %s = 0;\n\n" % methodAsNative(m))
def write_attr_decl(a):
printComments(fd, a.doccomments, ' ')
fd.write(" /* %s */\n" % a.toIDL());
fd.write(" %s = 0;\n" % attributeAsNative(a, True))
if a.infallible:
fd.write(attr_infallible_tmpl %
{'realtype': a.realtype.nativeType('in'),
'nativename': attributeNativeName(a, getter=True),
'args': '' if not a.implicit_jscontext else 'JSContext* cx',
'argnames': '' if not a.implicit_jscontext else 'cx, '})
if not a.readonly:
fd.write(" %s = 0;\n" % attributeAsNative(a, False))
fd.write("\n")
defname = iface.name.upper()
if iface.name[0:2] == 'ns':
defname = 'NS_' + defname[2:]
names = uuid_decoder.match(iface.attributes.uuid).groupdict()
m3str = names['m3'] + names['m4']
names['m3joined'] = ", ".join(["0x%s" % m3str[i:i+2] for i in xrange(0, 16, 2)])
if iface.name[2] == 'I':
implclass = iface.name[:2] + iface.name[3:]
else:
implclass = '_MYCLASS_'
names.update({'defname': defname,
'macroname': iface.name.upper(),
'name': iface.name,
'iid': iface.attributes.uuid,
'implclass': implclass})
fd.write(iface_header % names)
printComments(fd, iface.doccomments, '')
fd.write("class ")
foundcdata = False
for m in iface.members:
if isinstance(m, xpidl.CDATA):
foundcdata = True
if not foundcdata:
fd.write("NS_NO_VTABLE ")
if iface.attributes.deprecated:
fd.write("MOZ_DEPRECATED ")
fd.write(iface.name)
if iface.base:
fd.write(" : public %s" % iface.base)
fd.write(iface_prolog % names)
for key, group in itertools.groupby(iface.members, key=type):
if key == xpidl.ConstMember:
write_const_decls(group) # iterator of all the consts
else:
for member in group:
if key == xpidl.Attribute:
write_attr_decl(member)
elif key == xpidl.Method:
write_method_decl(member)
elif key == xpidl.CDATA:
fd.write(" %s" % member.data)
else:
raise Exception("Unexpected interface member: %s" % member)
fd.write(iface_epilog % names)
for member in iface.members:
if isinstance(member, xpidl.Attribute):
fd.write("\\\n %s; " % attributeAsNative(member, True))
if not member.readonly:
fd.write("\\\n %s; " % attributeAsNative(member, False))
elif isinstance(member, xpidl.Method):
fd.write("\\\n %s; " % methodAsNative(member))
if len(iface.members) == 0:
fd.write('\\\n /* no methods! */')
    elif member.kind not in ('attribute', 'method'):
fd.write('\\')
fd.write(iface_forward % names)
def emitTemplate(tmpl, tmpl_notxpcom=None):
        if tmpl_notxpcom is None:
tmpl_notxpcom = tmpl
for member in iface.members:
if isinstance(member, xpidl.Attribute):
fd.write(tmpl % {'asNative': attributeAsNative(member, True),
'nativeName': attributeNativeName(member, True),
'paramList': attributeParamNames(member)})
if not member.readonly:
fd.write(tmpl % {'asNative': attributeAsNative(member, False),
'nativeName': attributeNativeName(member, False),
'paramList': attributeParamNames(member)})
elif isinstance(member, xpidl.Method):
if member.notxpcom:
fd.write(tmpl_notxpcom % {'asNative': methodAsNative(member),
'nativeName': methodNativeName(member),
'paramList': paramlistNames(member)})
else:
fd.write(tmpl % {'asNative': methodAsNative(member),
'nativeName': methodNativeName(member),
'paramList': paramlistNames(member)})
if len(iface.members) == 0:
fd.write('\\\n /* no methods! */')
        elif member.kind not in ('attribute', 'method'):
fd.write('\\')
emitTemplate("\\\n %(asNative)s { return _to %(nativeName)s(%(paramList)s); } ")
fd.write(iface_forward_safe % names)
# Don't try to safely forward notxpcom functions, because we have no
# sensible default error return. Instead, the caller will have to
# implement them.
emitTemplate("\\\n %(asNative)s { return !_to ? NS_ERROR_NULL_POINTER : _to->%(nativeName)s(%(paramList)s); } ",
"\\\n %(asNative)s; ")
fd.write(iface_template_prolog % names)
for member in iface.members:
if isinstance(member, xpidl.ConstMember) or isinstance(member, xpidl.CDATA): continue
fd.write("/* %s */\n" % member.toIDL())
if isinstance(member, xpidl.Attribute):
fd.write(example_tmpl % {'implclass': implclass,
'returntype': attributeReturnType(member, 'NS_IMETHODIMP'),
'nativeName': attributeNativeName(member, True),
'paramList': attributeParamlist(member, True)})
if not member.readonly:
fd.write(example_tmpl % {'implclass': implclass,
'returntype': attributeReturnType(member, 'NS_IMETHODIMP'),
'nativeName': attributeNativeName(member, False),
'paramList': attributeParamlist(member, False)})
elif isinstance(member, xpidl.Method):
fd.write(example_tmpl % {'implclass': implclass,
'returntype': methodReturnType(member, 'NS_IMETHODIMP'),
'nativeName': methodNativeName(member),
'paramList': paramlistAsNative(member, empty='')})
fd.write('\n')
fd.write(iface_template_epilog)
if __name__ == '__main__':
from optparse import OptionParser
o = OptionParser()
o.add_option('-I', action='append', dest='incdirs', default=['.'],
help="Directory to search for imported files")
o.add_option('--cachedir', dest='cachedir', default=None,
help="Directory in which to cache lex/parse tables.")
o.add_option('-o', dest='outfile', default=None,
help="Output file (default is stdout)")
o.add_option('-d', dest='depfile', default=None,
help="Generate a make dependency file")
o.add_option('--regen', action='store_true', dest='regen', default=False,
help="Regenerate IDL Parser cache")
options, args = o.parse_args()
file = args[0] if args else None
if options.cachedir is not None:
if not os.path.isdir(options.cachedir):
os.mkdir(options.cachedir)
sys.path.append(options.cachedir)
# The only thing special about a regen is that there are no input files.
if options.regen:
if options.cachedir is None:
print >>sys.stderr, "--regen useless without --cachedir"
# Delete the lex/yacc files. Ply is too stupid to regenerate them
# properly
for fileglobs in [os.path.join(options.cachedir, f) for f in ["xpidllex.py*", "xpidlyacc.py*"]]:
for filename in glob.glob(fileglobs):
os.remove(filename)
# Instantiate the parser.
p = xpidl.IDLParser(outputdir=options.cachedir)
if options.regen:
sys.exit(0)
if options.depfile is not None and options.outfile is None:
print >>sys.stderr, "-d requires -o"
sys.exit(1)
if options.outfile is not None:
outfd = open(options.outfile, 'w')
closeoutfd = True
else:
outfd = sys.stdout
closeoutfd = False
idl = p.parse(open(file).read(), filename=file)
idl.resolve(options.incdirs, p)
print_header(idl, outfd, file)
if closeoutfd:
outfd.close()
if options.depfile is not None:
dirname = os.path.dirname(options.depfile)
if dirname:
try:
os.makedirs(dirname)
            except OSError:
                pass
depfd = open(options.depfile, 'w')
deps = [dep.replace('\\', '/') for dep in idl.deps]
print >>depfd, "%s: %s" % (options.outfile, " ".join(deps))
|
flwh/KK_mt6589_iq451
|
refs/heads/master
|
prebuilts/python/darwin-x86/2.7.5/lib/python2.7/test/test_math.py
|
71
|
# Python test set -- math module
# XXXX Should not do tests around zero only
from test.test_support import run_unittest, verbose
import unittest
import math
import os
import sys
import random
import struct
eps = 1E-05
NAN = float('nan')
INF = float('inf')
NINF = float('-inf')
# decorator for skipping tests on non-IEEE 754 platforms
requires_IEEE_754 = unittest.skipUnless(
float.__getformat__("double").startswith("IEEE"),
"test requires IEEE 754 doubles")
# detect evidence of double-rounding: fsum is not always correctly
# rounded on machines that suffer from double rounding.
x, y = 1e16, 2.9999 # use temporary values to defeat peephole optimizer
HAVE_DOUBLE_ROUNDING = (x + y == 1e16 + 4)
# locate file with test values
if __name__ == '__main__':
file = sys.argv[0]
else:
file = __file__
test_dir = os.path.dirname(file) or os.curdir
math_testcases = os.path.join(test_dir, 'math_testcases.txt')
test_file = os.path.join(test_dir, 'cmath_testcases.txt')
def to_ulps(x):
"""Convert a non-NaN float x to an integer, in such a way that
adjacent floats are converted to adjacent integers. Then
abs(ulps(x) - ulps(y)) gives the difference in ulps between two
floats.
The results from this function will only make sense on platforms
where C doubles are represented in IEEE 754 binary64 format.
"""
n = struct.unpack('<q', struct.pack('<d', x))[0]
if n < 0:
n = ~(n+2**63)
return n
def ulps_check(expected, got, ulps=20):
"""Given non-NaN floats `expected` and `got`,
check that they're equal to within the given number of ulps.
Returns None on success and an error message on failure."""
ulps_error = to_ulps(got) - to_ulps(expected)
if abs(ulps_error) <= ulps:
return None
return "error = {} ulps; permitted error = {} ulps".format(ulps_error,
ulps)
def acc_check(expected, got, rel_err=2e-15, abs_err = 5e-323):
"""Determine whether non-NaN floats a and b are equal to within a
(small) rounding error. The default values for rel_err and
abs_err are chosen to be suitable for platforms where a float is
represented by an IEEE 754 double. They allow an error of between
9 and 19 ulps."""
# need to special case infinities, since inf - inf gives nan
if math.isinf(expected) and got == expected:
return None
error = got - expected
permitted_error = max(abs_err, rel_err * abs(expected))
if abs(error) < permitted_error:
return None
return "error = {}; permitted error = {}".format(error,
permitted_error)
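# Worked check of the "9 and 19 ulps" claim above (illustrative): one ulp
# relative to a double x lies between 2**-53 and 2**-52 of x (~1.1e-16 to
# ~2.2e-16), so rel_err = 2e-15 permits roughly 2e-15/2.2e-16 ~= 9 ulps at
# the bottom of a binade and about twice that, ~18-19, at the top.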
def parse_mtestfile(fname):
"""Parse a file with test values
-- starts a comment
blank lines, or lines containing only a comment, are ignored
other lines are expected to have the form
id fn arg -> expected [flag]*
"""
with open(fname) as fp:
for line in fp:
# strip comments, and skip blank lines
if '--' in line:
line = line[:line.index('--')]
if not line.strip():
continue
lhs, rhs = line.split('->')
id, fn, arg = lhs.split()
rhs_pieces = rhs.split()
exp = rhs_pieces[0]
flags = rhs_pieces[1:]
yield (id, fn, float(arg), float(exp), flags)
def parse_testfile(fname):
"""Parse a file with test values
Empty lines or lines starting with -- are ignored
yields id, fn, arg_real, arg_imag, exp_real, exp_imag
"""
with open(fname) as fp:
for line in fp:
# skip comment lines and blank lines
if line.startswith('--') or not line.strip():
continue
lhs, rhs = line.split('->')
id, fn, arg_real, arg_imag = lhs.split()
rhs_pieces = rhs.split()
exp_real, exp_imag = rhs_pieces[0], rhs_pieces[1]
flags = rhs_pieces[2:]
yield (id, fn,
float(arg_real), float(arg_imag),
float(exp_real), float(exp_imag),
flags
)
class MathTests(unittest.TestCase):
def ftest(self, name, value, expected):
if abs(value-expected) > eps:
# Use %r instead of %f so the error message
# displays full precision. Otherwise discrepancies
# in the last few bits will lead to very confusing
# error messages
self.fail('%s returned %r, expected %r' %
(name, value, expected))
def testConstants(self):
self.ftest('pi', math.pi, 3.1415926)
self.ftest('e', math.e, 2.7182818)
def testAcos(self):
self.assertRaises(TypeError, math.acos)
self.ftest('acos(-1)', math.acos(-1), math.pi)
self.ftest('acos(0)', math.acos(0), math.pi/2)
self.ftest('acos(1)', math.acos(1), 0)
self.assertRaises(ValueError, math.acos, INF)
self.assertRaises(ValueError, math.acos, NINF)
self.assertTrue(math.isnan(math.acos(NAN)))
def testAcosh(self):
self.assertRaises(TypeError, math.acosh)
self.ftest('acosh(1)', math.acosh(1), 0)
self.ftest('acosh(2)', math.acosh(2), 1.3169578969248168)
self.assertRaises(ValueError, math.acosh, 0)
self.assertRaises(ValueError, math.acosh, -1)
self.assertEqual(math.acosh(INF), INF)
self.assertRaises(ValueError, math.acosh, NINF)
self.assertTrue(math.isnan(math.acosh(NAN)))
def testAsin(self):
self.assertRaises(TypeError, math.asin)
self.ftest('asin(-1)', math.asin(-1), -math.pi/2)
self.ftest('asin(0)', math.asin(0), 0)
self.ftest('asin(1)', math.asin(1), math.pi/2)
self.assertRaises(ValueError, math.asin, INF)
self.assertRaises(ValueError, math.asin, NINF)
self.assertTrue(math.isnan(math.asin(NAN)))
def testAsinh(self):
self.assertRaises(TypeError, math.asinh)
self.ftest('asinh(0)', math.asinh(0), 0)
self.ftest('asinh(1)', math.asinh(1), 0.88137358701954305)
self.ftest('asinh(-1)', math.asinh(-1), -0.88137358701954305)
self.assertEqual(math.asinh(INF), INF)
self.assertEqual(math.asinh(NINF), NINF)
self.assertTrue(math.isnan(math.asinh(NAN)))
def testAtan(self):
self.assertRaises(TypeError, math.atan)
self.ftest('atan(-1)', math.atan(-1), -math.pi/4)
self.ftest('atan(0)', math.atan(0), 0)
self.ftest('atan(1)', math.atan(1), math.pi/4)
self.ftest('atan(inf)', math.atan(INF), math.pi/2)
self.ftest('atan(-inf)', math.atan(NINF), -math.pi/2)
self.assertTrue(math.isnan(math.atan(NAN)))
def testAtanh(self):
        self.assertRaises(TypeError, math.atanh)
self.ftest('atanh(0)', math.atanh(0), 0)
self.ftest('atanh(0.5)', math.atanh(0.5), 0.54930614433405489)
self.ftest('atanh(-0.5)', math.atanh(-0.5), -0.54930614433405489)
self.assertRaises(ValueError, math.atanh, 1)
self.assertRaises(ValueError, math.atanh, -1)
self.assertRaises(ValueError, math.atanh, INF)
self.assertRaises(ValueError, math.atanh, NINF)
self.assertTrue(math.isnan(math.atanh(NAN)))
def testAtan2(self):
self.assertRaises(TypeError, math.atan2)
self.ftest('atan2(-1, 0)', math.atan2(-1, 0), -math.pi/2)
self.ftest('atan2(-1, 1)', math.atan2(-1, 1), -math.pi/4)
self.ftest('atan2(0, 1)', math.atan2(0, 1), 0)
self.ftest('atan2(1, 1)', math.atan2(1, 1), math.pi/4)
self.ftest('atan2(1, 0)', math.atan2(1, 0), math.pi/2)
# math.atan2(0, x)
self.ftest('atan2(0., -inf)', math.atan2(0., NINF), math.pi)
self.ftest('atan2(0., -2.3)', math.atan2(0., -2.3), math.pi)
self.ftest('atan2(0., -0.)', math.atan2(0., -0.), math.pi)
self.assertEqual(math.atan2(0., 0.), 0.)
self.assertEqual(math.atan2(0., 2.3), 0.)
self.assertEqual(math.atan2(0., INF), 0.)
self.assertTrue(math.isnan(math.atan2(0., NAN)))
# math.atan2(-0, x)
self.ftest('atan2(-0., -inf)', math.atan2(-0., NINF), -math.pi)
self.ftest('atan2(-0., -2.3)', math.atan2(-0., -2.3), -math.pi)
self.ftest('atan2(-0., -0.)', math.atan2(-0., -0.), -math.pi)
self.assertEqual(math.atan2(-0., 0.), -0.)
self.assertEqual(math.atan2(-0., 2.3), -0.)
self.assertEqual(math.atan2(-0., INF), -0.)
self.assertTrue(math.isnan(math.atan2(-0., NAN)))
# math.atan2(INF, x)
self.ftest('atan2(inf, -inf)', math.atan2(INF, NINF), math.pi*3/4)
self.ftest('atan2(inf, -2.3)', math.atan2(INF, -2.3), math.pi/2)
self.ftest('atan2(inf, -0.)', math.atan2(INF, -0.0), math.pi/2)
self.ftest('atan2(inf, 0.)', math.atan2(INF, 0.0), math.pi/2)
self.ftest('atan2(inf, 2.3)', math.atan2(INF, 2.3), math.pi/2)
self.ftest('atan2(inf, inf)', math.atan2(INF, INF), math.pi/4)
self.assertTrue(math.isnan(math.atan2(INF, NAN)))
# math.atan2(NINF, x)
self.ftest('atan2(-inf, -inf)', math.atan2(NINF, NINF), -math.pi*3/4)
self.ftest('atan2(-inf, -2.3)', math.atan2(NINF, -2.3), -math.pi/2)
self.ftest('atan2(-inf, -0.)', math.atan2(NINF, -0.0), -math.pi/2)
self.ftest('atan2(-inf, 0.)', math.atan2(NINF, 0.0), -math.pi/2)
self.ftest('atan2(-inf, 2.3)', math.atan2(NINF, 2.3), -math.pi/2)
self.ftest('atan2(-inf, inf)', math.atan2(NINF, INF), -math.pi/4)
self.assertTrue(math.isnan(math.atan2(NINF, NAN)))
# math.atan2(+finite, x)
self.ftest('atan2(2.3, -inf)', math.atan2(2.3, NINF), math.pi)
self.ftest('atan2(2.3, -0.)', math.atan2(2.3, -0.), math.pi/2)
self.ftest('atan2(2.3, 0.)', math.atan2(2.3, 0.), math.pi/2)
self.assertEqual(math.atan2(2.3, INF), 0.)
self.assertTrue(math.isnan(math.atan2(2.3, NAN)))
# math.atan2(-finite, x)
self.ftest('atan2(-2.3, -inf)', math.atan2(-2.3, NINF), -math.pi)
self.ftest('atan2(-2.3, -0.)', math.atan2(-2.3, -0.), -math.pi/2)
self.ftest('atan2(-2.3, 0.)', math.atan2(-2.3, 0.), -math.pi/2)
self.assertEqual(math.atan2(-2.3, INF), -0.)
self.assertTrue(math.isnan(math.atan2(-2.3, NAN)))
# math.atan2(NAN, x)
self.assertTrue(math.isnan(math.atan2(NAN, NINF)))
self.assertTrue(math.isnan(math.atan2(NAN, -2.3)))
self.assertTrue(math.isnan(math.atan2(NAN, -0.)))
self.assertTrue(math.isnan(math.atan2(NAN, 0.)))
self.assertTrue(math.isnan(math.atan2(NAN, 2.3)))
self.assertTrue(math.isnan(math.atan2(NAN, INF)))
self.assertTrue(math.isnan(math.atan2(NAN, NAN)))
def testCeil(self):
self.assertRaises(TypeError, math.ceil)
# These types will be int in py3k.
self.assertEqual(float, type(math.ceil(1)))
self.assertEqual(float, type(math.ceil(1L)))
self.assertEqual(float, type(math.ceil(1.0)))
self.ftest('ceil(0.5)', math.ceil(0.5), 1)
self.ftest('ceil(1.0)', math.ceil(1.0), 1)
self.ftest('ceil(1.5)', math.ceil(1.5), 2)
self.ftest('ceil(-0.5)', math.ceil(-0.5), 0)
self.ftest('ceil(-1.0)', math.ceil(-1.0), -1)
self.ftest('ceil(-1.5)', math.ceil(-1.5), -1)
self.assertEqual(math.ceil(INF), INF)
self.assertEqual(math.ceil(NINF), NINF)
self.assertTrue(math.isnan(math.ceil(NAN)))
class TestCeil(object):
def __float__(self):
return 41.3
class TestNoCeil(object):
pass
self.ftest('ceil(TestCeil())', math.ceil(TestCeil()), 42)
self.assertRaises(TypeError, math.ceil, TestNoCeil())
t = TestNoCeil()
t.__ceil__ = lambda *args: args
self.assertRaises(TypeError, math.ceil, t)
self.assertRaises(TypeError, math.ceil, t, 0)
@requires_IEEE_754
def testCopysign(self):
self.assertEqual(math.copysign(1, 42), 1.0)
self.assertEqual(math.copysign(0., 42), 0.0)
self.assertEqual(math.copysign(1., -42), -1.0)
self.assertEqual(math.copysign(3, 0.), 3.0)
self.assertEqual(math.copysign(4., -0.), -4.0)
self.assertRaises(TypeError, math.copysign)
# copysign should let us distinguish signs of zeros
self.assertEqual(math.copysign(1., 0.), 1.)
self.assertEqual(math.copysign(1., -0.), -1.)
self.assertEqual(math.copysign(INF, 0.), INF)
self.assertEqual(math.copysign(INF, -0.), NINF)
self.assertEqual(math.copysign(NINF, 0.), INF)
self.assertEqual(math.copysign(NINF, -0.), NINF)
# and of infinities
self.assertEqual(math.copysign(1., INF), 1.)
self.assertEqual(math.copysign(1., NINF), -1.)
self.assertEqual(math.copysign(INF, INF), INF)
self.assertEqual(math.copysign(INF, NINF), NINF)
self.assertEqual(math.copysign(NINF, INF), INF)
self.assertEqual(math.copysign(NINF, NINF), NINF)
self.assertTrue(math.isnan(math.copysign(NAN, 1.)))
self.assertTrue(math.isnan(math.copysign(NAN, INF)))
self.assertTrue(math.isnan(math.copysign(NAN, NINF)))
self.assertTrue(math.isnan(math.copysign(NAN, NAN)))
# copysign(INF, NAN) may be INF or it may be NINF, since
# we don't know whether the sign bit of NAN is set on any
# given platform.
self.assertTrue(math.isinf(math.copysign(INF, NAN)))
# similarly, copysign(2., NAN) could be 2. or -2.
self.assertEqual(abs(math.copysign(2., NAN)), 2.)
def testCos(self):
self.assertRaises(TypeError, math.cos)
self.ftest('cos(-pi/2)', math.cos(-math.pi/2), 0)
self.ftest('cos(0)', math.cos(0), 1)
self.ftest('cos(pi/2)', math.cos(math.pi/2), 0)
self.ftest('cos(pi)', math.cos(math.pi), -1)
try:
self.assertTrue(math.isnan(math.cos(INF)))
self.assertTrue(math.isnan(math.cos(NINF)))
except ValueError:
self.assertRaises(ValueError, math.cos, INF)
self.assertRaises(ValueError, math.cos, NINF)
self.assertTrue(math.isnan(math.cos(NAN)))
def testCosh(self):
self.assertRaises(TypeError, math.cosh)
self.ftest('cosh(0)', math.cosh(0), 1)
self.ftest('cosh(2)-2*cosh(1)**2', math.cosh(2)-2*math.cosh(1)**2, -1) # Thanks to Lambert
self.assertEqual(math.cosh(INF), INF)
self.assertEqual(math.cosh(NINF), INF)
self.assertTrue(math.isnan(math.cosh(NAN)))
def testDegrees(self):
self.assertRaises(TypeError, math.degrees)
self.ftest('degrees(pi)', math.degrees(math.pi), 180.0)
self.ftest('degrees(pi/2)', math.degrees(math.pi/2), 90.0)
self.ftest('degrees(-pi/4)', math.degrees(-math.pi/4), -45.0)
def testExp(self):
self.assertRaises(TypeError, math.exp)
self.ftest('exp(-1)', math.exp(-1), 1/math.e)
self.ftest('exp(0)', math.exp(0), 1)
self.ftest('exp(1)', math.exp(1), math.e)
self.assertEqual(math.exp(INF), INF)
self.assertEqual(math.exp(NINF), 0.)
self.assertTrue(math.isnan(math.exp(NAN)))
def testFabs(self):
self.assertRaises(TypeError, math.fabs)
self.ftest('fabs(-1)', math.fabs(-1), 1)
self.ftest('fabs(0)', math.fabs(0), 0)
self.ftest('fabs(1)', math.fabs(1), 1)
def testFactorial(self):
def fact(n):
result = 1
for i in range(1, int(n)+1):
result *= i
return result
values = range(10) + [50, 100, 500]
random.shuffle(values)
for x in values:
for cast in (int, long, float):
self.assertEqual(math.factorial(cast(x)), fact(x), (x, fact(x), math.factorial(x)))
self.assertRaises(ValueError, math.factorial, -1)
self.assertRaises(ValueError, math.factorial, math.pi)
def testFloor(self):
self.assertRaises(TypeError, math.floor)
# These types will be int in py3k.
self.assertEqual(float, type(math.floor(1)))
self.assertEqual(float, type(math.floor(1L)))
self.assertEqual(float, type(math.floor(1.0)))
self.ftest('floor(0.5)', math.floor(0.5), 0)
self.ftest('floor(1.0)', math.floor(1.0), 1)
self.ftest('floor(1.5)', math.floor(1.5), 1)
self.ftest('floor(-0.5)', math.floor(-0.5), -1)
self.ftest('floor(-1.0)', math.floor(-1.0), -1)
self.ftest('floor(-1.5)', math.floor(-1.5), -2)
# pow() relies on floor() to check for integers
# This fails on some platforms - so check it here
self.ftest('floor(1.23e167)', math.floor(1.23e167), 1.23e167)
self.ftest('floor(-1.23e167)', math.floor(-1.23e167), -1.23e167)
        self.assertEqual(math.floor(INF), INF)
        self.assertEqual(math.floor(NINF), NINF)
self.assertTrue(math.isnan(math.floor(NAN)))
class TestFloor(object):
def __float__(self):
return 42.3
class TestNoFloor(object):
pass
self.ftest('floor(TestFloor())', math.floor(TestFloor()), 42)
self.assertRaises(TypeError, math.floor, TestNoFloor())
t = TestNoFloor()
t.__floor__ = lambda *args: args
self.assertRaises(TypeError, math.floor, t)
self.assertRaises(TypeError, math.floor, t, 0)
def testFmod(self):
self.assertRaises(TypeError, math.fmod)
self.ftest('fmod(10,1)', math.fmod(10,1), 0)
self.ftest('fmod(10,0.5)', math.fmod(10,0.5), 0)
self.ftest('fmod(10,1.5)', math.fmod(10,1.5), 1)
self.ftest('fmod(-10,1)', math.fmod(-10,1), 0)
self.ftest('fmod(-10,0.5)', math.fmod(-10,0.5), 0)
self.ftest('fmod(-10,1.5)', math.fmod(-10,1.5), -1)
self.assertTrue(math.isnan(math.fmod(NAN, 1.)))
self.assertTrue(math.isnan(math.fmod(1., NAN)))
self.assertTrue(math.isnan(math.fmod(NAN, NAN)))
self.assertRaises(ValueError, math.fmod, 1., 0.)
self.assertRaises(ValueError, math.fmod, INF, 1.)
self.assertRaises(ValueError, math.fmod, NINF, 1.)
self.assertRaises(ValueError, math.fmod, INF, 0.)
self.assertEqual(math.fmod(3.0, INF), 3.0)
self.assertEqual(math.fmod(-3.0, INF), -3.0)
self.assertEqual(math.fmod(3.0, NINF), 3.0)
self.assertEqual(math.fmod(-3.0, NINF), -3.0)
self.assertEqual(math.fmod(0.0, 3.0), 0.0)
self.assertEqual(math.fmod(0.0, NINF), 0.0)
def testFrexp(self):
self.assertRaises(TypeError, math.frexp)
def testfrexp(name, result, expected):
(mant, exp), (emant, eexp) = result, expected
if abs(mant-emant) > eps or exp != eexp:
self.fail('%s returned %r, expected %r'%\
(name, (mant, exp), (emant,eexp)))
testfrexp('frexp(-1)', math.frexp(-1), (-0.5, 1))
testfrexp('frexp(0)', math.frexp(0), (0, 0))
testfrexp('frexp(1)', math.frexp(1), (0.5, 1))
testfrexp('frexp(2)', math.frexp(2), (0.5, 2))
self.assertEqual(math.frexp(INF)[0], INF)
self.assertEqual(math.frexp(NINF)[0], NINF)
self.assertTrue(math.isnan(math.frexp(NAN)[0]))
@requires_IEEE_754
@unittest.skipIf(HAVE_DOUBLE_ROUNDING,
"fsum is not exact on machines with double rounding")
def testFsum(self):
# math.fsum relies on exact rounding for correct operation.
# There's a known problem with IA32 floating-point that causes
# inexact rounding in some situations, and will cause the
# math.fsum tests below to fail; see issue #2937. On non IEEE
# 754 platforms, and on IEEE 754 platforms that exhibit the
# problem described in issue #2937, we simply skip the whole
# test.
# Python version of math.fsum, for comparison. Uses a
# different algorithm based on frexp, ldexp and integer
# arithmetic.
from sys import float_info
mant_dig = float_info.mant_dig
etiny = float_info.min_exp - mant_dig
def msum(iterable):
"""Full precision summation. Compute sum(iterable) without any
intermediate accumulation of error. Based on the 'lsum' function
at http://code.activestate.com/recipes/393090/
"""
tmant, texp = 0, 0
for x in iterable:
mant, exp = math.frexp(x)
mant, exp = int(math.ldexp(mant, mant_dig)), exp - mant_dig
if texp > exp:
tmant <<= texp-exp
texp = exp
else:
mant <<= exp-texp
tmant += mant
# Round tmant * 2**texp to a float. The original recipe
# used float(str(tmant)) * 2.0**texp for this, but that's
# a little unsafe because str -> float conversion can't be
# relied upon to do correct rounding on all platforms.
tail = max(len(bin(abs(tmant)))-2 - mant_dig, etiny - texp)
if tail > 0:
h = 1 << (tail-1)
tmant = tmant // (2*h) + bool(tmant & h and tmant & 3*h-1)
texp += tail
return math.ldexp(tmant, texp)
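        # Note on msum's rounding step above (added for clarity): ``h`` is
        # half the unit in the last kept place, so one is added exactly when
        # the rounding bit (tmant & h) is set and either a lower sticky bit
        # or the lowest kept bit is also set -- i.e. round-half-to-even.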
test_values = [
([], 0.0),
([0.0], 0.0),
([1e100, 1.0, -1e100, 1e-100, 1e50, -1.0, -1e50], 1e-100),
([2.0**53, -0.5, -2.0**-54], 2.0**53-1.0),
([2.0**53, 1.0, 2.0**-100], 2.0**53+2.0),
([2.0**53+10.0, 1.0, 2.0**-100], 2.0**53+12.0),
([2.0**53-4.0, 0.5, 2.0**-54], 2.0**53-3.0),
([1./n for n in range(1, 1001)],
float.fromhex('0x1.df11f45f4e61ap+2')),
([(-1.)**n/n for n in range(1, 1001)],
float.fromhex('-0x1.62a2af1bd3624p-1')),
([1.7**(i+1)-1.7**i for i in range(1000)] + [-1.7**1000], -1.0),
([1e16, 1., 1e-16], 10000000000000002.0),
([1e16-2., 1.-2.**-53, -(1e16-2.), -(1.-2.**-53)], 0.0),
# exercise code for resizing partials array
([2.**n - 2.**(n+50) + 2.**(n+52) for n in range(-1074, 972, 2)] +
[-2.**1022],
float.fromhex('0x1.5555555555555p+970')),
]
for i, (vals, expected) in enumerate(test_values):
try:
actual = math.fsum(vals)
except OverflowError:
self.fail("test %d failed: got OverflowError, expected %r "
"for math.fsum(%.100r)" % (i, expected, vals))
except ValueError:
self.fail("test %d failed: got ValueError, expected %r "
"for math.fsum(%.100r)" % (i, expected, vals))
self.assertEqual(actual, expected)
from random import random, gauss, shuffle
for j in xrange(1000):
vals = [7, 1e100, -7, -1e100, -9e-20, 8e-20] * 10
s = 0
for i in xrange(200):
v = gauss(0, random()) ** 7 - s
s += v
vals.append(v)
shuffle(vals)
s = msum(vals)
self.assertEqual(msum(vals), math.fsum(vals))
def testHypot(self):
self.assertRaises(TypeError, math.hypot)
self.ftest('hypot(0,0)', math.hypot(0,0), 0)
self.ftest('hypot(3,4)', math.hypot(3,4), 5)
self.assertEqual(math.hypot(NAN, INF), INF)
self.assertEqual(math.hypot(INF, NAN), INF)
self.assertEqual(math.hypot(NAN, NINF), INF)
self.assertEqual(math.hypot(NINF, NAN), INF)
self.assertTrue(math.isnan(math.hypot(1.0, NAN)))
self.assertTrue(math.isnan(math.hypot(NAN, -2.0)))
def testLdexp(self):
self.assertRaises(TypeError, math.ldexp)
self.ftest('ldexp(0,1)', math.ldexp(0,1), 0)
self.ftest('ldexp(1,1)', math.ldexp(1,1), 2)
self.ftest('ldexp(1,-1)', math.ldexp(1,-1), 0.5)
self.ftest('ldexp(-1,1)', math.ldexp(-1,1), -2)
self.assertRaises(OverflowError, math.ldexp, 1., 1000000)
self.assertRaises(OverflowError, math.ldexp, -1., 1000000)
self.assertEqual(math.ldexp(1., -1000000), 0.)
self.assertEqual(math.ldexp(-1., -1000000), -0.)
self.assertEqual(math.ldexp(INF, 30), INF)
self.assertEqual(math.ldexp(NINF, -213), NINF)
self.assertTrue(math.isnan(math.ldexp(NAN, 0)))
# large second argument
for n in [10**5, 10L**5, 10**10, 10L**10, 10**20, 10**40]:
self.assertEqual(math.ldexp(INF, -n), INF)
self.assertEqual(math.ldexp(NINF, -n), NINF)
self.assertEqual(math.ldexp(1., -n), 0.)
self.assertEqual(math.ldexp(-1., -n), -0.)
self.assertEqual(math.ldexp(0., -n), 0.)
self.assertEqual(math.ldexp(-0., -n), -0.)
self.assertTrue(math.isnan(math.ldexp(NAN, -n)))
self.assertRaises(OverflowError, math.ldexp, 1., n)
self.assertRaises(OverflowError, math.ldexp, -1., n)
self.assertEqual(math.ldexp(0., n), 0.)
self.assertEqual(math.ldexp(-0., n), -0.)
self.assertEqual(math.ldexp(INF, n), INF)
self.assertEqual(math.ldexp(NINF, n), NINF)
self.assertTrue(math.isnan(math.ldexp(NAN, n)))
def testLog(self):
self.assertRaises(TypeError, math.log)
self.ftest('log(1/e)', math.log(1/math.e), -1)
self.ftest('log(1)', math.log(1), 0)
self.ftest('log(e)', math.log(math.e), 1)
self.ftest('log(32,2)', math.log(32,2), 5)
self.ftest('log(10**40, 10)', math.log(10**40, 10), 40)
self.ftest('log(10**40, 10**20)', math.log(10**40, 10**20), 2)
self.assertEqual(math.log(INF), INF)
self.assertRaises(ValueError, math.log, NINF)
self.assertTrue(math.isnan(math.log(NAN)))
def testLog1p(self):
self.assertRaises(TypeError, math.log1p)
self.ftest('log1p(1/e -1)', math.log1p(1/math.e-1), -1)
self.ftest('log1p(0)', math.log1p(0), 0)
self.ftest('log1p(e-1)', math.log1p(math.e-1), 1)
self.ftest('log1p(1)', math.log1p(1), math.log(2))
self.assertEqual(math.log1p(INF), INF)
self.assertRaises(ValueError, math.log1p, NINF)
self.assertTrue(math.isnan(math.log1p(NAN)))
        n = 2**90
self.assertAlmostEqual(math.log1p(n), 62.383246250395075)
self.assertAlmostEqual(math.log1p(n), math.log1p(float(n)))
def testLog10(self):
self.assertRaises(TypeError, math.log10)
self.ftest('log10(0.1)', math.log10(0.1), -1)
self.ftest('log10(1)', math.log10(1), 0)
self.ftest('log10(10)', math.log10(10), 1)
        self.assertEqual(math.log10(INF), INF)
self.assertRaises(ValueError, math.log10, NINF)
self.assertTrue(math.isnan(math.log10(NAN)))
def testModf(self):
self.assertRaises(TypeError, math.modf)
def testmodf(name, result, expected):
(v1, v2), (e1, e2) = result, expected
            if abs(v1-e1) > eps or abs(v2-e2) > eps:
self.fail('%s returned %r, expected %r'%\
(name, (v1,v2), (e1,e2)))
testmodf('modf(1.5)', math.modf(1.5), (0.5, 1.0))
testmodf('modf(-1.5)', math.modf(-1.5), (-0.5, -1.0))
self.assertEqual(math.modf(INF), (0.0, INF))
self.assertEqual(math.modf(NINF), (-0.0, NINF))
modf_nan = math.modf(NAN)
self.assertTrue(math.isnan(modf_nan[0]))
self.assertTrue(math.isnan(modf_nan[1]))
def testPow(self):
self.assertRaises(TypeError, math.pow)
self.ftest('pow(0,1)', math.pow(0,1), 0)
self.ftest('pow(1,0)', math.pow(1,0), 1)
self.ftest('pow(2,1)', math.pow(2,1), 2)
self.ftest('pow(2,-1)', math.pow(2,-1), 0.5)
self.assertEqual(math.pow(INF, 1), INF)
self.assertEqual(math.pow(NINF, 1), NINF)
self.assertEqual((math.pow(1, INF)), 1.)
self.assertEqual((math.pow(1, NINF)), 1.)
self.assertTrue(math.isnan(math.pow(NAN, 1)))
self.assertTrue(math.isnan(math.pow(2, NAN)))
self.assertTrue(math.isnan(math.pow(0, NAN)))
self.assertEqual(math.pow(1, NAN), 1)
# pow(0., x)
self.assertEqual(math.pow(0., INF), 0.)
self.assertEqual(math.pow(0., 3.), 0.)
self.assertEqual(math.pow(0., 2.3), 0.)
self.assertEqual(math.pow(0., 2.), 0.)
self.assertEqual(math.pow(0., 0.), 1.)
self.assertEqual(math.pow(0., -0.), 1.)
self.assertRaises(ValueError, math.pow, 0., -2.)
self.assertRaises(ValueError, math.pow, 0., -2.3)
self.assertRaises(ValueError, math.pow, 0., -3.)
self.assertRaises(ValueError, math.pow, 0., NINF)
self.assertTrue(math.isnan(math.pow(0., NAN)))
# pow(INF, x)
self.assertEqual(math.pow(INF, INF), INF)
self.assertEqual(math.pow(INF, 3.), INF)
self.assertEqual(math.pow(INF, 2.3), INF)
self.assertEqual(math.pow(INF, 2.), INF)
self.assertEqual(math.pow(INF, 0.), 1.)
self.assertEqual(math.pow(INF, -0.), 1.)
self.assertEqual(math.pow(INF, -2.), 0.)
self.assertEqual(math.pow(INF, -2.3), 0.)
self.assertEqual(math.pow(INF, -3.), 0.)
self.assertEqual(math.pow(INF, NINF), 0.)
self.assertTrue(math.isnan(math.pow(INF, NAN)))
# pow(-0., x)
self.assertEqual(math.pow(-0., INF), 0.)
self.assertEqual(math.pow(-0., 3.), -0.)
self.assertEqual(math.pow(-0., 2.3), 0.)
self.assertEqual(math.pow(-0., 2.), 0.)
self.assertEqual(math.pow(-0., 0.), 1.)
self.assertEqual(math.pow(-0., -0.), 1.)
self.assertRaises(ValueError, math.pow, -0., -2.)
self.assertRaises(ValueError, math.pow, -0., -2.3)
self.assertRaises(ValueError, math.pow, -0., -3.)
self.assertRaises(ValueError, math.pow, -0., NINF)
self.assertTrue(math.isnan(math.pow(-0., NAN)))
# pow(NINF, x)
self.assertEqual(math.pow(NINF, INF), INF)
self.assertEqual(math.pow(NINF, 3.), NINF)
self.assertEqual(math.pow(NINF, 2.3), INF)
self.assertEqual(math.pow(NINF, 2.), INF)
self.assertEqual(math.pow(NINF, 0.), 1.)
self.assertEqual(math.pow(NINF, -0.), 1.)
self.assertEqual(math.pow(NINF, -2.), 0.)
self.assertEqual(math.pow(NINF, -2.3), 0.)
self.assertEqual(math.pow(NINF, -3.), -0.)
self.assertEqual(math.pow(NINF, NINF), 0.)
self.assertTrue(math.isnan(math.pow(NINF, NAN)))
# pow(-1, x)
self.assertEqual(math.pow(-1., INF), 1.)
self.assertEqual(math.pow(-1., 3.), -1.)
self.assertRaises(ValueError, math.pow, -1., 2.3)
self.assertEqual(math.pow(-1., 2.), 1.)
self.assertEqual(math.pow(-1., 0.), 1.)
self.assertEqual(math.pow(-1., -0.), 1.)
self.assertEqual(math.pow(-1., -2.), 1.)
self.assertRaises(ValueError, math.pow, -1., -2.3)
self.assertEqual(math.pow(-1., -3.), -1.)
self.assertEqual(math.pow(-1., NINF), 1.)
self.assertTrue(math.isnan(math.pow(-1., NAN)))
# pow(1, x)
self.assertEqual(math.pow(1., INF), 1.)
self.assertEqual(math.pow(1., 3.), 1.)
self.assertEqual(math.pow(1., 2.3), 1.)
self.assertEqual(math.pow(1., 2.), 1.)
self.assertEqual(math.pow(1., 0.), 1.)
self.assertEqual(math.pow(1., -0.), 1.)
self.assertEqual(math.pow(1., -2.), 1.)
self.assertEqual(math.pow(1., -2.3), 1.)
self.assertEqual(math.pow(1., -3.), 1.)
self.assertEqual(math.pow(1., NINF), 1.)
self.assertEqual(math.pow(1., NAN), 1.)
# pow(x, 0) should be 1 for any x
self.assertEqual(math.pow(2.3, 0.), 1.)
self.assertEqual(math.pow(-2.3, 0.), 1.)
self.assertEqual(math.pow(NAN, 0.), 1.)
self.assertEqual(math.pow(2.3, -0.), 1.)
self.assertEqual(math.pow(-2.3, -0.), 1.)
self.assertEqual(math.pow(NAN, -0.), 1.)
# pow(x, y) is invalid if x is negative and y is not integral
self.assertRaises(ValueError, math.pow, -1., 2.3)
self.assertRaises(ValueError, math.pow, -15., -3.1)
# pow(x, NINF)
self.assertEqual(math.pow(1.9, NINF), 0.)
self.assertEqual(math.pow(1.1, NINF), 0.)
self.assertEqual(math.pow(0.9, NINF), INF)
self.assertEqual(math.pow(0.1, NINF), INF)
self.assertEqual(math.pow(-0.1, NINF), INF)
self.assertEqual(math.pow(-0.9, NINF), INF)
self.assertEqual(math.pow(-1.1, NINF), 0.)
self.assertEqual(math.pow(-1.9, NINF), 0.)
# pow(x, INF)
self.assertEqual(math.pow(1.9, INF), INF)
self.assertEqual(math.pow(1.1, INF), INF)
self.assertEqual(math.pow(0.9, INF), 0.)
self.assertEqual(math.pow(0.1, INF), 0.)
self.assertEqual(math.pow(-0.1, INF), 0.)
self.assertEqual(math.pow(-0.9, INF), 0.)
self.assertEqual(math.pow(-1.1, INF), INF)
self.assertEqual(math.pow(-1.9, INF), INF)
# pow(x, y) should work for x negative, y an integer
self.ftest('(-2.)**3.', math.pow(-2.0, 3.0), -8.0)
self.ftest('(-2.)**2.', math.pow(-2.0, 2.0), 4.0)
self.ftest('(-2.)**1.', math.pow(-2.0, 1.0), -2.0)
self.ftest('(-2.)**0.', math.pow(-2.0, 0.0), 1.0)
self.ftest('(-2.)**-0.', math.pow(-2.0, -0.0), 1.0)
self.ftest('(-2.)**-1.', math.pow(-2.0, -1.0), -0.5)
self.ftest('(-2.)**-2.', math.pow(-2.0, -2.0), 0.25)
self.ftest('(-2.)**-3.', math.pow(-2.0, -3.0), -0.125)
self.assertRaises(ValueError, math.pow, -2.0, -0.5)
self.assertRaises(ValueError, math.pow, -2.0, 0.5)
# the following tests have been commented out since they don't
# really belong here: the implementation of ** for floats is
# independent of the implementation of math.pow
#self.assertEqual(1**NAN, 1)
#self.assertEqual(1**INF, 1)
#self.assertEqual(1**NINF, 1)
#self.assertEqual(1**0, 1)
#self.assertEqual(1.**NAN, 1)
#self.assertEqual(1.**INF, 1)
#self.assertEqual(1.**NINF, 1)
#self.assertEqual(1.**0, 1)
def testRadians(self):
self.assertRaises(TypeError, math.radians)
self.ftest('radians(180)', math.radians(180), math.pi)
self.ftest('radians(90)', math.radians(90), math.pi/2)
self.ftest('radians(-45)', math.radians(-45), -math.pi/4)
def testSin(self):
self.assertRaises(TypeError, math.sin)
self.ftest('sin(0)', math.sin(0), 0)
self.ftest('sin(pi/2)', math.sin(math.pi/2), 1)
self.ftest('sin(-pi/2)', math.sin(-math.pi/2), -1)
try:
self.assertTrue(math.isnan(math.sin(INF)))
self.assertTrue(math.isnan(math.sin(NINF)))
except ValueError:
self.assertRaises(ValueError, math.sin, INF)
self.assertRaises(ValueError, math.sin, NINF)
self.assertTrue(math.isnan(math.sin(NAN)))
def testSinh(self):
self.assertRaises(TypeError, math.sinh)
self.ftest('sinh(0)', math.sinh(0), 0)
self.ftest('sinh(1)**2-cosh(1)**2', math.sinh(1)**2-math.cosh(1)**2, -1)
self.ftest('sinh(1)+sinh(-1)', math.sinh(1)+math.sinh(-1), 0)
self.assertEqual(math.sinh(INF), INF)
self.assertEqual(math.sinh(NINF), NINF)
self.assertTrue(math.isnan(math.sinh(NAN)))
def testSqrt(self):
self.assertRaises(TypeError, math.sqrt)
self.ftest('sqrt(0)', math.sqrt(0), 0)
self.ftest('sqrt(1)', math.sqrt(1), 1)
self.ftest('sqrt(4)', math.sqrt(4), 2)
self.assertEqual(math.sqrt(INF), INF)
self.assertRaises(ValueError, math.sqrt, NINF)
self.assertTrue(math.isnan(math.sqrt(NAN)))
def testTan(self):
self.assertRaises(TypeError, math.tan)
self.ftest('tan(0)', math.tan(0), 0)
self.ftest('tan(pi/4)', math.tan(math.pi/4), 1)
self.ftest('tan(-pi/4)', math.tan(-math.pi/4), -1)
try:
self.assertTrue(math.isnan(math.tan(INF)))
self.assertTrue(math.isnan(math.tan(NINF)))
        except ValueError:
self.assertRaises(ValueError, math.tan, INF)
self.assertRaises(ValueError, math.tan, NINF)
self.assertTrue(math.isnan(math.tan(NAN)))
def testTanh(self):
self.assertRaises(TypeError, math.tanh)
self.ftest('tanh(0)', math.tanh(0), 0)
self.ftest('tanh(1)+tanh(-1)', math.tanh(1)+math.tanh(-1), 0)
self.ftest('tanh(inf)', math.tanh(INF), 1)
self.ftest('tanh(-inf)', math.tanh(NINF), -1)
self.assertTrue(math.isnan(math.tanh(NAN)))
# check that tanh(-0.) == -0. on IEEE 754 systems
if float.__getformat__("double").startswith("IEEE"):
self.assertEqual(math.tanh(-0.), -0.)
self.assertEqual(math.copysign(1., math.tanh(-0.)),
math.copysign(1., -0.))
def test_trunc(self):
self.assertEqual(math.trunc(1), 1)
self.assertEqual(math.trunc(-1), -1)
self.assertEqual(type(math.trunc(1)), int)
self.assertEqual(type(math.trunc(1.5)), int)
self.assertEqual(math.trunc(1.5), 1)
self.assertEqual(math.trunc(-1.5), -1)
self.assertEqual(math.trunc(1.999999), 1)
self.assertEqual(math.trunc(-1.999999), -1)
self.assertEqual(math.trunc(-0.999999), -0)
self.assertEqual(math.trunc(-100.999), -100)
class TestTrunc(object):
def __trunc__(self):
return 23
class TestNoTrunc(object):
pass
self.assertEqual(math.trunc(TestTrunc()), 23)
self.assertRaises(TypeError, math.trunc)
self.assertRaises(TypeError, math.trunc, 1, 2)
self.assertRaises((AttributeError, TypeError), math.trunc,
TestNoTrunc())
def testIsnan(self):
self.assertTrue(math.isnan(float("nan")))
self.assertTrue(math.isnan(float("inf")* 0.))
self.assertFalse(math.isnan(float("inf")))
self.assertFalse(math.isnan(0.))
self.assertFalse(math.isnan(1.))
def testIsinf(self):
self.assertTrue(math.isinf(float("inf")))
self.assertTrue(math.isinf(float("-inf")))
self.assertTrue(math.isinf(1E400))
self.assertTrue(math.isinf(-1E400))
self.assertFalse(math.isinf(float("nan")))
self.assertFalse(math.isinf(0.))
self.assertFalse(math.isinf(1.))
# RED_FLAG 16-Oct-2000 Tim
# While 2.0 is more consistent about exceptions than previous releases, it
# still fails this part of the test on some platforms. For now, we only
# *run* test_exceptions() in verbose mode, so that this isn't normally
# tested.
if verbose:
def test_exceptions(self):
try:
x = math.exp(-1000000000)
except:
# mathmodule.c is failing to weed out underflows from libm, or
# we've got an fp format with huge dynamic range
self.fail("underflowing exp() should not have raised "
"an exception")
if x != 0:
self.fail("underflowing exp() should have returned 0")
# If this fails, probably using a strict IEEE-754 conforming libm, and x
# is +Inf afterwards. But Python wants overflows detected by default.
try:
x = math.exp(1000000000)
except OverflowError:
pass
else:
self.fail("overflowing exp() didn't trigger OverflowError")
# If this fails, it could be a puzzle. One odd possibility is that
# mathmodule.c's macros are getting confused while comparing
# Inf (HUGE_VAL) to a NaN, and artificially setting errno to ERANGE
# as a result (and so raising OverflowError instead).
try:
x = math.sqrt(-1.0)
except ValueError:
pass
else:
self.fail("sqrt(-1) didn't raise ValueError")
@requires_IEEE_754
def test_testfile(self):
for id, fn, ar, ai, er, ei, flags in parse_testfile(test_file):
# Skip if either the input or result is complex, or if
# flags is nonempty
if ai != 0. or ei != 0. or flags:
continue
if fn in ['rect', 'polar']:
# no real versions of rect, polar
continue
func = getattr(math, fn)
try:
result = func(ar)
except ValueError:
message = ("Unexpected ValueError in " +
"test %s:%s(%r)\n" % (id, fn, ar))
self.fail(message)
except OverflowError:
message = ("Unexpected OverflowError in " +
"test %s:%s(%r)\n" % (id, fn, ar))
self.fail(message)
self.ftest("%s:%s(%r)" % (id, fn, ar), result, er)
@unittest.skipUnless(float.__getformat__("double").startswith("IEEE"),
"test requires IEEE 754 doubles")
def test_mtestfile(self):
ALLOWED_ERROR = 20 # permitted error, in ulps
fail_fmt = "{}:{}({!r}): expected {!r}, got {!r}"
failures = []
for id, fn, arg, expected, flags in parse_mtestfile(math_testcases):
func = getattr(math, fn)
if 'invalid' in flags or 'divide-by-zero' in flags:
expected = 'ValueError'
elif 'overflow' in flags:
expected = 'OverflowError'
try:
got = func(arg)
except ValueError:
got = 'ValueError'
except OverflowError:
got = 'OverflowError'
accuracy_failure = None
if isinstance(got, float) and isinstance(expected, float):
if math.isnan(expected) and math.isnan(got):
continue
if not math.isnan(expected) and not math.isnan(got):
if fn == 'lgamma':
# we use a weaker accuracy test for lgamma;
# lgamma only achieves an absolute error of
# a few multiples of the machine accuracy, in
# general.
accuracy_failure = acc_check(expected, got,
rel_err = 5e-15,
abs_err = 5e-15)
elif fn == 'erfc':
# erfc has less-than-ideal accuracy for large
# arguments (x ~ 25 or so), mainly due to the
# error involved in computing exp(-x*x).
#
# XXX Would be better to weaken this test only
# for large x, instead of for all x.
accuracy_failure = ulps_check(expected, got, 2000)
else:
                        accuracy_failure = ulps_check(expected, got, ALLOWED_ERROR)
if accuracy_failure is None:
continue
if isinstance(got, str) and isinstance(expected, str):
if got == expected:
continue
fail_msg = fail_fmt.format(id, fn, arg, expected, got)
if accuracy_failure is not None:
fail_msg += ' ({})'.format(accuracy_failure)
failures.append(fail_msg)
if failures:
self.fail('Failures in test_mtestfile:\n ' +
'\n '.join(failures))
def test_main():
from doctest import DocFileSuite
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(MathTests))
suite.addTest(DocFileSuite("ieee754.txt"))
run_unittest(suite)
if __name__ == '__main__':
test_main()
|
tbinias/heekscnc
|
refs/heads/master
|
pycnc/wxNiceTextCtrl.py
|
25
|
import wx
import HeeksCNC
class DoubleCtrl(wx.TextCtrl):
def __init__(self, parent, id = wx.ID_ANY, factor = 1.0):
wx.TextCtrl.__init__(self, parent, id)
self.factor = factor
def GetValue(self):
try:
return float(wx.TextCtrl.GetValue(self))/self.factor
        except ValueError:
            # Treat unparsable text as zero rather than raising
            return 0.0
def DoubleToString(self, value):
return str(value * self.factor)
def SetValue(self, value):
wx.TextCtrl.SetValue(self, self.DoubleToString(value))
class LengthCtrl(DoubleCtrl):
def __init__(self, parent, id = wx.ID_ANY):
factor = 1.0/HeeksCNC.cad.get_view_units()
DoubleCtrl.__init__(self, parent, id, factor)
class GeomCtrl(wx.TextCtrl):
# for now this is just a list of profile names with quotes around them and spaces between them, but later it might have a diagram showing the geometry
def __init__(self, parent, id):
wx.TextCtrl.__init__(self, parent, id)
def GetGeomList(self):
        text = wx.TextCtrl.GetValue(self)
        text = text.replace('\\', '/')
        s = ""
        geom_list = []
        name_started = False
        for ch in text:
            if ch == '"':
                if name_started:
                    geom_list.append(s)
                    s = ""
                    name_started = False
                else:
                    name_started = True
            elif ch == " " and not name_started:
                if len(s) > 0:
                    geom_list.append(s)
                    s = ""
            else:
                s += ch
        if len(s) > 0:
            geom_list.append(s)
        return geom_list
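    # Example (illustrative): GetGeomList() on the text
    #   "profile 1" profile2 "profile 3"
    # returns ['profile 1', 'profile2', 'profile 3']; quoted names may
    # contain spaces, unquoted names are split on spaces.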
def SetFromGeomList(self, geom_list):
        # Join names with single spaces; equivalent to the original loop
        wx.TextCtrl.SetValue(self, " ".join(geom_list))
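# Minimal usage sketch (assumes a running wx.App and an initialised HeeksCNC
# environment; the factor value here is illustrative):
#   ctrl = DoubleCtrl(parent, factor=25.4)
#   ctrl.SetValue(1.0)            # the text box then shows "25.4"
#   assert ctrl.GetValue() == 1.0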
|
jjhelmus/adventofcode
|
refs/heads/master
|
day09.py
|
1
|
from __future__ import print_function
import itertools
verbose = False
# read in the locations and distances from the file
#f = open('inputs/input09_test.txt')
f = open('inputs/input09.txt')
path = {}
locations = []
for line in f:
lline = line.split()
city1 = lline[0]
city2 = lline[2]
distance = int(lline[4])
path[city1 + city2] = distance
path[city2 + city1] = distance
locations.append(city1)
locations.append(city2)
f.close()
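# Illustrative: an input line "London to Dublin = 464" yields
#   path["LondonDublin"] == path["DublinLondon"] == 464
# so the route lookups below work in either travel direction.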
locations = set(locations) # find unique locations
if verbose:
print(locations)
print(path)
# find shortest route
shortest_route_length = float('inf')  # any real route will be shorter
for route in itertools.permutations(locations):
route_length = 0
for city1, city2 in zip(route[:-1], route[1:]):
route_length += path[city1 + city2]
if verbose:
print(route, route_length)
if route_length < shortest_route_length:
shortest_route_length = route_length
print("Shortest route length:", shortest_route_length)
# find longest route
longest_route_length = 0
for route in itertools.permutations(locations):
route_length = 0
for city1, city2 in zip(route[:-1], route[1:]):
route_length += path[city1 + city2]
if verbose:
print(route, route_length)
if route_length > longest_route_length:
longest_route_length = route_length
print("Longest route length:", longest_route_length)
|
dan-gittik/example
|
refs/heads/master
|
example/__main__.py
|
1
|
from .foo import Foo
from .bar import Bar
def main(*argv):
if len(argv) == 1 and argv[0] == 'foo':
foo = Foo()
print(foo.run())
return 0
if len(argv) == 1 and argv[0] == 'bar':
bar = Bar()
print(bar.run())
return 0
print('USAGE: python -m example <foo|bar>')
return 1
if __name__ == '__main__':
import sys
sys.exit(main(*sys.argv[1:]))
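# Usage sketch (run from the directory containing the package):
#   $ python -m example foo   # prints Foo().run(), exit status 0
#   $ python -m example bar   # prints Bar().run(), exit status 0
#   $ python -m example       # prints the usage line, exit status 1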
|
xeddmc/PyBitmessage
|
refs/heads/master
|
src/bitmessagecurses/__init__.py
|
12
|
# Copyright (c) 2014 Luke Montalvo <lukemontalvo@gmail.com>
# This file adds an alternative command-line interface; feel free to critique and fork it.
#
# This has only been tested on Arch Linux and Linux Mint
# Dependencies:
# * from python2-pip
# * python2-pythondialog
# * dialog
import os
import sys
from textwrap import fill
import time
from threading import Timer
import curses
import dialog
from dialog import Dialog
from helper_sql import *
import shared
import ConfigParser
from addresses import *
from pyelliptic.openssl import OpenSSL
import l10n
quit = False
menutab = 1
menu = ["Inbox", "Send", "Sent", "Your Identities", "Subscriptions", "Address Book", "Blacklist", "Network Status"]
naptime = 100
log = ""
logpad = None
inventorydata = 0
startuptime = time.time()
inbox = []
inboxcur = 0
sentbox = []
sentcur = 0
addresses = []
addrcur = 0
addrcopy = 0
subscriptions = []
subcur = 0
addrbook = []
abookcur = 0
blacklist = []
blackcur = 0
bwtype = "black"
BROADCAST_STR = "[Broadcast subscribers]"
class printLog:
def write(self, output):
global log
log += output
def flush(self):
pass
class errLog:
def write(self, output):
global log
log += "!"+output
def flush(self):
pass
printlog = printLog()
errlog = errLog()
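# Illustrative note: runwrapper() below points sys.stdout at printlog, so
# everything printed anywhere in the process accumulates in the global
# `log` string; errLog prefixes lines with "!", which drawtab() renders in
# red (sys.stderr redirection is currently commented out in runwrapper()).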
def cpair(a):
r = curses.color_pair(a)
if r not in range(1, curses.COLOR_PAIRS-1):
r = curses.color_pair(0)
return r
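# Illustrative: cpair(9) falls back to the default colour pair on terminals
# that report fewer colour pairs than run() below tries to initialise.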
def ascii(s):
r = ""
for c in s:
if ord(c) in range(128):
r += c
return r
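# Example (illustrative): ascii("caf\xc3\xa9") drops the two bytes of the
# UTF-8 encoded e-acute and returns "caf"; only 7-bit characters survive.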
def drawmenu(stdscr):
menustr = " "
for i in range(0, len(menu)):
if menutab == i+1:
menustr = menustr[:-1]
menustr += "["
menustr += str(i+1)+menu[i]
if menutab == i+1:
menustr += "] "
elif i != len(menu)-1:
menustr += " "
stdscr.addstr(2, 5, menustr, curses.A_UNDERLINE)
def resetlookups():
global inventorydata
inventorydata = shared.numberOfInventoryLookupsPerformed
shared.numberOfInventoryLookupsPerformed = 0
Timer(1, resetlookups, ()).start()
def drawtab(stdscr):
if menutab in range(1, len(menu)+1):
if menutab == 1: # Inbox
stdscr.addstr(3, 5, "To", curses.A_BOLD)
stdscr.addstr(3, 40, "From", curses.A_BOLD)
stdscr.addstr(3, 80, "Subject", curses.A_BOLD)
stdscr.addstr(3, 120, "Time Received", curses.A_BOLD)
stdscr.hline(4, 5, '-', 121)
for i, item in enumerate(inbox[max(min(len(inbox)-curses.LINES+6, inboxcur-5), 0):]):
if 6+i < curses.LINES:
a = 0
                    if i == inboxcur - max(min(len(inbox)-curses.LINES+6, inboxcur-5), 0): # Highlight the selected message
a = a | curses.A_REVERSE
if item[7] == False: # If not read, highlight
a = a | curses.A_BOLD
stdscr.addstr(5+i, 5, item[1][:34], a)
stdscr.addstr(5+i, 40, item[3][:39], a)
stdscr.addstr(5+i, 80, item[5][:39], a)
stdscr.addstr(5+i, 120, item[6][:39], a)
elif menutab == 3: # Sent
stdscr.addstr(3, 5, "To", curses.A_BOLD)
stdscr.addstr(3, 40, "From", curses.A_BOLD)
stdscr.addstr(3, 80, "Subject", curses.A_BOLD)
stdscr.addstr(3, 120, "Status", curses.A_BOLD)
stdscr.hline(4, 5, '-', 121)
for i, item in enumerate(sentbox[max(min(len(sentbox)-curses.LINES+6, sentcur-5), 0):]):
if 6+i < curses.LINES:
a = 0
                    if i == sentcur - max(min(len(sentbox)-curses.LINES+6, sentcur-5), 0): # Highlight the selected message
a = a | curses.A_REVERSE
stdscr.addstr(5+i, 5, item[0][:34], a)
stdscr.addstr(5+i, 40, item[2][:39], a)
stdscr.addstr(5+i, 80, item[4][:39], a)
stdscr.addstr(5+i, 120, item[5][:39], a)
elif menutab == 2 or menutab == 4: # Send or Identities
stdscr.addstr(3, 5, "Label", curses.A_BOLD)
stdscr.addstr(3, 40, "Address", curses.A_BOLD)
stdscr.addstr(3, 80, "Stream", curses.A_BOLD)
stdscr.hline(4, 5, '-', 81)
for i, item in enumerate(addresses[max(min(len(addresses)-curses.LINES+6, addrcur-5), 0):]):
if 6+i < curses.LINES:
a = 0
if i == addrcur - max(min(len(addresses)-curses.LINES+6, addrcur-5), 0): # Highlight current address
a = a | curses.A_REVERSE
if item[1] == True and item[3] not in [8,9]: # Embolden enabled, non-special addresses
a = a | curses.A_BOLD
stdscr.addstr(5+i, 5, item[0][:34], a)
stdscr.addstr(5+i, 40, item[2][:39], cpair(item[3]) | a)
                    stdscr.addstr(5+i, 80, str(decodeAddress(item[2])[2])[:39], a) # show the address's actual stream number
elif menutab == 5: # Subscriptions
stdscr.addstr(3, 5, "Label", curses.A_BOLD)
stdscr.addstr(3, 80, "Address", curses.A_BOLD)
stdscr.addstr(3, 120, "Enabled", curses.A_BOLD)
stdscr.hline(4, 5, '-', 121)
for i, item in enumerate(subscriptions[max(min(len(subscriptions)-curses.LINES+6, subcur-5), 0):]):
if 6+i < curses.LINES:
a = 0
                    if i == subcur - max(min(len(subscriptions)-curses.LINES+6, subcur-5), 0): # Highlight the selected subscription
a = a | curses.A_REVERSE
if item[2] == True: # Embolden enabled subscriptions
a = a | curses.A_BOLD
stdscr.addstr(5+i, 5, item[0][:74], a)
stdscr.addstr(5+i, 80, item[1][:39], a)
stdscr.addstr(5+i, 120, str(item[2]), a)
elif menutab == 6: # Address book
stdscr.addstr(3, 5, "Label", curses.A_BOLD)
stdscr.addstr(3, 40, "Address", curses.A_BOLD)
stdscr.hline(4, 5, '-', 41)
for i, item in enumerate(addrbook[max(min(len(addrbook)-curses.LINES+6, abookcur-5), 0):]):
if 6+i < curses.LINES:
a = 0
if i == abookcur - max(min(len(addrbook)-curses.LINES+6, abookcur-5), 0): # Highlight current address
a = a | curses.A_REVERSE
stdscr.addstr(5+i, 5, item[0][:34], a)
stdscr.addstr(5+i, 40, item[1][:39], a)
elif menutab == 7: # Blacklist
stdscr.addstr(3, 5, "Type: "+bwtype)
stdscr.addstr(4, 5, "Label", curses.A_BOLD)
stdscr.addstr(4, 80, "Address", curses.A_BOLD)
stdscr.addstr(4, 120, "Enabled", curses.A_BOLD)
stdscr.hline(5, 5, '-', 121)
for i, item in enumerate(blacklist[max(min(len(blacklist)-curses.LINES+6, blackcur-5), 0):]):
if 7+i < curses.LINES:
a = 0
                    if i == blackcur - max(min(len(blacklist)-curses.LINES+6, blackcur-5), 0): # Highlight the selected entry
a = a | curses.A_REVERSE
                    if item[2] == True: # Embolden enabled entries
a = a | curses.A_BOLD
stdscr.addstr(6+i, 5, item[0][:74], a)
stdscr.addstr(6+i, 80, item[1][:39], a)
stdscr.addstr(6+i, 120, str(item[2]), a)
elif menutab == 8: # Network status
# Connection data
stdscr.addstr(4, 5, "Total Connections: "+str(len(shared.connectedHostsList)).ljust(2))
stdscr.addstr(6, 6, "Stream #", curses.A_BOLD)
stdscr.addstr(6, 18, "Connections", curses.A_BOLD)
stdscr.hline(7, 6, '-', 23)
streamcount = []
for host, stream in shared.connectedHostsList.items():
if stream >= len(streamcount):
streamcount.append(1)
else:
streamcount[stream] += 1
for i, item in enumerate(streamcount):
if i < 4:
if i == 0:
stdscr.addstr(8+i, 6, "?")
else:
stdscr.addstr(8+i, 6, str(i))
stdscr.addstr(8+i, 18, str(item).ljust(2))
# Uptime and processing data
stdscr.addstr(6, 35, "Since startup on "+l10n.formatTimestamp(startuptime, False))
stdscr.addstr(7, 40, "Processed "+str(shared.numberOfMessagesProcessed).ljust(4)+" person-to-person messages.")
stdscr.addstr(8, 40, "Processed "+str(shared.numberOfBroadcastsProcessed).ljust(4)+" broadcast messages.")
stdscr.addstr(9, 40, "Processed "+str(shared.numberOfPubkeysProcessed).ljust(4)+" public keys.")
# Inventory data
stdscr.addstr(11, 35, "Inventory lookups per second: "+str(inventorydata).ljust(3))
# Log
stdscr.addstr(13, 6, "Log", curses.A_BOLD)
n = log.count('\n')
if n > 0:
l = log.split('\n')
if n > 512:
del l[:(n-256)]
logpad.erase()
n = len(l)
for i, item in enumerate(l):
a = 0
if len(item) > 0 and item[0] == '!':
a = curses.color_pair(1)
item = item[1:]
logpad.addstr(i, 0, item, a)
logpad.refresh(n-curses.LINES+2, 0, 14, 6, curses.LINES-2, curses.COLS-7)
stdscr.refresh()
def redraw(stdscr):
stdscr.erase()
stdscr.border()
drawmenu(stdscr)
stdscr.refresh()
def dialogreset(stdscr):
stdscr.clear()
stdscr.keypad(1)
curses.curs_set(0)
def handlech(c, stdscr):
if c != curses.ERR:
global inboxcur, addrcur, sentcur, subcur, abookcur, blackcur
if c in range(256):
if chr(c) in '12345678':
global menutab
menutab = int(chr(c))
elif chr(c) == 'q':
global quit
quit = True
elif chr(c) == '\n':
curses.curs_set(1)
d = Dialog(dialog="dialog")
if menutab == 1:
d.set_background_title("Inbox Message Dialog Box")
r, t = d.menu("Do what with \""+inbox[inboxcur][5]+"\" from \""+inbox[inboxcur][3]+"\"?",
choices=[("1", "View message"),
("2", "Mark message as unread"),
("3", "Reply"),
("4", "Add sender to Address Book"),
("5", "Save message as text file"),
("6", "Move to trash")])
if r == d.DIALOG_OK:
if t == "1": # View
d.set_background_title("\""+inbox[inboxcur][5]+"\" from \""+inbox[inboxcur][3]+"\" to \""+inbox[inboxcur][1]+"\"")
data = ""
ret = sqlQuery("SELECT message FROM inbox WHERE msgid=?", inbox[inboxcur][0])
if ret != []:
for row in ret:
data, = row
data = shared.fixPotentiallyInvalidUTF8Data(data)
msg = ""
for i, item in enumerate(data.split("\n")):
msg += fill(item, replace_whitespace=False)+"\n"
d.scrollbox(unicode(ascii(msg)), 30, 80, exit_label="Continue")
sqlExecute("UPDATE inbox SET read=1 WHERE msgid=?", inbox[inboxcur][0])
inbox[inboxcur][7] = 1
else:
d.scrollbox(unicode("Could not fetch message."), exit_label="Continue")
elif t == "2": # Mark unread
sqlExecute("UPDATE inbox SET read=0 WHERE msgid=?", inbox[inboxcur][0])
inbox[inboxcur][7] = 0
elif t == "3": # Reply
curses.curs_set(1)
m = inbox[inboxcur]
fromaddr = m[4]
                            ischan = False
                            sender_enabled = True
                            for i, item in enumerate(addresses):
                                if fromaddr == item[2] and item[3] != 0:
                                    ischan = True
                                    sender_enabled = item[1]
                                    break
                            if not sender_enabled:
                                d.scrollbox(unicode("Sending address disabled, please either enable it or choose a different address."), exit_label="Continue")
                                return
toaddr = m[2]
if ischan:
toaddr = fromaddr
subject = m[5]
if not m[5][:4] == "Re: ":
subject = "Re: "+m[5]
body = ""
ret = sqlQuery("SELECT message FROM inbox WHERE msgid=?", m[0])
if ret != []:
body = "\n\n------------------------------------------------------\n"
for row in ret:
body, = row
sendMessage(fromaddr, toaddr, ischan, subject, body, True)
dialogreset(stdscr)
elif t == "4": # Add to Address Book
global addrbook
addr = inbox[inboxcur][4]
if addr not in [item[1] for i,item in enumerate(addrbook)]:
r, t = d.inputbox("Label for address \""+addr+"\"")
if r == d.DIALOG_OK:
label = t
sqlExecute("INSERT INTO addressbook VALUES (?,?)", label, addr)
# Prepend entry
addrbook.reverse()
addrbook.append([label, addr])
addrbook.reverse()
else:
d.scrollbox(unicode("The selected address is already in the Address Book."), exit_label="Continue")
elif t == "5": # Save message
d.set_background_title("Save \""+inbox[inboxcur][5]+"\" as text file")
r, t = d.inputbox("Filename", init=inbox[inboxcur][5]+".txt")
if r == d.DIALOG_OK:
msg = ""
ret = sqlQuery("SELECT message FROM inbox WHERE msgid=?", inbox[inboxcur][0])
if ret != []:
for row in ret:
msg, = row
fh = open(t, "a") # Open in append mode just in case
fh.write(msg)
fh.close()
else:
d.scrollbox(unicode("Could not fetch message."), exit_label="Continue")
elif t == "6": # Move to trash
sqlExecute("UPDATE inbox SET folder='trash' WHERE msgid=?", inbox[inboxcur][0])
del inbox[inboxcur]
d.scrollbox(unicode("Message moved to trash. There is no interface to view your trash, \nbut the message is still on disk if you are desperate to recover it."),
exit_label="Continue")
elif menutab == 2:
a = ""
if addresses[addrcur][3] != 0: # if current address is a chan
a = addresses[addrcur][2]
sendMessage(addresses[addrcur][2], a)
elif menutab == 3:
d.set_background_title("Sent Messages Dialog Box")
r, t = d.menu("Do what with \""+sentbox[sentcur][4]+"\" to \""+sentbox[sentcur][0]+"\"?",
choices=[("1", "View message"),
("2", "Move to trash")])
if r == d.DIALOG_OK:
if t == "1": # View
d.set_background_title("\""+sentbox[sentcur][4]+"\" from \""+sentbox[sentcur][3]+"\" to \""+sentbox[sentcur][1]+"\"")
data = ""
ret = sqlQuery("SELECT message FROM sent WHERE subject=? AND ackdata=?", sentbox[sentcur][4], sentbox[sentcur][6])
if ret != []:
for row in ret:
data, = row
data = shared.fixPotentiallyInvalidUTF8Data(data)
msg = ""
for i, item in enumerate(data.split("\n")):
msg += fill(item, replace_whitespace=False)+"\n"
d.scrollbox(unicode(ascii(msg)), 30, 80, exit_label="Continue")
else:
d.scrollbox(unicode("Could not fetch message."), exit_label="Continue")
elif t == "2": # Move to trash
sqlExecute("UPDATE sent SET folder='trash' WHERE subject=? AND ackdata=?", sentbox[sentcur][4], sentbox[sentcur][6])
del sentbox[sentcur]
d.scrollbox(unicode("Message moved to trash. There is no interface to view your trash, \nbut the message is still on disk if you are desperate to recover it."),
exit_label="Continue")
elif menutab == 4:
d.set_background_title("Your Identities Dialog Box")
r, t = d.menu("Do what with \""+addresses[addrcur][0]+"\" : \""+addresses[addrcur][2]+"\"?",
choices=[("1", "Create new address"),
("2", "Send a message from this address"),
("3", "Rename"),
("4", "Enable"),
("5", "Disable"),
("6", "Delete"),
("7", "Special address behavior")])
if r == d.DIALOG_OK:
if t == "1": # Create new address
d.set_background_title("Create new address")
d.scrollbox(unicode("Here you may generate as many addresses as you like.\n"
"Indeed, creating and abandoning addresses is encouraged.\n"
"Deterministic addresses have several pros and cons:\n"
"\nPros:\n"
" * You can recreate your addresses on any computer from memory\n"
" * You need not worry about backing up your keys.dat file as long as you \n can remember your passphrase\n"
"Cons:\n"
" * You must remember (or write down) your passphrase in order to recreate \n your keys if they are lost\n"
" * You must also remember the address version and stream numbers\n"
" * If you choose a weak passphrase someone may be able to brute-force it \n and then send and receive messages as you"),
exit_label="Continue")
r, t = d.menu("Choose an address generation technique",
choices=[("1", "Use a random number generator"),
("2", "Use a passphrase")])
if r == d.DIALOG_OK:
if t == "1":
d.set_background_title("Randomly generate address")
r, t = d.inputbox("Label (not shown to anyone except you)")
label = ""
if r == d.DIALOG_OK and len(t) > 0:
label = t
r, t = d.menu("Choose a stream",
choices=[("1", "Use the most available stream"),("", "(Best if this is the first of many addresses you will create)"),
("2", "Use the same stream as an existing address"),("", "(Saves you some bandwidth and processing power)")])
if r == d.DIALOG_OK:
if t == "1":
stream = 1
elif t == "2":
addrs = []
for i, item in enumerate(addresses):
addrs.append([str(i), item[2]])
r, t = d.menu("Choose an existing address's stream", choices=addrs)
if r == d.DIALOG_OK:
stream = decodeAddress(addrs[int(t)][1])[2]
shorten = False
r, t = d.checklist("Miscellaneous options",
choices=[("1", "Spend time shortening the address", shorten)])
if r == d.DIALOG_OK and "1" in t:
shorten = True
shared.addressGeneratorQueue.put(("createRandomAddress", 4, stream, label, 1, "", shorten))
elif t == "2":
d.set_background_title("Make deterministic addresses")
r, t = d.passwordform("Enter passphrase",
[("Passphrase", 1, 1, "", 2, 1, 64, 128),
("Confirm passphrase", 3, 1, "", 4, 1, 64, 128)],
form_height=4, insecure=True)
if r == d.DIALOG_OK:
if t[0] == t[1]:
passphrase = t[0]
r, t = d.rangebox("Number of addresses to generate",
width=48, min=1, max=99, init=8)
if r == d.DIALOG_OK:
number = t
stream = 1
shorten = False
r, t = d.checklist("Miscellaneous options",
choices=[("1", "Spend time shortening the address", shorten)])
if r == d.DIALOG_OK and "1" in t:
shorten = True
d.scrollbox(unicode("In addition to your passphrase, be sure to remember the following numbers:\n"
"\n * Address version number: "+str(4)+"\n"
" * Stream number: "+str(stream)),
exit_label="Continue")
shared.addressGeneratorQueue.put(('createDeterministicAddresses', 4, stream, "unused deterministic address", number, str(passphrase), shorten))
else:
d.scrollbox(unicode("Passphrases do not match"), exit_label="Continue")
elif t == "2": # Send a message
a = ""
if addresses[addrcur][3] != 0: # if current address is a chan
a = addresses[addrcur][2]
sendMessage(addresses[addrcur][2], a)
elif t == "3": # Rename address label
a = addresses[addrcur][2]
label = addresses[addrcur][0]
r, t = d.inputbox("New address label", init=label)
if r == d.DIALOG_OK:
label = t
shared.config.set(a, "label", label)
# Write config
shared.writeKeysFile()
addresses[addrcur][0] = label
elif t == "4": # Enable address
a = addresses[addrcur][2]
shared.config.set(a, "enabled", "true") # Set config
# Write config
shared.writeKeysFile()
# Change color
if shared.safeConfigGetBoolean(a, 'chan'):
addresses[addrcur][3] = 9 # orange
elif shared.safeConfigGetBoolean(a, 'mailinglist'):
addresses[addrcur][3] = 5 # magenta
else:
addresses[addrcur][3] = 0 # black
addresses[addrcur][1] = True
shared.reloadMyAddressHashes() # Reload address hashes
elif t == "5": # Disable address
a = addresses[addrcur][2]
shared.config.set(a, "enabled", "false") # Set config
addresses[addrcur][3] = 8 # Set color to gray
# Write config
shared.writeKeysFile()
addresses[addrcur][1] = False
shared.reloadMyAddressHashes() # Reload address hashes
elif t == "6": # Delete address
r, t = d.inputbox("Type in \"I want to delete this address\"", width=50)
if r == d.DIALOG_OK and t == "I want to delete this address":
shared.config.remove_section(addresses[addrcur][2])
shared.writeKeysFile()
del addresses[addrcur]
elif t == "7": # Special address behavior
a = addresses[addrcur][2]
d.set_background_title("Special address behavior")
if shared.safeConfigGetBoolean(a, "chan"):
d.scrollbox(unicode("This is a chan address. You cannot use it as a pseudo-mailing list."), exit_label="Continue")
else:
m = shared.safeConfigGetBoolean(a, "mailinglist")
r, t = d.radiolist("Select address behavior",
choices=[("1", "Behave as a normal address", not m),
("2", "Behave as a pseudo-mailing-list address", m)])
if r == d.DIALOG_OK:
if t == "1" and m == True:
shared.config.set(a, "mailinglist", "false")
if addresses[addrcur][1]:
addresses[addrcur][3] = 0 # Set color to black
else:
addresses[addrcur][3] = 8 # Set color to gray
elif t == "2" and m == False:
try:
mn = shared.config.get(a, "mailinglistname")
except ConfigParser.NoOptionError:
mn = ""
r, t = d.inputbox("Mailing list name", init=mn)
if r == d.DIALOG_OK:
mn = t
shared.config.set(a, "mailinglist", "true")
shared.config.set(a, "mailinglistname", mn)
                                            addresses[addrcur][3] = 5 # Set color to magenta
# Write config
shared.writeKeysFile()
elif menutab == 5:
d.set_background_title("Subscriptions Dialog Box")
r, t = d.menu("Do what with subscription to \""+subscriptions[subcur][0]+"\"?",
choices=[("1", "Add new subscription"),
("2", "Delete this subscription"),
("3", "Enable"),
("4", "Disable")])
if r == d.DIALOG_OK:
if t == "1":
r, t = d.inputbox("New subscription address")
if r == d.DIALOG_OK:
addr = addBMIfNotPresent(t)
if not shared.isAddressInMySubscriptionsList(addr):
r, t = d.inputbox("New subscription label")
if r == d.DIALOG_OK:
label = t
# Prepend entry
subscriptions.reverse()
subscriptions.append([label, addr, True])
subscriptions.reverse()
sqlExecute("INSERT INTO subscriptions VALUES (?,?,?)", label, address, True)
shared.reloadBroadcastSendersForWhichImWatching()
elif t == "2":
                            r, t = d.inputbox("Type in \"I want to delete this subscription\"")
if r == d.DIALOG_OK and t == "I want to delete this subscription":
sqlExecute("DELETE FROM subscriptions WHERE label=? AND address=?", subscriptions[subcur][0], subscriptions[subcur][1])
shared.reloadBroadcastSendersForWhichImWatching()
del subscriptions[subcur]
elif t == "3":
sqlExecute("UPDATE subscriptions SET enabled=1 WHERE label=? AND address=?", subscriptions[subcur][0], subscriptions[subcur][1])
shared.reloadBroadcastSendersForWhichImWatching()
subscriptions[subcur][2] = True
elif t == "4":
sqlExecute("UPDATE subscriptions SET enabled=0 WHERE label=? AND address=?", subscriptions[subcur][0], subscriptions[subcur][1])
shared.reloadBroadcastSendersForWhichImWatching()
subscriptions[subcur][2] = False
elif menutab == 6:
d.set_background_title("Address Book Dialog Box")
r, t = d.menu("Do what with \""+addrbook[abookcur][0]+"\" : \""+addrbook[abookcur][1]+"\"",
choices=[("1", "Send a message to this address"),
("2", "Subscribe to this address"),
("3", "Add new address to Address Book"),
("4", "Delete this address")])
if r == d.DIALOG_OK:
if t == "1":
sendMessage(recv=addrbook[abookcur][1])
elif t == "2":
r, t = d.inputbox("New subscription label")
if r == d.DIALOG_OK:
label = t
# Prepend entry
subscriptions.reverse()
subscriptions.append([label, addr, True])
subscriptions.reverse()
sqlExecute("INSERT INTO subscriptions VALUES (?,?,?)", label, address, True)
shared.reloadBroadcastSendersForWhichImWatching()
elif t == "3":
r, t = d.inputbox("Input new address")
if r == d.DIALOG_OK:
addr = t
if addr not in [item[1] for i,item in enumerate(addrbook)]:
r, t = d.inputbox("Label for address \""+addr+"\"")
if r == d.DIALOG_OK:
sqlExecute("INSERT INTO addressbook VALUES (?,?)", t, addr)
# Prepend entry
addrbook.reverse()
addrbook.append([t, addr])
addrbook.reverse()
else:
d.scrollbox(unicode("The selected address is already in the Address Book."), exit_label="Continue")
elif t == "4":
r, t = d.inputbox("Type in \"I want to delete this Address Book entry\"")
if r == d.DIALOG_OK and t == "I want to delete this Address Book entry":
sqlExecute("DELETE FROM addressbook WHERE label=? AND address=?", addrbook[abookcur][0], addrbook[abookcur][1])
del addrbook[abookcur]
elif menutab == 7:
d.set_background_title("Blacklist Dialog Box")
r, t = d.menu("Do what with \""+blacklist[blackcur][0]+"\" : \""+blacklist[blackcur][1]+"\"?",
choices=[("1", "Delete"),
("2", "Enable"),
("3", "Disable")])
if r == d.DIALOG_OK:
if t == "1":
r, t = d.inputbox("Type in \"I want to delete this Blacklist entry\"")
if r == d.DIALOG_OK and t == "I want to delete this Blacklist entry":
sqlExecute("DELETE FROM blacklist WHERE label=? AND address=?", blacklist[blackcur][0], blacklist[blackcur][1])
del blacklist[blackcur]
elif t == "2":
sqlExecute("UPDATE blacklist SET enabled=1 WHERE label=? AND address=?", blacklist[blackcur][0], blacklist[blackcur][1])
blacklist[blackcur][2] = True
elif t== "3":
sqlExecute("UPDATE blacklist SET enabled=0 WHERE label=? AND address=?", blacklist[blackcur][0], blacklist[blackcur][1])
blacklist[blackcur][2] = False
dialogreset(stdscr)
else:
if c == curses.KEY_UP:
if menutab == 1 and inboxcur > 0:
inboxcur -= 1
if (menutab == 2 or menutab == 4) and addrcur > 0:
addrcur -= 1
if menutab == 3 and sentcur > 0:
sentcur -= 1
if menutab == 5 and subcur > 0:
subcur -= 1
if menutab == 6 and abookcur > 0:
abookcur -= 1
if menutab == 7 and blackcur > 0:
blackcur -= 1
elif c == curses.KEY_DOWN:
if menutab == 1 and inboxcur < len(inbox)-1:
inboxcur += 1
if (menutab == 2 or menutab == 4) and addrcur < len(addresses)-1:
addrcur += 1
if menutab == 3 and sentcur < len(sentbox)-1:
sentcur += 1
if menutab == 5 and subcur < len(subscriptions)-1:
subcur += 1
if menutab == 6 and abookcur < len(addrbook)-1:
abookcur += 1
if menutab == 7 and blackcur < len(blacklist)-1:
blackcur += 1
elif c == curses.KEY_HOME:
if menutab == 1:
inboxcur = 0
if menutab == 2 or menutab == 4:
addrcur = 0
if menutab == 3:
sentcur = 0
if menutab == 5:
subcur = 0
if menutab == 6:
abookcur = 0
if menutab == 7:
blackcur = 0
elif c == curses.KEY_END:
if menutab == 1:
inboxcur = len(inbox)-1
if menutab == 2 or menutab == 4:
addrcur = len(addresses)-1
if menutab == 3:
sentcur = len(sentbox)-1
if menutab == 5:
subcur = len(subscriptions)-1
if menutab == 6:
abookcur = len(addrbook)-1
if menutab == 7:
                    blackcur = len(blacklist)-1
redraw(stdscr)
def sendMessage(sender="", recv="", broadcast=None, subject="", body="", reply=False):
if sender == "":
return
d = Dialog(dialog="dialog")
d.set_background_title("Send a message")
if recv == "":
r, t = d.inputbox("Recipient address (Cancel to load from the Address Book or leave blank to broadcast)", 10, 60)
if r != d.DIALOG_OK:
global menutab
menutab = 6
return
recv = t
if broadcast == None and sender != recv:
r, t = d.radiolist("How to send the message?",
choices=[("1", "Send to one or more specific people", True),
("2", "Broadcast to everyone who is subscribed to your address", False)])
if r != d.DIALOG_OK:
return
broadcast = False
if t == "2": # Broadcast
broadcast = True
if subject == "" or reply:
r, t = d.inputbox("Message subject", width=60, init=subject)
if r != d.DIALOG_OK:
return
subject = t
if body == "" or reply:
r, t = d.inputbox("Message body", 10, 80, init=body)
if r != d.DIALOG_OK:
return
body = t
body = body.replace("\\n", "\n").replace("\\t", "\t")
if not broadcast:
recvlist = []
for i, item in enumerate(recv.replace(",", ";").split(";")):
recvlist.append(item.strip())
        recvlist = list(set(recvlist)) # Remove exact duplicates
for addr in recvlist:
if addr != "":
status, version, stream, ripe = decodeAddress(addr)
if status != "success":
d.set_background_title("Recipient address error")
err = "Could not decode" + addr + " : " + status + "\n\n"
if status == "missingbm":
err += "Bitmessage addresses should start with \"BM-\"."
elif status == "checksumfailed":
err += "The address was not typed or copied correctly."
elif status == "invalidcharacters":
err += "The address contains invalid characters."
elif status == "versiontoohigh":
err += "The address version is too high. Either you need to upgrade your Bitmessage software or your acquaintance is doing something clever."
elif status == "ripetooshort":
err += "Some data encoded in the address is too short. There might be something wrong with the software of your acquaintance."
elif status == "ripetoolong":
err += "Some data encoded in the address is too long. There might be something wrong with the software of your acquaintance."
elif status == "varintmalformed":
err += "Some data encoded in the address is malformed. There might be something wrong with the software of your acquaintance."
else:
err += "It is unknown what is wrong with the address."
d.scrollbox(unicode(err), exit_label="Continue")
else:
addr = addBMIfNotPresent(addr)
if version > 4 or version <= 1:
d.set_background_title("Recipient address error")
d.scrollbox(unicode("Could not understand version number " + version + "of address" + addr + "."),
exit_label="Continue")
continue
if stream > 1 or stream == 0:
d.set_background_title("Recipient address error")
d.scrollbox(unicode("Bitmessage currently only supports stream numbers of 1, unlike as requested for address " + addr + "."),
exit_label="Continue")
continue
if len(shared.connectedHostsList) == 0:
d.set_background_title("Not connected warning")
d.scrollbox(unicode("Because you are not currently connected to the network, "),
exit_label="Continue")
ackdata = OpenSSL.rand(32)
sqlExecute(
"INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
"",
addr,
ripe,
sender,
subject,
body,
ackdata,
int(time.time()), # sentTime (this will never change)
int(time.time()), # lastActionTime
0, # sleepTill time. This will get set when the POW gets done.
"msgqueued",
0, # retryNumber
"sent",
2, # encodingType
shared.config.getint('bitmessagesettings', 'ttl'))
shared.workerQueue.put(("sendmessage", addr))
else: # Broadcast
if recv == "":
d.set_background_title("Empty sender error")
d.scrollbox(unicode("You must specify an address to send the message from."),
exit_label="Continue")
else:
ackdata = OpenSSL.rand(32)
recv = BROADCAST_STR
ripe = ""
sqlExecute(
"INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
"",
recv,
ripe,
sender,
subject,
body,
ackdata,
int(time.time()), # sentTime (this will never change)
int(time.time()), # lastActionTime
0, # sleepTill time. This will get set when the POW gets done.
"broadcastqueued",
0, # retryNumber
"sent", # folder
2, # encodingType
shared.config.getint('bitmessagesettings', 'ttl'))
shared.workerQueue.put(('sendbroadcast', ''))
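# For reference, the call patterns used by the tab handlers above; the
# argument names here are placeholders, not variables from this module:
#   sendMessage(myaddr)                    # prompts for recipient, subject, body
#   sendMessage(myaddr, chan_addr)         # recipient pre-filled with a chan address
#   sendMessage(fromaddr, toaddr, ischan, subject, body, True)   # reply flow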
def loadInbox():
sys.stdout = sys.__stdout__
print("Loading inbox messages...")
sys.stdout = printlog
where = "toaddress || fromaddress || subject || message"
what = "%%"
ret = sqlQuery("""SELECT msgid, toaddress, fromaddress, subject, received, read
FROM inbox WHERE folder='inbox' AND %s LIKE ?
ORDER BY received
""" % (where,), what)
global inbox
for row in ret:
msgid, toaddr, fromaddr, subject, received, read = row
subject = ascii(shared.fixPotentiallyInvalidUTF8Data(subject))
# Set label for to address
try:
if toaddr == BROADCAST_STR:
tolabel = BROADCAST_STR
else:
tolabel = shared.config.get(toaddr, "label")
except:
tolabel = ""
if tolabel == "":
tolabel = toaddr
tolabel = shared.fixPotentiallyInvalidUTF8Data(tolabel)
# Set label for from address
fromlabel = ""
if shared.config.has_section(fromaddr):
fromlabel = shared.config.get(fromaddr, "label")
if fromlabel == "": # Check Address Book
qr = sqlQuery("SELECT label FROM addressbook WHERE address=?", fromaddr)
if qr != []:
for r in qr:
fromlabel, = r
if fromlabel == "": # Check Subscriptions
qr = sqlQuery("SELECT label FROM subscriptions WHERE address=?", fromaddr)
if qr != []:
for r in qr:
fromlabel, = r
if fromlabel == "":
fromlabel = fromaddr
fromlabel = shared.fixPotentiallyInvalidUTF8Data(fromlabel)
# Load into array
inbox.append([msgid, tolabel, toaddr, fromlabel, fromaddr, subject,
l10n.formatTimestamp(received, False), read])
inbox.reverse()
def loadSent():
sys.stdout = sys.__stdout__
print("Loading sent messages...")
sys.stdout = printlog
where = "toaddress || fromaddress || subject || message"
what = "%%"
ret = sqlQuery("""SELECT toaddress, fromaddress, subject, status, ackdata, lastactiontime
FROM sent WHERE folder='sent' AND %s LIKE ?
ORDER BY lastactiontime
""" % (where,), what)
    global sentbox
for row in ret:
toaddr, fromaddr, subject, status, ackdata, lastactiontime = row
subject = ascii(shared.fixPotentiallyInvalidUTF8Data(subject))
# Set label for to address
tolabel = ""
qr = sqlQuery("SELECT label FROM addressbook WHERE address=?", toaddr)
if qr != []:
for r in qr:
tolabel, = r
if tolabel == "":
qr = sqlQuery("SELECT label FROM subscriptions WHERE address=?", toaddr)
if qr != []:
for r in qr:
tolabel, = r
if tolabel == "":
if shared.config.has_section(toaddr):
tolabel = shared.config.get(toaddr, "label")
if tolabel == "":
tolabel = toaddr
# Set label for from address
fromlabel = ""
if shared.config.has_section(fromaddr):
fromlabel = shared.config.get(fromaddr, "label")
if fromlabel == "":
fromlabel = fromaddr
# Set status string
if status == "awaitingpubkey":
statstr = "Waiting for their public key. Will request it again soon"
elif status == "doingpowforpubkey":
statstr = "Encryption key request queued"
elif status == "msgqueued":
statstr = "Message queued"
elif status == "msgsent":
t = l10n.formatTimestamp(lastactiontime, False)
statstr = "Message sent at "+t+".Waiting for acknowledgement."
elif status == "msgsentnoackexpected":
t = l10n.formatTimestamp(lastactiontime, False)
statstr = "Message sent at "+t+"."
elif status == "doingmsgpow":
statstr = "The proof of work required to send the message has been queued."
elif status == "askreceived":
t = l10n.formatTimestamp(lastactiontime, False)
statstr = "Acknowledgment of the message received at "+t+"."
elif status == "broadcastqueued":
statstr = "Broadcast queued."
elif status == "broadcastsent":
t = l10n.formatTimestamp(lastactiontime, False)
statstr = "Broadcast sent at "+t+"."
elif status == "forcepow":
statstr = "Forced difficulty override. Message will start sending soon."
elif status == "badkey":
statstr = "Warning: Could not encrypt message because the recipient's encryption key is no good."
elif status == "toodifficult":
statstr = "Error: The work demanded by the recipient is more difficult than you are willing to do."
else:
t = l10n.formatTimestamp(lastactiontime, False)
statstr = "Unknown status "+status+" at "+t+"."
# Load into array
sentbox.append([tolabel, toaddr, fromlabel, fromaddr, subject, statstr, ackdata,
l10n.formatTimestamp(lastactiontime, False)])
sentbox.reverse()
def loadAddrBook():
sys.stdout = sys.__stdout__
print("Loading address book...")
sys.stdout = printlog
ret = sqlQuery("SELECT label, address FROM addressbook")
global addrbook
for row in ret:
label, addr = row
label = shared.fixPotentiallyInvalidUTF8Data(label)
addrbook.append([label, addr])
addrbook.reverse()
def loadSubscriptions():
ret = sqlQuery("SELECT label, address, enabled FROM subscriptions")
for row in ret:
label, address, enabled = row
subscriptions.append([label, address, enabled])
subscriptions.reverse()
def loadBlackWhiteList():
global bwtype
bwtype = shared.config.get("bitmessagesettings", "blackwhitelist")
if bwtype == "black":
ret = sqlQuery("SELECT label, address, enabled FROM blacklist")
else:
ret = sqlQuery("SELECT label, address, enabled FROM whitelist")
for row in ret:
label, address, enabled = row
blacklist.append([label, address, enabled])
blacklist.reverse()
def runwrapper():
sys.stdout = printlog
#sys.stderr = errlog
# Load messages from database
loadInbox()
loadSent()
loadAddrBook()
loadSubscriptions()
loadBlackWhiteList()
stdscr = curses.initscr()
global logpad
logpad = curses.newpad(1024, curses.COLS)
stdscr.nodelay(0)
curses.curs_set(0)
stdscr.timeout(1000)
curses.wrapper(run)
shutdown()
def run(stdscr):
# Schedule inventory lookup data
resetlookups()
# Init color pairs
if curses.has_colors():
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK) # red
curses.init_pair(2, curses.COLOR_GREEN, curses.COLOR_BLACK) # green
curses.init_pair(3, curses.COLOR_YELLOW, curses.COLOR_BLACK) # yellow
curses.init_pair(4, curses.COLOR_BLUE, curses.COLOR_BLACK) # blue
curses.init_pair(5, curses.COLOR_MAGENTA, curses.COLOR_BLACK) # magenta
curses.init_pair(6, curses.COLOR_CYAN, curses.COLOR_BLACK) # cyan
curses.init_pair(7, curses.COLOR_WHITE, curses.COLOR_BLACK) # white
if curses.can_change_color():
curses.init_color(8, 500, 500, 500) # gray
curses.init_pair(8, 8, 0)
curses.init_color(9, 844, 465, 0) # orange
curses.init_pair(9, 9, 0)
else:
curses.init_pair(8, curses.COLOR_WHITE, curses.COLOR_BLACK) # grayish
curses.init_pair(9, curses.COLOR_YELLOW, curses.COLOR_BLACK) # orangish
# Init list of address in 'Your Identities' tab
configSections = shared.config.sections()
for addressInKeysFile in configSections:
if addressInKeysFile != "bitmessagesettings":
isEnabled = shared.config.getboolean(addressInKeysFile, "enabled")
addresses.append([shared.config.get(addressInKeysFile, "label"), isEnabled, addressInKeysFile])
# Set address color
if not isEnabled:
addresses[len(addresses)-1].append(8) # gray
elif shared.safeConfigGetBoolean(addressInKeysFile, 'chan'):
addresses[len(addresses)-1].append(9) # orange
elif shared.safeConfigGetBoolean(addressInKeysFile, 'mailinglist'):
addresses[len(addresses)-1].append(5) # magenta
else:
addresses[len(addresses)-1].append(0) # black
addresses.reverse()
stdscr.clear()
redraw(stdscr)
while quit == False:
drawtab(stdscr)
handlech(stdscr.getch(), stdscr)
def shutdown():
sys.stdout = sys.__stdout__
print("Shutting down...")
sys.stdout = printlog
shared.doCleanShutdown()
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
os._exit(0)
|
yakky/django-cms
|
refs/heads/develop
|
cms/tests/test_api.py
|
3
|
import sys
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldError
from django.core.exceptions import PermissionDenied
from django.template import TemplateDoesNotExist, TemplateSyntaxError
from djangocms_text_ckeditor.cms_plugins import TextPlugin
from djangocms_text_ckeditor.models import Text
from menus.menu_pool import menu_pool
from cms.api import (
create_page,
_verify_plugin_type,
assign_user_to_page,
publish_page,
)
from cms.apphook_pool import apphook_pool
from cms.constants import TEMPLATE_INHERITANCE_MAGIC
from cms.models.pagemodel import Page
from cms.models.permissionmodels import GlobalPagePermission
from cms.plugin_base import CMSPluginBase
from cms.test_utils.testcases import CMSTestCase
from cms.test_utils.util.menu_extender import TestMenu
from cms.tests.test_apphooks import APP_MODULE, APP_NAME
def _grant_page_permission(user, codename):
content_type = ContentType.objects.get_by_natural_key('cms', 'page')
perm = Permission.objects.get_or_create(codename='%s_page' % codename,
content_type=content_type)[0]
user.user_permissions.add(perm)
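# Illustrative: _grant_page_permission(user, 'change') attaches the cms
# "change_page" model permission to the given user.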
class PythonAPITests(CMSTestCase):
def _get_default_create_page_arguments(self):
return {
'title': 'Test',
'template': 'nav_playground.html',
'language': 'en'
}
def test_invalid_apphook_type(self):
self.assertRaises(TypeError, create_page, apphook=1,
**self._get_default_create_page_arguments())
def test_invalid_template(self):
kwargs = self._get_default_create_page_arguments()
kwargs['template'] = "not_valid.htm"
with self.settings(CMS_TEMPLATES=[("not_valid.htm", "notvalid")]):
self.assertRaises(TemplateDoesNotExist, create_page, **kwargs)
kwargs['template'] = TEMPLATE_INHERITANCE_MAGIC
create_page(**kwargs)
def test_apphook_by_class(self):
if APP_MODULE in sys.modules:
del sys.modules[APP_MODULE]
apphooks = (
'%s.%s' % (APP_MODULE, APP_NAME),
)
with self.settings(CMS_APPHOOKS=apphooks):
apphook_pool.clear()
apphook = apphook_pool.get_apphook(APP_NAME)
page = create_page(apphook=apphook,
**self._get_default_create_page_arguments())
self.assertEqual(page.get_application_urls('en'), APP_NAME)
def test_invalid_dates(self):
self.assertRaises(AssertionError, create_page, publication_date=1,
**self._get_default_create_page_arguments())
self.assertRaises(AssertionError, create_page, publication_end_date=1,
**self._get_default_create_page_arguments())
def test_nav_extenders_invalid_type(self):
if not menu_pool.discovered:
menu_pool.discover_menus()
self.old_menu = menu_pool.menus
menu_pool.menus = {'TestMenu': TestMenu}
self.assertRaises(AssertionError, create_page, navigation_extenders=1,
**self._get_default_create_page_arguments())
menu_pool.menus = self.old_menu
def test_nav_extenders_invalid_menu(self):
if not menu_pool.discovered:
menu_pool.discover_menus()
self.old_menu = menu_pool.menus
menu_pool.menus = {}
self.assertRaises(AssertionError, create_page,
navigation_extenders=TestMenu,
**self._get_default_create_page_arguments())
menu_pool.menus = self.old_menu
def test_nav_extenders_valid(self):
if not menu_pool.discovered:
menu_pool.discover_menus()
self.old_menu = menu_pool.menus
menu_pool.menus = {'TestMenu': TestMenu}
page = create_page(navigation_extenders='TestMenu',
**self._get_default_create_page_arguments())
self.assertEqual(page.navigation_extenders, 'TestMenu')
menu_pool.menus = self.old_menu
def test_verify_plugin_type_invalid_type(self):
self.assertRaises(TypeError, _verify_plugin_type, 1)
def test_verify_plugin_type_string(self):
plugin_model, plugin_type = _verify_plugin_type("TextPlugin")
self.assertEqual(plugin_model, Text)
self.assertEqual(plugin_type, 'TextPlugin')
def test_verify_plugin_type_string_invalid(self):
self.assertRaises(TypeError, _verify_plugin_type, "InvalidPlugin")
def test_verify_plugin_type_plugin_class(self):
plugin_model, plugin_type = _verify_plugin_type(TextPlugin)
self.assertEqual(plugin_model, Text)
self.assertEqual(plugin_type, 'TextPlugin')
def test_verify_plugin_type_invalid_plugin_class(self):
class InvalidPlugin(CMSPluginBase):
model = Text
self.assertRaises(AssertionError, _verify_plugin_type, InvalidPlugin)
def test_assign_user_to_page_nothing(self):
page = create_page(**self._get_default_create_page_arguments())
user = get_user_model().objects.create_user(username='user', email='user@django-cms.org',
password='user')
user.is_staff = True
self.assertFalse(page.has_change_permission(user))
def test_assign_user_to_page_single(self):
page = create_page(**self._get_default_create_page_arguments())
user = get_user_model().objects.create_user(username='user', email='user@django-cms.org',
password='user')
user.is_staff = True
user.save()
assign_user_to_page(page, user, can_change=True)
self.assertFalse(page.has_change_permission(user))
self.assertFalse(page.has_add_permission(user))
_grant_page_permission(user, 'change')
page = Page.objects.get(pk=page.pk)
user = get_user_model().objects.get(pk=user.pk)
self.assertTrue(page.has_change_permission(user))
self.assertFalse(page.has_add_permission(user))
def test_assign_user_to_page_all(self):
page = create_page(**self._get_default_create_page_arguments())
user = get_user_model().objects.create_user(username='user', email='user@django-cms.org',
password='user')
user.is_staff = True
user.save()
assign_user_to_page(page, user, grant_all=True)
self.assertFalse(page.has_change_permission(user))
self.assertFalse(page.has_add_permission(user))
_grant_page_permission(user, 'change')
_grant_page_permission(user, 'add')
page = Page.objects.get(pk=page.pk)
user = get_user_model().objects.get(pk=user.pk)
self.assertTrue(page.has_change_permission(user))
self.assertTrue(page.has_add_permission(user))
def test_page_overwrite_url_default(self):
self.assertEqual(Page.objects.all().count(), 0)
home = create_page('root', 'nav_playground.html', 'en', published=True)
self.assertTrue(home.is_published('en', True))
self.assertFalse(home.is_home)
page = create_page(**self._get_default_create_page_arguments())
self.assertFalse(page.is_home)
self.assertFalse(page.get_title_obj_attribute('has_url_overwrite'))
self.assertEqual(page.get_title_obj_attribute('path'), 'test')
def test_create_page_can_overwrite_url(self):
page_attrs = self._get_default_create_page_arguments()
page_attrs["overwrite_url"] = 'test/home'
page = create_page(**page_attrs)
self.assertTrue(page.get_title_obj_attribute('has_url_overwrite'))
self.assertEqual(page.get_title_obj_attribute('path'), 'test/home')
def test_create_page_atomic(self):
# Ref: https://github.com/divio/django-cms/issues/5652
# We'll simulate a scenario where a user creates a page with an
# invalid template which causes Django to throw an error when the
# template is scanned for placeholders and thus short circuits the
# creation mechanism.
page_attrs = self._get_default_create_page_arguments()
# It's important to use TEMPLATE_INHERITANCE_MAGIC to avoid the cms
# from loading the template before saving and triggering the template error
# Instead, we delay the loading of the template until after the save is executed.
page_attrs["template"] = TEMPLATE_INHERITANCE_MAGIC
self.assertFalse(Page.objects.filter(template=TEMPLATE_INHERITANCE_MAGIC).exists())
with self.settings(CMS_TEMPLATES=[("col_invalid.html", "notvalid")]):
self.assertRaises(TemplateSyntaxError, create_page, **page_attrs)
# The template raised an exception which should cause the database to roll back
# instead of committing a page in a partial state.
self.assertFalse(Page.objects.filter(template=TEMPLATE_INHERITANCE_MAGIC).exists())
def test_create_reverse_id_collision(self):
create_page('home', 'nav_playground.html', 'en', published=True, reverse_id="foo")
self.assertRaises(FieldError, create_page, 'foo', 'nav_playground.html', 'en', published=True, reverse_id="foo")
        self.assertEqual(Page.objects.count(), 2)
def test_publish_page(self):
page_attrs = self._get_default_create_page_arguments()
page_attrs['language'] = 'en'
page_attrs['published'] = False
page = create_page(**page_attrs)
self.assertFalse(page.is_published('en'))
self.assertEqual(page.changed_by, 'script')
user = get_user_model().objects.create_user(username='user', email='user@django-cms.org',
password='user')
# Initially no permission
self.assertRaises(PermissionDenied, publish_page, page, user, 'en')
user.is_staff = True
user.save()
# Permissions are cached on user instances, so create a new one.
user = get_user_model().objects.get(pk=user.pk)
self.add_permission(user, 'change_page')
self.add_permission(user, 'publish_page')
gpp = GlobalPagePermission.objects.create(user=user, can_change=True, can_publish=True)
gpp.sites.add(page.node.site)
publish_page(page, user, 'en')
# Reload the page to get updates.
page = page.reload()
self.assertTrue(page.is_published('en'))
self.assertEqual(page.changed_by, user.get_username())
def test_create_page_assert_parent_is_draft(self):
page_attrs = self._get_default_create_page_arguments()
page_attrs['published'] = True
parent_page = create_page(**page_attrs)
parent_page_public = parent_page.get_public_object()
self.assertRaises(AssertionError, create_page, parent=parent_page_public, **page_attrs)
def test_create_page_page_title(self):
page = create_page(**dict(self._get_default_create_page_arguments(), page_title='page title'))
self.assertEqual(page.get_title_obj_attribute('page_title'), 'page title')
def test_create_page_with_position_regression_6345(self):
# ref: https://github.com/divio/django-cms/issues/6345
parent = create_page('p', 'nav_playground.html', 'en')
rightmost = create_page('r', 'nav_playground.html', 'en', parent=parent)
leftmost = create_page('l', 'nav_playground.html', 'en', parent=rightmost, position='left')
create_page('m', 'nav_playground.html', 'en', parent=leftmost, position='right')
children_titles = list(p.get_title('de') for p in parent.get_child_pages())
self.assertEqual(children_titles, ['l', 'm', 'r'])
|
patriciolobos/desa8
|
refs/heads/master
|
openerp/addons/l10n_fr_hr_payroll/l10n_fr_hr_payroll.py
|
340
|
#-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP SA (<http://openerp.com>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
class res_company(osv.osv):
_inherit = 'res.company'
_columns = {
'plafond_secu': fields.float('Plafond de la Securite Sociale', digits_compute=dp.get_precision('Payroll')),
'nombre_employes': fields.integer('Nombre d\'employes'),
'cotisation_prevoyance': fields.float('Cotisation Patronale Prevoyance', digits_compute=dp.get_precision('Payroll')),
'org_ss': fields.char('Organisme de securite sociale'),
'conv_coll': fields.char('Convention collective'),
}
class hr_contract(osv.osv):
_inherit = 'hr.contract'
_columns = {
'qualif': fields.char('Qualification'),
'niveau': fields.char('Niveau'),
'coef': fields.char('Coefficient'),
}
class hr_payslip(osv.osv):
_inherit = 'hr.payslip'
_columns = {
'payment_mode': fields.char('Mode de paiement'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
tonybaloney/st2
|
refs/heads/master
|
st2common/st2common/constants/api.py
|
10
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
    'DEFAULT_API_VERSION',
    'REQUEST_ID_HEADER'
]
DEFAULT_API_VERSION = 'v1'
REQUEST_ID_HEADER = 'X-Request-ID'
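# Illustrative (hypothetical id, endpoint and port): clients can set this
# header to correlate log lines across services, e.g.
#   curl -H 'X-Request-ID: 6f1c0a' http://127.0.0.1:9101/v1/actions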
|