| code stringlengths 3-1.05M | repo_name stringlengths 5-104 | path stringlengths 4-251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3-1.05M |
|---|---|---|---|---|---|
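A minimal sketch of consuming one record of this schema, assuming the dump is exported as JSON Lines (the filename and export format are assumptions, not part of the dataset header):
import json
# Iterate records carrying the columns listed above (assumed JSONL export).
with open("code_dataset.jsonl") as fh:  # hypothetical filename
    for line in fh:
        record = json.loads(line)
        # Schema fields: code, repo_name, path, language, license, size.
        print(record["repo_name"], record["path"], record["size"])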
import pytest
from func_prototypes import *
def test_dictjoin():
eng = {1:"one", 2:"two"}
esp = {1:"uno", 2:"dos", 3:"tres"}
com = {1:("one", "uno"), 2:("two", "dos")}
assert com == dictjoin(eng, esp)
with pytest.raises(KeyError):
dictjoin(esp, eng)
|
andrewguy9/func_prototypes
|
tests/test_util.py
|
Python
|
mit
| 266
|
# -*- coding: utf-8 -*-
import os
import sys
import platform
from setuptools import setup
OPTIONS = {
'iconfile':'assets/clock.icns',
'includes' : ['sqlalchemy.dialects.sqlite']
}
DATA_FILES = ['./assets/clock.png',
'./assets/clock_grey.png',
'./assets/cursor.png',
'./traces/preferences.xib',
'./traces/experience.xib']
setup(
name="Traces",
app=['traces/__init__.py'],
version='0.9.1',
setup_requires=["py2app"],
options={'py2app': OPTIONS},
data_files=DATA_FILES,
description= 'Activity Tracker',
install_requires=[
"pyobjc-core",
"pyobjc-framework-Cocoa",
"pyobjc-framework-Quartz",
"sqlalchemy",
]
)
|
activityhistory/traces
|
setup.py
|
Python
|
gpl-3.0
| 644
|
# -*- coding: utf-8 -*-
# Copyright(C) 2014 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import datetime
from decimal import Decimal
import lxml.html as html
from six.moves.html_parser import HTMLParser
from weboob.tools.compat import basestring, unicode, urljoin
from weboob.tools.html import html2text
from .base import _NO_DEFAULT, Filter, FilterError, _Selector, debug, ItemNotFound
from .standard import (
TableCell, ColumnNotFound, # TODO move class here when modules are migrated
CleanText,
)
__all__ = ['CSS', 'XPath', 'XPathNotFound', 'AttributeNotFound',
'Attr', 'Link', 'AbsoluteLink',
'CleanHTML', 'FormValue', 'HasElement',
'TableCell', 'ColumnNotFound',
'ReplaceEntities',
]
class XPathNotFound(ItemNotFound):
pass
class AttributeNotFound(ItemNotFound):
pass
class CSS(_Selector):
"""Select HTML elements with a CSS selector
For example::
obj_foo = CleanText(CSS('div.main'))
will take the text of all ``<div>`` having CSS class "main".
"""
def select(self, selector, item):
ret = item.cssselect(selector)
if isinstance(ret, list):
for el in ret:
if isinstance(el, html.HtmlElement):
self.highlight_el(el, item)
return ret
class XPath(_Selector):
"""Select HTML elements with a XPath selector
"""
pass
class Attr(Filter):
"""Get the text value of an HTML attribute.
Get value from attribute `attr` of HTML element matched by `selector`.
For example::
obj_foo = Attr('//img[@id="thumbnail"]', 'src')
will take the "src" attribute of ``<img>`` whose "id" is "thumbnail".
"""
def __init__(self, selector, attr, default=_NO_DEFAULT):
"""
:param selector: selector targeting the element
:param attr: name of the attribute to take
"""
super(Attr, self).__init__(selector, default=default)
self.attr = attr
@debug()
def filter(self, el):
"""
:raises: :class:`XPathNotFound` if no element is found
:raises: :class:`AttributeNotFound` if the element doesn't have the requested attribute
"""
try:
return u'%s' % el[0].attrib[self.attr]
except IndexError:
return self.default_or_raise(XPathNotFound('Unable to find element %s' % self.selector))
except KeyError:
return self.default_or_raise(AttributeNotFound('Element %s does not have attribute %s' % (el[0], self.attr)))
class Link(Attr):
"""
Get the link uri of an element.
If the ``<a>`` tag is not found, an :class:`XPathNotFound` exception is raised.
"""
def __init__(self, selector=None, default=_NO_DEFAULT):
super(Link, self).__init__(selector, 'href', default=default)
class AbsoluteLink(Link):
"""Get the absolute link URI of an element.
"""
def __call__(self, item):
ret = super(AbsoluteLink, self).__call__(item)
if ret:
ret = urljoin(item.page.url, ret)
return ret
class CleanHTML(Filter):
"""Convert HTML to text (Markdown) using html2text.
.. seealso:: `html2text site <https://pypi.python.org/pypi/html2text>`_
"""
def __init__(self, selector=None, options=None, default=_NO_DEFAULT):
"""
:param options: options suitable for html2text
:type options: dict
"""
super(CleanHTML, self).__init__(selector=selector, default=default)
self.options = options
@debug()
def filter(self, txt):
if isinstance(txt, (tuple, list)):
return u' '.join([self.clean(item, self.options) for item in txt])
return self.clean(txt, self.options)
@classmethod
def clean(cls, txt, options=None):
if not isinstance(txt, basestring):
txt = html.tostring(txt, encoding=unicode)
options = options or {}
return html2text(txt, **options)
class UnrecognizedElement(Exception):
pass
class FormValue(Filter):
"""
Extract a Python value from a form element.
Checkboxes and radio return booleans, while the rest
return text. For ``<select>`` tags, returns the user-visible text.
"""
@debug()
def filter(self, el):
try:
el = el[0]
except IndexError:
return self.default_or_raise(XPathNotFound('Unable to find element %s' % self.selector))
if el.tag == 'input':
# checkboxes or radios
if el.attrib.get('type') in ('radio', 'checkbox'):
return 'checked' in el.attrib
# regular text input
elif el.attrib.get('type', '') in ('', 'text', 'email', 'search', 'tel', 'url', 'password', 'hidden', 'color'):
try:
return unicode(el.attrib['value'])
except KeyError:
return self.default_or_raise(AttributeNotFound('Element %s does not have attribute value' % el))
# numeric input
elif el.attrib.get('type', '') in ('number', 'range'):
try:
if '.' in el.attrib.get('step', ''):
return Decimal(el.attrib['value'])
else:
return int(el.attrib['value'])
except KeyError:
return self.default_or_raise(AttributeNotFound('Element %s does not have attribute value' % el))
# datetime input
try:
if el.attrib.get('type', '') == 'date':
return datetime.datetime.strptime(el.attrib['value'], '%Y-%m-%d').date()
elif el.attrib.get('type', '') == 'time':
return datetime.datetime.strptime(el.attrib['value'], '%H:%M').time()
elif el.attrib.get('type', '') == 'datetime-local':
return datetime.datetime.strptime(el.attrib['value'], '%Y-%m-%dT%H:%M')
except KeyError:
return self.default_or_raise(AttributeNotFound('Element %s does not have attribute value' % el))
else:
raise UnrecognizedElement('Element %s is not recognized' % el)
elif el.tag == 'textarea':
return unicode(el.text)
elif el.tag == 'select':
options = el.xpath('.//option[@selected]')
# default is the first one
if len(options) == 0:
options = el.xpath('.//option[1]')
return u'\n'.join([unicode(o.text) for o in options])
else:
raise UnrecognizedElement('Element %s is not recognized' % el)
class HasElement(Filter):
"""
Returns `yesvalue` if the `selector` finds elements, `novalue` otherwise.
"""
def __init__(self, selector, yesvalue=True, novalue=False):
super(HasElement, self).__init__(selector, default=novalue)
self.yesvalue = yesvalue
@debug()
def filter(self, value):
if value:
return self.yesvalue
return self.default_or_raise(FilterError('No default value'))
class ReplaceEntities(CleanText):
"""
Filter to replace HTML entities like "&eacute;" or "&#x42;" with their unicode counterpart.
"""
def filter(self, data):
h = HTMLParser()
txt = super(ReplaceEntities, self).filter(data)
return h.unescape(txt)
|
laurentb/weboob
|
weboob/browser/filters/html.py
|
Python
|
lgpl-3.0
| 8,039
|
from django.test import TestCase
from astrobin_apps_equipment.templatetags.astrobin_apps_equipment_tags import equipment_listing_url_with_tags
from astrobin_apps_equipment.tests.equipment_generators import EquipmentGenerators
class TestTagEquipmentListingUrlWithUtmTags(TestCase):
def test_simple_url(self):
listing = EquipmentGenerators.equipment_brand_listing(url='https://www.example.com')
self.assertEqual(
f'https://www.example.com?brand={listing.brand.name}&retailer={listing.retailer.name}&source=foo',
equipment_listing_url_with_tags(listing, 'foo')
)
def test_complex_url(self):
listing = EquipmentGenerators.equipment_brand_listing(url='https://www.example.com/1/2/3/')
self.assertEqual(
f'https://www.example.com/1/2/3/?brand={listing.brand.name}&retailer={listing.retailer.name}&source=foo',
equipment_listing_url_with_tags(listing, 'foo')
)
def test_url_with_params(self):
listing = EquipmentGenerators.equipment_brand_listing(url='https://www.example.com/search?q=foo')
self.assertEqual(
f'https://www.example.com/search?q=foo&brand={listing.brand.name}&retailer={listing.retailer.name}&source=foo',
equipment_listing_url_with_tags(listing, 'foo')
)
def test_url_with_conflicting_params(self):
listing = EquipmentGenerators.equipment_brand_listing(url='https://www.example.com/search?q=foo&source=foo')
self.assertEqual(
'https://www.example.com/search?q=foo&source=foo',
equipment_listing_url_with_tags(listing, 'bar')
)
|
astrobin/astrobin
|
astrobin_apps_equipment/tests/test_tag_equipment_listings_url_with_utm_tags.py
|
Python
|
agpl-3.0
| 1,650
|
import sys
if sys.version_info < (3, 7):
from ._decreasing import Decreasing
from ._font import Font
from ._increasing import Increasing
else:
from _plotly_utils.importers import relative_import
__all__, __getattr__, __dir__ = relative_import(
__name__,
[],
["._decreasing.Decreasing", "._font.Font", "._increasing.Increasing"],
)
|
plotly/python-api
|
packages/python/plotly/plotly/graph_objs/indicator/delta/__init__.py
|
Python
|
mit
| 381
|
#!/usr/bin/env python
import os
import tempfile
import pipes
import subprocess
import time
import random
import shutil
try:
from wand.image import Image
from wand.display import display
except ImportError as e:
# cd /usr/lib/
# ln -s libMagickWand-6.Q16.so libMagickWand.so
print("Couldn't import Wand package.")
print("Please refer to #http://dahlia.kr/wand/ to install it.")
import traceback; traceback.print_exc()
raise e
try:
import magic
mime = magic.Magic()
except ImportError:
mime = None
#https://github.com/ahupp/python-magic
try:
from docopt import docopt
except ImportError:
print("Couldn't import Docopt package.")
print("Please refer to#https://github.com/docopt/docopt to install it.")
print("/!\\ Option parsing not possible, defaulting to hardcoded values/!\\")
def to_bool(val):
if val is None:
return False
return val == 1
def to_int(val):
return int(val)
def to_str(val):
return val
def to_path(val):
return val
OPT_TO_KEY = {
'--do-wrap' : ("DO_WRAP", to_bool),
'--line-height': ("LINE_HEIGHT", to_int),
'--nb-lines' : ('LINES', to_int),
'--no-caption' : ("WANT_NO_CAPTION", to_bool),
'--force-no-vfs': ("FORCE_VFS", to_bool),
'--force-vfs' : ("FORCE_NO_VFS", to_bool),
'--pick-random': ("PICK_RANDOM", to_bool),
'--put-random' : ("PUT_RANDOM", to_bool),
'--resize' : ("DO_RESIZE", to_bool),
'--sleep' : ('SLEEP_TIME', to_int),
'--width' : ('WIDTH', to_int),
'--no-switch-to-mini': ("NO_SWITCH_TO_MINI", to_bool),
'<path>' : ('PATH', to_path),
'<target>' : ('TARGET', to_path),
'--polaroid' : ("DO_POLAROID", to_bool),
'--format' : ("IMG_FORMAT_SUFFIX", to_str),
'--crop-size' : ("CROP_SIZE", to_int),
'~~use-vfs' : ("USE_VFS", to_bool),
'--help' : ("HELP", to_bool)
}
KEY_TO_OPT = dict([(key, (opt, ttype)) for opt, (key, ttype) in OPT_TO_KEY.items()])
PARAMS = {
"PATH" : "/home/kevin/mount/first",
"TARGET" : "/tmp/final.png",
#define the size of the picture
"WIDTH" : 2000,
#define how many lines do we want
"LINES": 2,
"LINE_HEIGHT": 200,
#minimum width of cropped image. Below that, we black it out
#only for POLAROID
"CROP_SIZE": 1000,
"IMG_FORMAT_SUFFIX": ".png",
# False if PATH is a normal directory, True if it is WebAlbums-FS
"USE_VFS": False,
"FORCE_VFS": False,
"FORCE_NO_VFS": False,
# True if end-of-line photos are wrapped to the next line
"DO_WRAP": False,
# True if we want a black background and white frame, plus details
"DO_POLAROID": True,
"WANT_NO_CAPTION": True,
# True if we want to add pictures randomly
"PUT_RANDOM": False,
"DO_RESIZE": False,
### VFS options ###
"NO_SWITCH_TO_MINI": False,
### Directory options ###
# False if we pick directory images sequentially, True if we take them randomly
"PICK_RANDOM": False, #not implemented yet
## Random wall options ##
"SLEEP_TIME": 0,
"HELP": False
}
DEFAULTS = dict([(key, value) for key, value in PARAMS.items()])
DEFAULTS_docstr = dict([(KEY_TO_OPT[key][0], value) for key, value in PARAMS.items()])
usage = """Photo Wall for WebAlbums 3.
Usage:
photowall.py <path> <target> [options]
Arguments:
<path> The path where photos are picked up from. [default: %(<path>)s]
<target> The path where the target photo is written. Except in POLAROID+RANDOM mode, the image will be blanked out first. [default: %(<target>)s]
Options:
--polaroid Use polaroid-like images for the wall
--width <width> Set final image width. [default: %(--width)d]
--nb-lines <nb> Number on lines of the target image. [default: %(--nb-lines)d]
--resize Resize images before putting in the wall. [default: %(--resize)s]
--line-height <height> Set the height of a single image. [default: %(--line-height)d]
--do-wrap If not POLAROID, finish images on the next line. [default: %(--do-wrap)s]
--help Display this message
Polaroid mode options:
--crop-size <crop> Minimum size to allow cropping an image. [default: %(--crop-size)s]
--no-caption Disable caption. [default: %(--no-caption)s]
--put-random Put images randomly instead of linearly. [default: %(--put-random)s]
--sleep <time> If --put-random, time (in seconds) to go asleep before adding a new image. [default: %(--sleep)d]
Filesystem options:
--force-vfs Treat <path> as a VFS filesystem. [default: %(--force-vfs)s]
--force-no-vfs Treat <path> as a normal filesystem. [default: %(--force-no-vfs)s]
--no-switch-to-mini If VFS, don't switch from the normal image to the miniature. [default: %(--no-switch-to-mini)s]
--pick-random If not VFS, pick images randomly in the <path> folder. [default: %(--pick-random)s]
""" % DEFAULTS_docstr
class UpdateCallback:
def newExec(self):
pass
def newImage(self, row=0, col=0, filename=""):
print("%d.%d > %s" % (row, col, filename))
def updLine(self, row, tmpLine):
#print("--- %d ---" % row)
pass
def newFinal(self, name):
pass
def finished(self, name):
print("==========")
def stopRequested(self):
return False
def checkPause(self):
pass
updateCB = UpdateCallback()
if __name__ == "__main__":
arguments = docopt(usage, version="3.5-dev")
if arguments["--help"]:
print(usage)
exit()
param_args = dict([(OPT_TO_KEY[opt][0], OPT_TO_KEY[opt][1](value)) for opt, value in arguments.items()])
PARAMS = dict(PARAMS, **param_args)
###########################################
###########################################
previous = None
def get_next_file_vfs():
global previous
if previous is not None:
try:
os.unlink(previous)
except OSError:
pass
files = os.listdir(PARAMS["PATH"])
for filename in files:
if not "By Years" in filename:
previous = PARAMS["PATH"]+filename
if "gpx" in previous:
return get_next_file()
to_return = previous
try:
to_return = os.readlink(to_return)
except OSError:
pass
if not PARAMS["NO_SWITCH_TO_MINI"]:
to_return = to_return.replace("/images/", "/miniatures/") + ".png"
return to_return
def get_file_details(filename):
try:
link = filename
try:
link = os.readlink(filename)
except OSError:
pass
link = pipes.quote(link)
names = link[link.index("/miniatures/" if not PARAMS["NO_SWITCH_TO_MINI"] else "/images"):].split("/")[2:]
theme, year, album, fname = names
return "%s (%s)" % (album, theme)
except Exception as e:
#print("Cannot get details from {}: {}".format(filename, e))
fname = get_file_details_dir(filename)
fname = fname.rpartition(".")[0]
fname = fname.replace("_", "\n")
return fname
###########################################
class GetFileDir:
def __init__(self, randomize):
self.idx = 0
self.files = os.listdir(PARAMS["PATH"])
if len(self.files) == 0:
raise EnvironmentError("No file available")
self.files.sort()
if randomize:
print("RANDOMIZE")
random.shuffle(self.files)
def get_next_file(self):
to_return = self.files[self.idx]
self.idx += 1
self.idx %= len(self.files)
return PARAMS["PATH"]+to_return
def get_file_details_dir(filename):
return filename[filename.rindex("/")+1:]
###########################################
###########################################
def do_append(first, second, underneath=False):
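# ImageMagick: "+append" joins the two images side by side; "-append" stacks them vertically.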
sign = "-" if underneath else "+"
background = "-background black" if PARAMS["DO_POLAROID"] else ""
command = "convert -gravity center %s %sappend %s %s %s" % (background, sign, first, second, first)
ret = subprocess.call(command, shell=True)
if ret != 0:
raise Exception("Command failed: ", command)
def do_polaroid (image, filename=None, background="black", suffix=None):
if suffix is None:
suffix = PARAMS["IMG_FORMAT_SUFFIX"]
tmp = tempfile.NamedTemporaryFile(delete=False, suffix=suffix)
tmp.close()
image.save(filename=tmp.name)
if not(PARAMS["WANT_NO_CAPTION"]) and filename:
details = get_file_details(filename)
caption = """-caption "%s" """ % details.replace("'", "\\'")
else:
caption = ""
command = "convert -bordercolor snow -background %(bg)s -gravity center %(caption)s +polaroid %(name)s %(name)s" % {"bg" : background, "name":tmp.name, "caption":caption}
ret = subprocess.call(command, shell=True)
if ret != 0:
raise Exception("Command failed: "+ command)
img = Image(filename=tmp.name).clone()
os.unlink(tmp.name)
img.resize(width=image.width, height=image.height)
return img
def do_blank_image(height, width, filename, color="black"):
command = "convert -size %dx%d xc:%s %s" % (width, height, color, filename)
ret = subprocess.call(command, shell=True)
if ret != 0:
raise Exception("Command failed: "+ command)
def do_polaroid_and_random_composite(target_filename, target, image, filename):
PERCENT_IN = 100
image = do_polaroid(image, filename, background="transparent", suffix=".png")
tmp = tempfile.NamedTemporaryFile(delete=False, suffix=PARAMS["IMG_FORMAT_SUFFIX"])
image.save(filename=tmp.name)
height = random.randint(0, target.height - image.height) - target.height/2
width = random.randint(0, target.width - image.width) - target.width/2
geometry = ("+" if height >= 0 else "") + str(height) + ("+" if width >= 0 else "") + str(width)
command = "composite -geometry %s -compose Over -gravity center %s %s %s" % (geometry, tmp.name, target_filename, target_filename)
ret = os.system(command)
os.unlink(tmp.name)
if ret != 0:
raise object("failed")
def photowall(name):
output_final = None
previous_filename = None
#for all the rows,
for row in range(PARAMS["LINES"]):
output_row = None
row_width = 0
#concatenate until the image width is reached
img_count = 0
while row_width < PARAMS["WIDTH"]:
# get a new file, or the end of the previous one, if it was split
filename = get_next_file() if previous_filename is None else previous_filename
mimetype = None
previous_filename = None
# get a real image
if mime is not None:
mimetype = mime.from_file(filename)
if "symbolic link" in mimetype:
filename = os.readlink(filename)
mimetype = mime.from_file(filename)
if not "image" in mimetype:
continue
else:
try:
filename = os.readlink(filename)
except OSError:
pass
updateCB.newImage(row, img_count, filename)
img_count += 1
# resize the image
image = Image(filename=filename)
with image.clone() as clone:
factor = float(PARAMS["LINE_HEIGHT"])/clone.height
clone.resize(height=PARAMS["LINE_HEIGHT"], width=int(clone.width*factor))
#if the new image makes an overflow
if row_width + clone.width > PARAMS["WIDTH"]:
#compute how many pixels will overflow
overflow = row_width + clone.width - PARAMS["WIDTH"]
will_fit = clone.width - overflow
if PARAMS["DO_POLAROID"] and will_fit < PARAMS["CROP_SIZE"]:
row_width = PARAMS["WIDTH"]
previous_filename = filename
print("Doesn't fit")
continue
if PARAMS["DO_WRAP"]:
with clone.clone() as next_img:
next_img.crop(will_fit+1, 0, width=overflow, height=PARAMS["LINE_HEIGHT"])
tmp = tempfile.NamedTemporaryFile(delete=False, suffix=PARAMS["IMG_FORMAT_SUFFIX"])
tmp.close()
next_img.save(filename=tmp.name)
previous_filename = tmp.name
clone.crop(0, 0, width=will_fit, height=PARAMS["LINE_HEIGHT"])
if PARAMS["DO_POLAROID"]:
clone = do_polaroid(clone, filename)
tmp = tempfile.NamedTemporaryFile(delete=False, suffix=PARAMS["IMG_FORMAT_SUFFIX"])
tmp.close()
clone.save(filename=tmp.name)
row_width += clone.width
if output_row is not None:
do_append(output_row.name, tmp.name)
os.unlink(tmp.name)
else:
output_row = tmp
updateCB.updLine(row, output_row.name)
updateCB.checkPause()
if updateCB.stopRequested():
break
else:
if output_final is not None:
do_append(output_final.name, output_row.name, underneath=True)
os.unlink(output_row.name)
else:
output_final = output_row
updateCB.newFinal(output_final.name)
if output_final is not None:
shutil.move(output_final.name, name)
updateCB.finished(name)
else:
updateCB.finished(None)
return name
def random_wall(real_target_filename):
name = real_target_filename
filename = name[name.rindex("/"):]
name = filename[:filename.index(".")]
ext = filename[filename.index("."):]
target_filename = tempfile.gettempdir()+"/"+name+".2"+ext
try:
#remove any existing tmp file
os.unlink(target_filename)
except:
pass
try:
#if source already exist, build up on it
os.system("cp %s %s" % (target_filename, real_target_filename))
except:
pass
print("Target file is %s" % real_target_filename )
target = None
if mime is not None:
try:
mimetype = mime.from_file(target_filename)
if "symbolic link" in mimetype:
filename = os.readlink(target_filename)
mimetype = mime.from_file(target_filename)
if "image" in mimetype:
target = Image(filename=target_filename)
except IOError:
pass
if target is None:
height = PARAMS["LINES"] * PARAMS["LINE_HEIGHT"]
do_blank_image(height, PARAMS["WIDTH"], target_filename)
target = Image(filename=target_filename)
cnt = 0
while True:
updateCB.checkPause()
if updateCB.stopRequested():
break
filename = get_next_file()
print(filename)
img = Image(filename=filename)
with img.clone() as clone:
if PARAMS["DO_RESIZE"]:
factor = float(PARAMS["LINE_HEIGHT"])/clone.height
clone.resize(width=int(clone.width*factor), height=int(clone.height*factor))
do_polaroid_and_random_composite(target_filename, target, clone, filename)
updateCB.checkPause()
if updateCB.stopRequested():
break
updateCB.newImage(row=cnt, filename=filename)
updateCB.newFinal(target_filename)
os.system("cp %s %s" % (target_filename, real_target_filename))
cnt += 1
updateCB.checkPause()
if updateCB.stopRequested():
break
time.sleep(PARAMS["SLEEP_TIME"])
updateCB.checkPause()
if updateCB.stopRequested():
break
get_next_file = None
def path_is_jnetfs(path):
#check if PATH is VFS or not
df_output_lines = os.popen("df -Ph '%s'" % path).read().splitlines()
return df_output_lines and "JnetFS" in df_output_lines[1]
def fix_args():
global get_next_file
if PARAMS["PATH"][-1] != "/":
PARAMS["PATH"] += "/"
if PARAMS["FORCE_NO_VFS"]:
PARAMS["USE_VFS"]
elif PARAMS["FORCE_NO_VFS"]:
PARAMS["USE_VFS"]
else:
PARAMS["USE_VFS"] = path_is_jnetfs(PARAMS["PATH"])
if not PARAMS["USE_VFS"]:
get_next_file = GetFileDir(PARAMS["PICK_RANDOM"]).get_next_file
else:
get_next_file = get_next_file_vfs
def do_main():
fix_args()
updateCB.newExec()
target = PARAMS["TARGET"]
if not(PARAMS["PUT_RANDOM"]):
photowall(target)
else:
random_wall(target)
if __name__== "__main__":
do_main()
|
wazari972/WebAlbums
|
WebAlbums-FS/WebAlbums-Utils/Photowall/photowall.py
|
Python
|
gpl-3.0
| 15,846
|
#!/usr/bin/env python
# Copyright (c) LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license.
# See LICENSE in the project root for license information.
# -*- coding:utf-8 -*-
from iris.bin.sender import init_sender
import msgpack
def test_configure(mocker):
mocker.patch('iris.sender.cache.RoleTargets.initialize_active_targets')
mocker.patch('iris.db.init')
mocker.patch('iris.bin.sender.api_cache.cache_priorities')
mocker.patch('iris.bin.sender.api_cache.cache_applications')
mocker.patch('iris.bin.sender.api_cache.cache_modes')
init_sender({
'db': {
'conn': {
'kwargs': {
'scheme': 'mysql+pymysql',
'user': 'foo',
'password': 'bar',
'host': '127.0.0.1',
'database': 'iris',
'charset': 'utf8',
},
'str': '%(scheme)s://%(user)s:%(password)s@%(host)s/%(database)s?charset=%(charset)s'
},
'kwargs': {
'pool_recycle': 3600,
'echo': True,
'pool_size': 100,
'max_overflow': 100,
'pool_timeout': 60,
}
},
'sender': {
'debug': True,
},
'oncall': 'http://localhost:8002',
'role_lookup': 'dummy',
'metrics': 'dummy',
'skipsend': True,
'skipgmailwatch': True,
})
fake_message = {
'message_id': 1234,
'plan_id': 19546,
'application': 'test-app',
'priority': 'high',
'target': 'test-user',
'mode': 'sms',
}
fake_notification = {
'application': 'test-app',
'priority': 'high',
'target': 'test-user',
'role': 'user',
'subject': 'test subject',
}
fake_plan = {
u'name': u'find-test-user',
u'threshold_count': 10,
u'creator': u'test-user',
u'created': 1470444636,
u'aggregation_reset': 300,
u'aggregation_window': 300,
u'threshold_window': 900,
u'tracking_type': None,
u'steps': [
[{u'repeat': 0, u'target': u'test-user', u'id': 178243, u'priority': u'low', u'step': 1,
u'role': u'user', u'template': u'test-app Default', u'wait': 0},
{u'repeat': 1, u'target': u'test-user', u'id': 178252, u'priority': u'high', u'step': 1,
u'role': u'user', u'template': u'test-app Default', u'wait': 300}],
[{u'repeat': 3, u'target': u'test-user', u'id': 178261, u'priority': u'urgent', u'step': 2,
u'role': u'user', u'template': u'test-app Default', u'wait': 900}]
],
u'tracking_template': None,
u'tracking_key': None,
u'active': 1,
u'id': 19546,
u'description': u"please don't abuse this plan :)"
}
def test_fetch_and_prepare_message(mocker):
mock_iris_client = mocker.patch('iris.sender.cache.iris_client')
mock_iris_client.get.return_value.json.return_value = fake_plan
from iris.bin.sender import (
fetch_and_prepare_message, message_queue, send_queue
)
# drain the message and send queues
while message_queue.qsize() > 0:
message_queue.get()
while send_queue.qsize() > 0:
send_queue.get()
message_queue.put(fake_message)
fetch_and_prepare_message()
assert message_queue.qsize() == 0
assert send_queue.qsize() == 1
m = send_queue.get()
assert m['message_id'] == fake_message['message_id']
def test_fetch_and_send_message(mocker):
def check_mark_message_sent(m):
assert m['message_id'] == fake_message['message_id']
def mock_set_target_contact(message):
message['destination'] = 'foo@example.com'
message['mode'] = 'email'
message['mode_id'] = 1
return True
mocker.patch('iris.bin.sender.db')
mocker.patch('iris.bin.sender.send_message').return_value = 1
mocker.patch('iris.bin.sender.quota')
mocker.patch('iris.bin.sender.update_message_mode')
mock_mark_message_sent = mocker.patch('iris.bin.sender.mark_message_as_sent')
mock_mark_message_sent.side_effect = check_mark_message_sent
mocker.patch('iris.bin.sender.set_target_contact').side_effect = mock_set_target_contact
mock_iris_client = mocker.patch('iris.sender.cache.iris_client')
mock_iris_client.get.return_value.json.return_value = fake_plan
from iris.bin.sender import (
fetch_and_send_message, send_queue
)
# drain the send queue
while send_queue.qsize() > 0:
send_queue.get()
send_queue.put(fake_message)
fetch_and_send_message()
assert send_queue.qsize() == 0
mock_mark_message_sent.assert_called_once()
def test_handle_api_request_v0_send(mocker):
from iris.sender.rpc import handle_api_request
from iris.sender.shared import send_queue
# support expanding target
mocker.patch('iris.sender.cache.RoleTargets.__call__', lambda _, role, target: [target])
mock_address = mocker.MagicMock()
mock_socket = mocker.MagicMock()
mock_socket.recv.return_value = msgpack.packb({
'endpoint': 'v0/send',
'data': fake_notification,
})
while send_queue.qsize() > 0:
send_queue.get()
handle_api_request(mock_socket, mock_address)
assert send_queue.qsize() == 1
m = send_queue.get()
assert m['subject'] == '[%s] %s' % (fake_notification['application'],
fake_notification['subject'])
def test_handle_api_request_v0_send_with_mode(mocker):
from iris.sender.rpc import handle_api_request
from iris.sender.shared import send_queue
# support expanding target
mocker.patch('iris.sender.cache.RoleTargets.__call__', lambda _, role, target: [target])
mocker.patch('iris.bin.sender.set_target_contact')
fake_mode_notification = {}
fake_mode_notification.update(fake_notification)
fake_mode_notification['mode'] = 'sms'
mock_address = mocker.MagicMock()
mock_socket = mocker.MagicMock()
mock_socket.recv.return_value = msgpack.packb({
'endpoint': 'v0/send',
'data': fake_mode_notification,
})
while send_queue.qsize() > 0:
send_queue.get()
handle_api_request(mock_socket, mock_address)
assert send_queue.qsize() == 1
m = send_queue.get()
assert m['subject'] == '[%s] %s' % (fake_mode_notification['application'],
fake_mode_notification['subject'])
def test_handle_api_request_v0_send_timeout(mocker):
import iris.sender.rpc
iris.sender.rpc.rpc_timeout = 5
def sleep_10(x):
from gevent import sleep
sleep(10)
mock_address = mocker.MagicMock()
mock_socket = mocker.MagicMock()
mock_socket.recv.side_effect = sleep_10
iris.sender.rpc.handle_api_request(mock_socket, mock_address)
mock_socket.sendall.assert_called_with(msgpack.packb('TIMEOUT'))
def test_render_email_response_message(mocker):
from iris.bin.sender import render
mock_cursor = mocker.MagicMock()
mock_db = mocker.patch('iris.bin.sender.db')
mock_db.engine.raw_connection().cursor.return_value = mock_cursor
mock_cursor.fetchone.return_value = ['bar', 'foo']
mock_message = {'message_id': 1}
render(mock_message)
mock_cursor.execute.assert_called_once_with('SELECT `body`, `subject` FROM `message` WHERE `id` = %s', 1)
assert mock_message['body'] == 'bar'
assert mock_message['subject'] == 'foo'
def test_msgpack_handle_sets():
from iris.sender.rpc import msgpack_handle_sets
assert msgpack_handle_sets(set([1, 2, 3, 4])) == [1, 2, 3, 4]
def test_generate_slave_message_payload():
from iris.sender.rpc import generate_msgpack_message_payload
data = {
'ids': set([1, 2, 3, 4])
}
result = generate_msgpack_message_payload(data)
assert msgpack.unpackb(result) == {
'endpoint': 'v0/slave_send',
'data': {
'ids': [1, 2, 3, 4]
}
}
def test_quotas(mocker):
from iris.sender.quota import ApplicationQuota
from iris.metrics import stats
from gevent import sleep
mocker.patch('iris.sender.quota.ApplicationQuota.get_new_rules', return_value=[(u'testapp', 5, 2, 120, 120, u'testuser', u'user', u'iris-plan', 10)])
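# The mocked rule appears to define a soft limit of 2 and a hard limit of 5 messages per window
# for 'testapp'; the field order is an assumption inferred from the asserts below.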
mocker.patch('iris.sender.quota.ApplicationQuota.notify_incident')
mocker.patch('iris.sender.quota.ApplicationQuota.notify_target')
quotas = ApplicationQuota(None, None, None)
sleep(1)
assert quotas.allow_send({'application': 'testapp'})
assert quotas.allow_send({'application': 'testapp'})
assert quotas.allow_send({'application': 'testapp'}) # Breach soft quota
assert quotas.allow_send({'application': 'testapp'})
assert quotas.allow_send({'application': 'testapp'})
assert not quotas.allow_send({'application': 'testapp'}) # Breach hard quota
assert not quotas.allow_send({'application': 'testapp'})
assert stats['quota_soft_exceed_cnt'] == 3
assert stats['quota_hard_exceed_cnt'] == 2
assert stats['app_testapp_quota_hard_usage_pct'] == 100
assert stats['app_testapp_quota_soft_usage_pct'] == 200
for _ in xrange(10):
assert quotas.allow_send({'application': 'app_without_quota'})
|
houqp/iris-api
|
test/test_sender.py
|
Python
|
bsd-2-clause
| 9,206
|
"""
writing a wav file with numpy and scipy
"""
import numpy as np
import scipy.io.wavfile as wavfile
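# Synthesize one period (N samples) of a square-wave approximation from its
# first three odd harmonics: y ~ (4/pi) * sum(sin(2*pi*k*x/N) / k), k = 1, 3, 5.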
N = 168
x = np.arange(N)
y = 4 / np.pi*np.sin(2*np.pi*x/N)
y += 4 / (3*np.pi)*np.sin(6*np.pi*x/N)
y += 4 / (5*np.pi)*np.sin(10*np.pi*x/N)
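# Repeat the period 1313 times (~5 s of audio at 44100 Hz) and normalize to [-1, 1].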
y = np.tile(y, 1313)
y = y/max(y)
wavfile.write("sqwvfile.wav", 44100, y)
|
flawcode/sound_synth
|
sound003.py
|
Python
|
mit
| 317
|
"""
Tests for BlockCountsTransformer.
"""
# pylint: disable=protected-access
from openedx.core.djangoapps.content.block_structure.factory import BlockStructureFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.tests.factories import SampleCourseFactory # lint-amnesty, pylint: disable=wrong-import-order
from ..block_counts import BlockCountsTransformer
class TestBlockCountsTransformer(ModuleStoreTestCase):
"""
Test behavior of BlockCountsTransformer
"""
def setUp(self):
super().setUp()
self.course_key = SampleCourseFactory.create().id
self.course_usage_key = self.store.make_course_usage_key(self.course_key)
self.block_structure = BlockStructureFactory.create_from_modulestore(self.course_usage_key, self.store)
def test_transform(self):
# collect phase
BlockCountsTransformer.collect(self.block_structure)
self.block_structure._collect_requested_xblock_fields()
# transform phase
BlockCountsTransformer(['problem', 'chapter']).transform(usage_info=None, block_structure=self.block_structure)
# block_counts
chapter_x_key = self.course_key.make_usage_key('chapter', 'chapter_x')
block_counts_for_chapter_x = self.block_structure.get_transformer_block_data(
chapter_x_key, BlockCountsTransformer,
)
block_counts_for_course = self.block_structure.get_transformer_block_data(
self.course_usage_key, BlockCountsTransformer,
)
# verify count of chapters
assert block_counts_for_course.chapter == 2
# verify count of problems
assert block_counts_for_course.problem == 6
assert block_counts_for_chapter_x.problem == 3
# verify other block types are not counted
for block_type in ['course', 'html', 'video']:
assert not hasattr(block_counts_for_course, block_type)
assert not hasattr(block_counts_for_chapter_x, block_type)
|
eduNEXT/edx-platform
|
lms/djangoapps/course_api/blocks/transformers/tests/test_block_counts.py
|
Python
|
agpl-3.0
| 2,085
|
import random
from pprint import pprint
from botlistbot.custemoji import Emoji
TEST = "{} Test".format(Emoji.ANCHOR)
BACK_TO_MENU = "{} Back to Menu".format(Emoji.LEFTWARDS_BLACK_ARROW)
EXIT = "🔙 Exit"
REFRESH = "🔄 Refresh"
ADD_BOT = "➕ Add new bot"
EDIT_BOT = "🛠 Edit Bot"
SEND_BOTLIST = "☑ Update BotList"
SEND_ACTIVITY_LOGS = "Activity Logs"
BACK = "{} Back".format(Emoji.BACK_WITH_LEFTWARDS_ARROW_ABOVE)
BACK_TO_CATEGORY = "{} to Category".format(Emoji.BACK_WITH_LEFTWARDS_ARROW_ABOVE)
APPROVE_BOTS = "Approve Bots"
SEND_CONFIG_FILES = "Runtime Files"
FIND_OFFLINE = "Find Offline Bots"
APPROVE_SUGGESTIONS = "Approve Suggestions"
PENDING_UPDATE = "Pending Bots"
SUGGESTION_PENDING_EMOJI = "👓"
CHANGE_SUGGESTION = "📝 Make Changes"
DONE = "🔚 Done"
SHARE = "Share"
# main menu
CATEGORIES = "📚 Categories"
EXPLORE = "🔄 Explore"
NEW_BOTS = "🆕 New Bots"
SEARCH = "🔎 Search"
CONTRIBUTING = "📤 Contributing"
EXAMPLES = "📝 Examples"
HELP = "❔ Help"
ADMIN_MENU = "🛃 Admin Menu"
SWITCH_PRIVATE = "📖️ Continue in private"
FAVORITES = "💖 My Favorites"
ADD_FAVORITE = "➕ Add"
REMOVE_FAVORITE = "➖ Remove"
REMOVE_FAVORITE_VERBOSE = "➖ Remove from 💖 Favorites"
ADD_TO_FAVORITES = "Add to 💖 Favorites"
PIN = "📍 Pin"
def random_done_delete():
choices = ["I'm done", "Okay, done with this", "Okay, clear this mess", "I got what I wanted",
"Don't need this anymore", "Keep this group spam-free", "Cool",
"Alright, delete this junk"]
return '🗑 {}'.format(random.choice(choices))
|
JosXa/BotListBot
|
botlistbot/captions.py
|
Python
|
mit
| 1,577
|
from django.core.management import call_command
from django.db import migrations
from corehq.toggles import SYNC_SEARCH_CASE_CLAIM
from corehq.util.django_migrations import skip_on_fresh_install
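# skip_on_fresh_install: run this data migration only on existing deployments; a fresh install has no data to migrate.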
@skip_on_fresh_install
def _migrate_case_search_relevant(apps, schema_editor):
for domain in sorted(SYNC_SEARCH_CASE_CLAIM.get_enabled_domains()):
call_command('migrate_case_search_relevant', domain=domain)
class Migration(migrations.Migration):
dependencies = [
('app_manager', '0016_alter_exchangeapplication'),
]
operations = [
migrations.RunPython(_migrate_case_search_relevant,
reverse_code=migrations.RunPython.noop,
elidable=True),
]
|
dimagi/commcare-hq
|
corehq/apps/app_manager/migrations/0017_migrate_case_search_relevant.py
|
Python
|
bsd-3-clause
| 745
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Flat network interface. Useful for shared, flat networks.
"""
from neutronclient.common import exceptions as neutron_exceptions
from oslo_config import cfg
from oslo_log import log
from ironic.common import exception
from ironic.common.i18n import _, _LI, _LW
from ironic.common import neutron
from ironic.drivers import base
from ironic.drivers.modules.network import common
LOG = log.getLogger(__name__)
CONF = cfg.CONF
class FlatNetwork(common.VIFPortIDMixin, neutron.NeutronNetworkInterfaceMixin,
base.NetworkInterface):
"""Flat network interface."""
def __init__(self):
cleaning_net = CONF.neutron.cleaning_network
if not cleaning_net:
LOG.warning(_LW(
'Please specify a valid UUID or name for '
'[neutron]/cleaning_network configuration option so that '
'this interface is able to perform cleaning. Otherwise, '
'cleaning operations will fail to start.'))
def validate(self, task):
"""Validates the network interface.
:param task: a TaskManager instance.
:raises: InvalidParameterValue, if the network interface configuration
is invalid.
:raises: MissingParameterValue, if some parameters are missing.
"""
self.get_cleaning_network_uuid()
def add_provisioning_network(self, task):
"""Add the provisioning network to a node.
:param task: A TaskManager instance.
:raises: NetworkError when failed to set binding:host_id
"""
LOG.debug("Binding flat network ports")
node = task.node
host_id = node.instance_info.get('nova_host_id')
if not host_id:
return
client = neutron.get_client()
for port_like_obj in task.ports + task.portgroups:
vif_port_id = (
port_like_obj.internal_info.get(common.TENANT_VIF_KEY) or
port_like_obj.extra.get('vif_port_id')
)
if not vif_port_id:
continue
body = {
'port': {
'binding:host_id': host_id
}
}
try:
client.update_port(vif_port_id, body)
except neutron_exceptions.NeutronClientException as e:
msg = (_('Unable to set binding:host_id for '
'neutron port %(port_id)s. Error: '
'%(err)s') % {'port_id': vif_port_id, 'err': e})
LOG.exception(msg)
raise exception.NetworkError(msg)
def remove_provisioning_network(self, task):
"""Remove the provisioning network from a node.
:param task: A TaskManager instance.
"""
pass
def configure_tenant_networks(self, task):
"""Configure tenant networks for a node.
:param task: A TaskManager instance.
"""
pass
def unconfigure_tenant_networks(self, task):
"""Unconfigure tenant networks for a node.
:param task: A TaskManager instance.
"""
pass
def add_cleaning_network(self, task):
"""Add the cleaning network to a node.
:param task: A TaskManager instance.
:returns: a dictionary in the form {port.uuid: neutron_port['id']}
:raises: NetworkError, InvalidParameterValue
"""
# If we have left over ports from a previous cleaning, remove them
neutron.rollback_ports(task, self.get_cleaning_network_uuid())
LOG.info(_LI('Adding cleaning network to node %s'), task.node.uuid)
vifs = neutron.add_ports_to_network(
task, self.get_cleaning_network_uuid())
for port in task.ports:
if port.uuid in vifs:
internal_info = port.internal_info
internal_info['cleaning_vif_port_id'] = vifs[port.uuid]
port.internal_info = internal_info
port.save()
return vifs
def remove_cleaning_network(self, task):
"""Remove the cleaning network from a node.
:param task: A TaskManager instance.
:raises: NetworkError
"""
LOG.info(_LI('Removing ports from cleaning network for node %s'),
task.node.uuid)
neutron.remove_ports_from_network(task,
self.get_cleaning_network_uuid())
for port in task.ports:
if 'cleaning_vif_port_id' in port.internal_info:
internal_info = port.internal_info
del internal_info['cleaning_vif_port_id']
port.internal_info = internal_info
port.save()
|
NaohiroTamura/ironic
|
ironic/drivers/modules/network/flat.py
|
Python
|
apache-2.0
| 5,278
|
import logging
import logging.handlers
development_environ = True
monitor_period_in_s = 30
clock_period_in_s = 2
app_name = 'alarmclock'
mpd_music_folder = r'/var/lib/mpd/music'
local_music_folder = r'./ressources/music/'
local_playlist_folder = r'./ressources/playlist/'
log_folder = r'./log'
syslog_facility = logging.handlers.SysLogHandler.LOG_LOCAL7
user_config_file = r'user.ini'
mpd_restart_time = 5 #s
default_cross_fade = 3
initial_sound_volume = 10
ramp_up_period = 0.1 #s
|
musashin/alarmclock
|
clockconfig.py
|
Python
|
mit
| 488
|
#!/usr/bin/env python2.5
#
# Copyright 2010 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OrgAppRecord (Model) query functions for the GCI module.
"""
__authors__ = [
'"Lennard de Rijk" <ljvderijk@gmail.com>',
]
from soc.logic.models import org_app_record
from soc.models.org_app_record import OrgAppRecord as \
org_app_model
from soc.models.survey_record import SurveyRecord
DEF_ACCEPTED_TEMPLATE = \
'modules/gci/org_app_survey/mail/accepted_gci2010.html'
DEF_REJECTED_TEMPLATE = 'soc/org_app_survey/mail/rejected.html'
class Logic(org_app_record.Logic):
"""Logic class for OrgAppRecord.
"""
def __init__(self, model=org_app_model,
base_model=SurveyRecord, scope_logic=None, module_name='gci',
mail_templates={'accepted': DEF_ACCEPTED_TEMPLATE,
'rejected': DEF_REJECTED_TEMPLATE}):
"""Defines the name, key_name and model for this entity.
"""
super(Logic, self).__init__(
model=model, base_model=base_model, scope_logic=scope_logic,
module_name=module_name, mail_templates=mail_templates)
logic = Logic()
|
SRabbelier/Melange
|
app/soc/modules/gci/logic/models/org_app_record.py
|
Python
|
apache-2.0
| 1,648
|
from Tasks import *
import Pipeline
import Scheduler
import dendropy
sched = Scheduler.Scheduler()
pl = Pipeline.Pipeline(sched)
readgenes = pl.add_task(ReadPhylip('../test/data/some-genes.phylip'))
ft = pl.add_task(RunFastTree(cachefile='/tmp/fasttree-trees')).require(readgenes)
#astrid = pl.add_task(RunASTRID(cachefile='/tmp/astrid-tree')).require(ft)
astral = pl.add_task(RunASTRAL(cachefile='/tmp/astral-tree')).require(ft)
truest = pl.add_task(CastName("speciestree", "truespeciestree", dendropy.Tree)).require(pl.add_task(ReadSpeciesTree('../test/data/stree')))
#writest = pl.add_task(WriteSpeciesTree('/tmp/this-is-astridtree')).require(pl.add_task(CastName("estimatedspeciestree", "speciestree", dendropy.Tree)).require(astrid))
writest_astral = pl.add_task(WriteSpeciesTree('/tmp/this-is-astraltree')).require(pl.add_task(CastName("estimatedspeciestree", "speciestree", dendropy.Tree)).require(astral))
#compare = pl.add_task(CompareTrees().require(astrid).require(truest))
compare_astral = pl.add_task(CompareTrees().require(astral).require(truest))
pl.ready()
sched.run()
|
pranjalv123/TaxonDeletion
|
test/test.py
|
Python
|
gpl-3.0
| 1,121
|
class Person(object):
"""Person class"""
def __init__(self, name):
super(Person, self).__init__()
self.name = name
def anzhuang_zidan(self, dan_jia_temp, zi_dan_temp):
"""Load a bullet into the magazine"""
#magazine.store_bullet(bullet)
dan_jia_temp.baocun_zidan(zi_dan_temp)
def anzhuang_danjia(self, gun_temp, dan_jia_temp):
"""Fit the magazine into the gun"""
#gun.store_magazine(magazine)
gun_temp.baocun_danjia(dan_jia_temp)
class Gun(object):
"""Gun class"""
def __init__(self, name):
super(Gun, self).__init__()
self.name = name#records the type of the gun
self.danjia = None#records a reference to the magazine object
def baocun_danjia(self, dan_jia_temp):
"""Keep a reference to this magazine object in an attribute"""
self.danjia = dan_jia_temp
def __str__(self):
if self.danjia:
return "Gun info: %s, %s"%(self.name, self.danjia)
else:
return "Gun info: %s, this gun has no magazine"%(self.name)
class Danjia(object):
"""Magazine class"""
def __init__(self, max_num):
super(Danjia, self).__init__()
self.max_num = max_num#records the maximum capacity of the magazine
self.zidan_list = []#records references to all the bullets
def baocun_zidan(self, zi_dan_temp):
"""Store this bullet"""
self.zidan_list.append(zi_dan_temp)
def __str__(self):
return "Magazine info: %d/%d"%(len(self.zidan_list), self.max_num)
class Zidan(object):
"""Bullet class"""
def __init__(self, sha_shang_li):
super(Zidan, self).__init__()
self.sha_shang_li = sha_shang_li#the damage of this bullet
def main():
"""Controls the overall program flow"""
#1. Create the Lao Wang object
laowang = Person("Lao Wang")
#2. Create a gun object
ak47 = Gun("AK47")
#3. Create a magazine object
dan_jia = Danjia(20)
#4. Create some bullets
for i in range(15):
zi_dan = Zidan(10)
#5. Lao Wang loads the bullets into the magazine
#laowang.load_bullets_into_magazine(magazine, bullet)
laowang.anzhuang_zidan(dan_jia, zi_dan)
#6. Lao Wang fits the magazine into the gun
#laowang.fit_magazine_into_gun(gun, magazine)
laowang.anzhuang_danjia(ak47, dan_jia)
#test: check the magazine's info
print(dan_jia)
#test: check the gun's info
print(ak47)
#7. Lao Wang picks up the gun
#8. Create an enemy
#9. Lao Wang shoots the enemy
if __name__ == '__main__':
main()
|
jameswatt2008/jameswatt2008.github.io
|
python/Python基础/截图和代码/加强/老王开枪/老王开枪-5-测试 弹夹、枪.py
|
Python
|
gpl-2.0
| 2,244
|
import logging
from django.http import Http404, HttpResponse
import requests
import json
from point import LocationPoint
#Logging
logger = logging.getLogger(__name__)
import os
import datetime, time
class ReittiopasAPI:
def __init__(self):
#init
self.__epsg_in='wgs84'
self.__epsg_out='wgs84'
self.__user=os.environ.get('REITTIOPASAPI_USER', '')
self.__passwd=os.environ.get('REITTIOPASAPI_PASSWD', '')
def get_reverse_geocode(self, coordinates):
result = LocationPoint()
json_response = self.execute_reverse_geocode(coordinates)
if json_response.status_code == requests.codes.ok:
try:
logger.debug(json_response.url)
r = json.dumps(json_response.json())
routes = json.loads(r)
for route in routes:
r = json.dumps(route["name"])
result.set_address(r.replace('"',""))
result.set_coords(coordinates)
except ValueError:
logger.debug(json_response.url)
logger.warn("Unknown location %s" % str(coordinates))
else:
logger.warn(json_response.status_code)
json_response.raise_for_status()
return result
def get_reverse_geocode_city(self, coordinates):
city = ""
json_response = self.execute_reverse_geocode(coordinates)
if json_response.status_code == requests.codes.ok:
try:
logger.debug(json_response.url)
r = json.dumps(json_response.json())
routes = json.loads(r)
for route in routes:
city = str(route["city"])
except ValueError:
logger.debug(json_response.url)
logger.warn("Unknown location %s" % str(coordinates))
else:
logger.warn(json_response.status_code)
json_response.raise_for_status()
return city
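# The HSL Reittiopas HTTP API handles both geocoding and routing requests;
# 'reverse_geocode' resolves coordinates to the nearest known address.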
def execute_reverse_geocode(self, coordinates):
parameters = {'request': 'reverse_geocode',
'coordinate': coordinates,
'epsg_in':self.__epsg_in,
'epsg_out':self.__epsg_out,
'user':self.__user,
'pass': self.__passwd}
json_response = requests.get("http://api.reittiopas.fi/hsl/prod/", params=parameters)
return json_response
def is_empty(self, string):
if string is not None and string != 0 and len(string) > 0:
return False
return True
def get_geocode(self, address, city_coordinates):
result = LocationPoint()
parameters = {'request': 'geocode',
'key': address,
'loc_types': 'address',
'epsg_in':self.__epsg_in,
'epsg_out':self.__epsg_out,
'user':self.__user,
'pass': self.__passwd}
json_response = requests.get("http://api.reittiopas.fi/hsl/prod/", params=parameters)
if json_response.status_code == requests.codes.ok:
try:
logger.debug(json_response.url)
r = json.dumps(json_response.json())
routes = json.loads(r)
size = len(routes)
city = ""
if size > 1:
if self.is_empty(city_coordinates) == False:
city = self.get_reverse_geocode_city(city_coordinates)
for route in routes:
r = json.dumps(route["matchedName"])
result.set_address(r.replace('"',""))
coordinates = str(route["coords"])
result.set_coords(coordinates)
if self.is_empty(city) == False and city == str(route["city"]):
return result
except ValueError:
logger.debug(json_response.url)
logger.warn("Unknown location %s" % str(address))
else:
logger.warn(json_response.status_code)
json_response.raise_for_status()
return result
def get_route_information(self, start, end, time1,walk_cost, change_margin):
#api.reittiopas.fi/hsl/prod/?request=route&from=2546445,6675512&to=2549445,6675513&time=1030&timetype=arrival
result = ""
a = time1 #.strptime(time1, "%Y-%m-%d %H:%M:%S")
d = "%s%02d%02d" % (a.year, a.month, a.day)
s = "%02d%02d" % (a.hour, a.minute)
start_time = s
start_date = int(d)
logger.warn(d)
logger.warn(s)
if change_margin == 0 or change_margin == "":
change_margin = 1.0
parameters = {'request': 'route',
'from': start,
'to': end,
'date': start_date,
'time': start_time,
'walk_cost': walk_cost,
'change_margin': change_margin,
'epsg_in':self.__epsg_in,
'epsg_out':self.__epsg_out,
'user':self.__user,
'pass': self.__passwd}
json_response = requests.get("http://api.reittiopas.fi/hsl/prod/", params=parameters)
if json_response.status_code == requests.codes.ok:
try:
logger.debug(json_response.url)
result = json.loads(json.dumps(json_response.json()))
except ValueError:
logger.debug(json_response.url)
logger.warn("Unknown route %s %s" % (str(start), str(end)))
else:
logger.debug(json_response.url)
logger.warn(json_response.status_code)
json_response.raise_for_status()
return result
|
apps8os/trip-chain-game
|
tripchaingame/web/reittiopasAPI.py
|
Python
|
mit
| 6,013
|
# encoding.py
# Copyright (C) 2011-2014 Andrew Svetlov
# andrew.svetlov@gmail.com
#
# This module is part of BloggerTool and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import
import markdown
from bloggertool.engine import Meta
class Engine(object):
MARKDOWN_EXTS = ['abbr',
# 'codehilite', # see http://pygments.org/docs/
'def_list',
'fenced_code',
# default at end of html or ///Footnotes Go Here ///
'footnotes',
# configure via header_level and header_forceid: Off
# in md metadata
'headerid',
'meta',
'tables',
'toc', # use [TOC] in md file
]
def do(self, source):
md = markdown.Markdown(extensions=self.MARKDOWN_EXTS)
inner_html = md.convert(source)
meta = Meta()
if 'title' in md.Meta:
meta.title = ' '.join(md.Meta['title'])
if 'slug' in md.Meta:
assert len(md.Meta['slug']) == 1
slug = md.Meta['slug'][0]
meta.slug = slug
if 'labels' in md.Meta:
labels_str = ', '.join(md.Meta['labels'])
labels = [l.strip() for l in labels_str.split(',')]
meta.labels = frozenset(labels)
return inner_html, meta
|
asvetlov/bloggertool
|
lib/bloggertool/engine/markdown.py
|
Python
|
mit
| 1,490
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
# http://peak.telecommunity.com/DevCenter/setuptools#developer-s-guide
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
try:
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
except IOError:
CHANGES = ''
requires = ['nine', 'bag>=0.3.4', 'deform', 'Babel', 'lingua']
setup(
name='deform_pure',
version='0.0.0',
description="Pure CSS templates for the deform form library.",
long_description='\n\n'.join([README, CHANGES]),
author='Nando Florestan',
author_email='nandoflorestan@gmail.com',
url='https://github.com/nandoflorestan/deform_pure',
keywords='twitter bootstrap deform styles css web forms form',
classifiers=[ # http://pypi.python.org/pypi?:action=list_classifiers
"Development Status :: 4 - Beta",
# "Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
'License :: OSI Approved :: BSD License',
"Environment :: Web Environment",
"Framework :: Pyramid",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Software Development :: Libraries :: Python Modules",
],
license='BSD',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="deform_pure.jests",
)
|
nandoflorestan/deform_pure
|
setup.py
|
Python
|
bsd-3-clause
| 1,712
|
#-*- coding: UTF-8 -*-
import sys
from distutils.core import setup
setup(name='SynchroniZerD',
version='1.0.0',
description='A simple folder synchronizer application that uses rsync.',
long_description='SynchroniZerD is a folder synchronizer application that uses rsync to synchronize and wxWidgets for the UI',
author='Ronnie Andrew',
author_email='ronnieandrew92@gmail.com',
url='http://www.launchpad.net/synchronizerd',
license="GNU GPLv3",
# py_modules=['synchronizerd'],
scripts=['synchronizerd'],
#packages=['synchronizerd'],
data_files=[('/opt/synchronizerd',
['synchronizerd.py', 'synchronizer-rd.png', 'paypal_logo.jpg', 'synchronizerd', 'COPYING',
'GNU_HTML']), (
'share/applications', ['synchronizerd.desktop']), ('/opt/synchronizerd/views',
['views/synchronizerd_ui.py',
'views/__init__.py', 'views/synchronizerd.ui',
'views/synchronizerd_ui_pyside.py',
'views/feedback_ui.py',
'views/feedback_ui_pyside.py',
'views/paypal_ui.py',
'views/paypal_ui_pyside.py']),
('share/icons/hicolor/256x256/apps', ['synchronizer-rd.png']),
('share/synchronizerd', ['synchronizer-rd.png', 'paypal_logo.jpg', 'COPYING', 'GNU_HTML'])],
requires=['PyQt4', 'PySide', 'paypalrestsdk']
)
|
ROAND/filesynchronizer
|
setup.py
|
Python
|
gpl-3.0
| 1,865
|
#! /usr/bin/env python
from flaskext.testing import TestCase as Base
from fixture import SQLAlchemyFixture, NamedDataStyle
from cfmi import create_app, db
from cfmi.database.dicom import (DicomSubject, Series)
from cfmi.settings import TestConfig
from tests import fixtures
dbfixture = SQLAlchemyFixture(env=fixtures,
style=NamedDataStyle())
class TestCase(Base):
def create_app(self):
app = create_app(TestConfig())
db.app = app
db.init_app(app)
return app
def setUp(self):
db.create_all()
dbfixture.session=db.session
def tearDown(self):
db.session.remove()
db.drop_all()
|
nocko/cfmi
|
tests/__init__.py
|
Python
|
bsd-3-clause
| 712
|
"""
Used as entry point for mayatest from commandline
"""
if __name__ == "__main__":
from mayatest.cli import main
main()
|
arubertoson/mayatest
|
mayatest/__main__.py
|
Python
|
mit
| 131
|
def get_events(session):
"""Return list of key events recorded in the test_keys_page fixture."""
events = session.execute_script("return allEvents.events;") or []
# `key` values in `allEvents` may be escaped (see `escapeSurrogateHalf` in
# test_keys_wdspec.html), so this converts them back into unicode literals.
for e in events:
# example: turn "U+d83d" (6 chars) into u"\ud83d" (1 char)
if "key" in e and e["key"].startswith(u"U+"):
key = e["key"]
hex_suffix = key[key.index("+") + 1:]
e["key"] = unichr(int(hex_suffix, 16))
return events
def get_keys(input_el):
"""Get printable characters entered into `input_el`.
:param input_el: HTML input element.
"""
rv = input_el.property("value")
if rv is None:
return ""
else:
return rv
def filter_dict(source, d):
"""Filter `source` dict to only contain same keys as `d` dict.
:param source: dictionary to filter.
:param d: dictionary whose keys determine the filtering.
"""
return {k: source[k] for k in d.keys()}
|
paulrouget/servo
|
tests/wpt/web-platform-tests/webdriver/tests/release_actions/support/refine.py
|
Python
|
mpl-2.0
| 1,104
|
from __future__ import absolute_import, division, print_function, unicode_literals
import re
import os
from logging import getLogger
from typing import Tuple # noqa: F401
import json
import heprefs.invenio as invenio
try:
from urllib import quote_plus # type: ignore # noqa
from urllib2 import urlopen, HTTPError # type: ignore # noqa
except ImportError:
from urllib.parse import quote_plus
from urllib.request import urlopen
logger = getLogger(__name__)
class InspireArticle(object):
API = 'https://inspirehep.net/search'
RECORD_PATH = 'http://inspirehep.net/record/'
ARXIV_SERVER = 'https://arxiv.org'
DOI_SERVER = 'https://dx.doi.org'
DATA_KEY = 'primary_report_number,recid,system_control_number,' + \
'authors,corporate_name,title,abstract,publication_info,files'
LIKELY_PATTERNS = [
r'^(doi:)?10\.\d{4,}/.*$', # doi
r'^find? .+', # old spires style
]
@classmethod
def get_info(cls, query):
query_url = '{}?p={}&of=recjson&ot={}&rg=3'.format(cls.API, quote_plus(query), cls.DATA_KEY)
try:
f = urlopen(query_url) # "with" does not work on python2
s = f.read()
f.close()
        except HTTPError as e:
            raise Exception('Failed to fetch inspireHEP information: ' + str(e))
        try:
            results = json.loads(s.decode('utf-8'))
        except Exception as e:
            raise Exception('parse failed; query {} to inspireHEP gives no result?: '.format(query) + str(e))
        if (not isinstance(results, list)) or len(results) == 0:
            raise Exception('query {} to inspireHEP gives no result'.format(query))
if len(results) > 1:
            warning_text = 'more than one entry found; their titles are' + os.linesep
            for i in results:
                title = i.get('title', dict()).get('title') or 'unknown ' + str(i.get('primary_report_number'))
warning_text += ' ' + title + os.linesep
logger.warning(warning_text)
result = results[0]
return result
@classmethod
def try_to_construct(cls, query, force=False):
if not force:
if not any(re.match(r, query) for r in cls.LIKELY_PATTERNS):
return False
return cls(query)
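    # Minimal usage sketch (hypothetical query value; not part of the original module):
    #   article = InspireArticle.try_to_construct('10.1103/PhysRevLett.19.1264')
    #   if article:
    #       print(article.title(), article.abs_url())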
def __init__(self, query):
self.query = query
self._info = None
@property
def info(self):
if not self._info:
self._info = self.get_info(self.query)
return self._info
def abs_url(self):
# type: () -> str
if 'doi' in self.info:
return '{}/{}'.format(self.DOI_SERVER, self.info['doi'])
arxiv_id = invenio.arxiv_id(self.info)
if arxiv_id:
return '{}/abs/{}'.format(self.ARXIV_SERVER, arxiv_id)
if 'recid' in self.info:
return self.RECORD_PATH + str(self.info['recid'])
return ''
def pdf_url(self):
# type: () -> str
scoap3_url = [i['url'] for i in self.info.get('files', []) if i['full_name'] == 'scoap3-fulltext.pdf']
if scoap3_url:
return scoap3_url[0]
arxiv_id = invenio.arxiv_id(self.info)
if arxiv_id:
return '{}/pdf/{}'.format(self.ARXIV_SERVER, arxiv_id)
pdf_files = [i for i in self.info.get('files', []) if i['superformat'] == '.pdf']
if pdf_files:
if len(pdf_files) > 1:
logger.warning('Fulltext PDF file is guessed by its size.')
pdf_files.sort(key=lambda i: int(i.get('size', 0)), reverse=True)
return pdf_files[0].get('url', '')
return ''
def title(self):
# type: () -> str
return re.sub(r'\s+', ' ', invenio.title(self.info))
def authors(self):
# type: () -> str
collaborations_list = invenio.collaborations(self.info)
if collaborations_list:
return ', '.join([c + ' (collaboration)' for c in collaborations_list])
else:
return ', '.join(invenio.flatten_authors(self.info))
def authors_short(self):
# type: () -> str
return invenio.shorten_authors_text(self.info)
    def first_author(self):
        # type: () -> str
        authors = self.authors()
        # authors() returns a comma-joined string, so split it to get the first name
        return authors.split(', ')[0] if authors else ''
def texkey(self):
# type: () -> str
scn = self.info.get('system_control_number')
if scn:
if isinstance(scn, dict):
scn = [scn]
texkeys = [i['value'] for i in scn if i['institute'] == 'INSPIRETeX']
if len(texkeys) > 1:
logger.warning('multiple TeX-keys are found? : ' + ' & '.join(texkeys))
return texkeys[0] if texkeys else ''
return ''
def publication_info(self):
# type: () -> str
return invenio.publication_info_text(self.info)
def download_parameters(self):
# type: () -> Tuple[str, str]
url = self.pdf_url()
if not url:
return '', ''
arxiv_id = invenio.arxiv_id(self.info)
primary_report_number = invenio.primary_report_number(self.info)
file_title = \
arxiv_id if arxiv_id else \
primary_report_number if primary_report_number else \
self.info['doi'] if 'doi' in self.info else \
'unknown'
names = invenio.shorten_authors_text(self.info).replace(', ', '-').replace('et al.', 'etal')
filename = '{title}-{names}.pdf'.format(title=file_title, names=names)
return url, filename
def debug(self):
data = {
'abs_url': self.abs_url(),
'pdf_url': self.pdf_url(),
'title': self.title(),
'authors': self.authors(),
'first_author': self.first_author(),
'texkey': self.texkey(),
'publication_info': self.publication_info(),
'(download_filename)': self.download_parameters()[1],
'(collaborations)': invenio.collaborations(self.info)
}
for k, v in data.items():
logger.debug('{}: {}'.format(k, v))
|
misho104/heprefs
|
heprefs/inspire_article.py
|
Python
|
mit
| 6,169
|
import logging
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import useragents
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
from streamlink.utils import parse_json
log = logging.getLogger(__name__)
class Mitele(Plugin):
_url_re = re.compile(r"https?://(?:www\.)?mitele\.es/directo/(?P<channel>[\w-]+)")
pdata_url = "https://indalo.mediaset.es/mmc-player/api/mmc/v1/{channel}/live/html5.json"
gate_url = "https://gatekeeper.mediaset.es"
error_schema = validate.Schema({
"code": validate.any(validate.text, int),
"message": validate.text,
})
pdata_schema = validate.Schema(validate.transform(parse_json), validate.any(
validate.all(
{
"locations": [{
"gcp": validate.text,
"ogn": validate.any(None, validate.text),
}],
},
validate.get("locations"),
validate.get(0),
),
error_schema,
))
gate_schema = validate.Schema(
validate.transform(parse_json),
validate.any(
{
"mimeType": validate.text,
"stream": validate.url(),
},
error_schema,
)
)
def __init__(self, url):
super(Mitele, self).__init__(url)
self.session.http.headers.update({
"User-Agent": useragents.FIREFOX,
"Referer": self.url
})
@classmethod
def can_handle_url(cls, url):
return cls._url_re.match(url) is not None
def _get_streams(self):
channel = self._url_re.match(self.url).group("channel")
pdata = self.session.http.get(self.pdata_url.format(channel=channel),
acceptable_status=(200, 403, 404),
schema=self.pdata_schema)
log.trace("{0!r}".format(pdata))
if pdata.get("code"):
log.error("{0} - {1}".format(pdata["code"], pdata["message"]))
return
gdata = self.session.http.post(self.gate_url,
acceptable_status=(200, 403, 404),
data=pdata,
schema=self.gate_schema)
log.trace("{0!r}".format(gdata))
if gdata.get("code"):
log.error("{0} - {1}".format(gdata["code"], gdata["message"]))
return
log.debug("Stream: {0} ({1})".format(gdata["stream"], gdata.get("mimeType", "n/a")))
for s in HLSStream.parse_variant_playlist(self.session,
gdata["stream"],
name_fmt="{pixels}_{bitrate}").items():
yield s
__plugin__ = Mitele
|
repotvsupertuga/tvsupertuga.repository
|
script.module.streamlink.base/resources/lib/streamlink/plugins/mitele.py
|
Python
|
gpl-2.0
| 2,844
|
# -*- coding: utf-8 -*-
###############################################################################
#
# Friends
# Retrieves a list of names and profile IDs for Facebook friends associated with a specified user.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class Friends(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the Friends Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(Friends, self).__init__(temboo_session, '/Library/Facebook/Reading/Friends')
def new_input_set(self):
return FriendsInputSet()
def _make_result_set(self, result, path):
return FriendsResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return FriendsChoreographyExecution(session, exec_id, path)
class FriendsInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the Friends
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((required, string) The access token retrieved from the final OAuth step.)
"""
super(FriendsInputSet, self)._set_input('AccessToken', value)
def set_Fields(self, value):
"""
Set the value of the Fields input for this Choreo. ((optional, string) A comma separated list of fields to return (i.e. id,name).)
"""
super(FriendsInputSet, self)._set_input('Fields', value)
def set_Limit(self, value):
"""
Set the value of the Limit input for this Choreo. ((optional, integer) Used to page through results. Limits the number of records returned in the response.)
"""
super(FriendsInputSet, self)._set_input('Limit', value)
def set_Offset(self, value):
"""
Set the value of the Offset input for this Choreo. ((optional, integer) Used to page through results. Returns results starting from the specified number.)
"""
super(FriendsInputSet, self)._set_input('Offset', value)
def set_ProfileID(self, value):
"""
Set the value of the ProfileID input for this Choreo. ((optional, string) The id of the profile to retrieve friends for. Defaults to "me" indicating the authenticated user.)
"""
super(FriendsInputSet, self)._set_input('ProfileID', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Can be set to xml or json. Defaults to json.)
"""
super(FriendsInputSet, self)._set_input('ResponseFormat', value)
def set_Since(self, value):
"""
Set the value of the Since input for this Choreo. ((optional, date) Used for time-based pagination. Values can be a unix timestamp or any date accepted by strtotime.)
"""
super(FriendsInputSet, self)._set_input('Since', value)
def set_Until(self, value):
"""
Set the value of the Until input for this Choreo. ((optional, date) Used for time-based pagination. Values can be a unix timestamp or any date accepted by strtotime.)
"""
super(FriendsInputSet, self)._set_input('Until', value)
class FriendsResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the Friends Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Facebook. Corresponds to the ResponseFormat input. Defaults to JSON.)
"""
return self._output.get('Response', None)
def get_HasNext(self):
"""
Retrieve the value for the "HasNext" output from this Choreo execution. ((boolean) A boolean flag indicating that a next page exists.)
"""
return self._output.get('HasNext', None)
def get_HasPrevious(self):
"""
Retrieve the value for the "HasPrevious" output from this Choreo execution. ((boolean) A boolean flag indicating that a previous page exists.)
"""
return self._output.get('HasPrevious', None)
class FriendsChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return FriendsResultSet(response, path)
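# Illustrative usage sketch (assumes a valid TembooSession named `session`;
# execute_with_results is provided by the base Choreography class in the Temboo SDK):
#   choreo = Friends(session)
#   inputs = choreo.new_input_set()
#   inputs.set_AccessToken('<oauth token>')
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Response())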
|
jordanemedlock/psychtruths
|
temboo/core/Library/Facebook/Reading/Friends.py
|
Python
|
apache-2.0
| 5,502
|
"""
Data collectors for trajectory analysis.
"""
import logging
from . import measure
from .atoms import Selection
__all__ = ['AngleCollector', 'Collector', 'DihedralCollector', 'DistanceCollector', 'FrameCollector', 'RMSDCollector',
'XCoordCollector', 'YCoordCollector', 'ZCoordCollector']
LOGGER = logging.getLogger(__name__)
class Collector(object):
"""
Base class for collectors. Collects data from trajectory and writes them into file.
"""
# Format of the column header in the output
header_fmt = '%10s'
# Format of the data in the output
data_fmt = '%10.4f'
# Counter for automatic name generation.
auto_name_counter = 0
def __init__(self, name=None):
"""
Create the collector.
@param name: Name of the collector. If not provided, it is generated in form 'data#####'.
"""
if name is None:
Collector.auto_name_counter += 1
name = 'data%05d' % Collector.auto_name_counter
self.name = name
def collect(self, step):
"""
Performs the analysis on the frame.
Derived class must implement this method.
"""
raise NotImplementedError
class FrameCollector(Collector):
"""
Utility collector which collects frame number.
"""
header_fmt = '%8s'
data_fmt = '%8d'
def collect(self, step):
return step.frame
def _selection_center(selection_text, molecule):
"""
Utility method to get center of selection.
"""
sel = Selection(selection_text, molecule)
if not len(sel):
raise ValueError("Selection '%s' doesn't match any atoms." % selection_text)
return measure.center(sel)
class BaseCoordCollector(Collector):
"""
Base class for collectors of X, Y and Z coordinates.
"""
def __init__(self, selection, name=None):
"""
Creates coordinate collector.
@param selection: Selection text for collector.
@type selection: String
"""
super(BaseCoordCollector, self).__init__(name)
self.selection = selection
class XCoordCollector(BaseCoordCollector):
"""
Collects X coordinate of atom or center of selection.
"""
def collect(self, step):
return _selection_center(self.selection, step.molecule)[0]
class YCoordCollector(BaseCoordCollector):
"""
Collects Y coordinate of atom or center of selection.
"""
def collect(self, step):
return _selection_center(self.selection, step.molecule)[1]
class ZCoordCollector(BaseCoordCollector):
"""
Collects Z coordinate of atom or center of selection.
"""
def collect(self, step):
return _selection_center(self.selection, step.molecule)[2]
class DistanceCollector(Collector):
"""
Collects distance between two atoms or centers of atoms.
"""
def __init__(self, selection1, selection2, name=None):
"""
Creates distance collector.
@param selection1: Selection text for collector.
@type selection1: String
@param selection2: Selection text for collector.
@type selection2: String
"""
super(DistanceCollector, self).__init__(name)
self.selection1 = selection1
self.selection2 = selection2
def collect(self, step):
center1 = _selection_center(self.selection1, step.molecule)
center2 = _selection_center(self.selection2, step.molecule)
return measure.coords_distance(center1, center2)
class AngleCollector(Collector):
"""
Collects angle between three atoms or centers of atoms.
"""
def __init__(self, selection1, selection2, selection3, name=None):
"""
        Creates angle collector.
@param selection1: Selection text for collector.
@type selection1: String
@param selection2: Selection text for collector.
@type selection2: String
@param selection3: Selection text for collector.
@type selection3: String
"""
super(AngleCollector, self).__init__(name)
self.selection1 = selection1
self.selection2 = selection2
self.selection3 = selection3
def collect(self, step):
center1 = _selection_center(self.selection1, step.molecule)
center2 = _selection_center(self.selection2, step.molecule)
center3 = _selection_center(self.selection3, step.molecule)
return measure.coords_angle(center1, center2, center3)
class DihedralCollector(Collector):
"""
Collects dihedral angle of four atoms or centers of atoms.
"""
def __init__(self, selection1, selection2, selection3, selection4, name=None):
"""
        Creates dihedral collector.
@param selection1: Selection text for collector.
@type selection1: String
@param selection2: Selection text for collector.
@type selection2: String
@param selection3: Selection text for collector.
@type selection3: String
@param selection4: Selection text for collector.
@type selection4: String
"""
super(DihedralCollector, self).__init__(name)
self.selection1 = selection1
self.selection2 = selection2
self.selection3 = selection3
self.selection4 = selection4
def collect(self, step):
center1 = _selection_center(self.selection1, step.molecule)
center2 = _selection_center(self.selection2, step.molecule)
center3 = _selection_center(self.selection3, step.molecule)
center4 = _selection_center(self.selection4, step.molecule)
return measure.coords_dihedral(center1, center2, center3, center4)
class RMSDCollector(Collector):
"""
Collects RMSD data.
"""
def __init__(self, selection, reference, name=None):
"""
Creates RMSD collector.
@param selection: Selection text for RMSD
@type selection: String
@param reference: Reference for RMSD
@type reference: Selection
"""
assert isinstance(reference, Selection)
super(RMSDCollector, self).__init__(name)
self.selection = selection
self.reference = reference
def collect(self, step):
# Active frame number of the molecule.
cur_frame = step.molecule.frame
# Duplicate the trajectory frame because we will modify the coordinates.
# This also sets the molecule frame to the duplicated frame.
step.molecule.frames.copy()
# Duplicated frame number
dup_frame = step.molecule.frame
all_atoms = Selection('all', step.molecule)
sel = Selection(self.selection, step.molecule)
# Align coordinates to the reference
all_atoms.atomsel.move(sel.atomsel.fit(self.reference.atomsel))
# Measure RMSD
rmsd = sel.atomsel.rmsd(self.reference.atomsel)
# Delete the duplicated frame and reset trajectory frame
del step.molecule.frames[dup_frame]
step.molecule.frame = cur_frame
# Return the RMSD value
return rmsd
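# Illustrative usage sketch (assumes a pyvmd trajectory `step` object; the
# selection texts below are hypothetical):
#   dist = DistanceCollector('resid 1', 'resid 2', name='d12')
#   value = dist.collect(step)  # distance between the two selection centers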
|
ziima/pyvmd
|
pyvmd/collectors.py
|
Python
|
gpl-3.0
| 7,093
|
from BaseController import BaseController
import datetime
import redis
class SlowlogController(BaseController):
def get(self):
        data = {}
        data['data'] = []
        server = self.get_argument("server").split(':')
        connection = redis.Redis(host=server[0], port=int(server[1]), db=0, socket_timeout=1)
        logs = connection.execute_command('slowlog', 'get', '128')
        for lid, timeticks, run_micro, commands in logs:
            timestamp = datetime.datetime.fromtimestamp(int(timeticks))
            cmd = ' '.join(commands)
            data['data'].append({'id': lid, 'time': str(timestamp), 'escapeMs': float(run_micro) / 1000, 'cmd': cmd})
        self.write(data)
|
yyy1394/redis-monitor
|
src/api/controller/SlowlogController.py
|
Python
|
mit
| 685
|
# -*- coding: utf-8 -*-
import attr
from navmazing import NavigateToAttribute, NavigateToSibling
from widgetastic.widget import View, NoSuchElementException, Text
from widgetastic_manageiq import Accordion, ManageIQTree, PaginationPane, SummaryTable, Table
from widgetastic_patternfly import BreadCrumb, Button, Dropdown
from cfme.base.ui import BaseLoggedInPage
from cfme.common import TagPageView, Taggable, PolicyProfileAssignable
from cfme.exceptions import ItemNotFound
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.utils.appliance.implementations.ui import CFMENavigateStep, navigator, navigate_to
from cfme.utils.providers import get_crud_by_name
class StorageManagerToolbar(View):
"""The toolbar on the Storage Manager or Provider page"""
configuration = Dropdown('Configuration')
policy = Dropdown('Policy')
class StorageManagerDetailsToolbar(View):
"""The toolbar on the Storage Manager or Provider detail page"""
reload = Button(title='Refresh this page')
configuration = Dropdown('Configuration')
policy = Dropdown('Policy')
monitoring = Dropdown('Monitoring')
download = Button(title='Download summary in PDF format')
class StorageManagerEntities(View):
"""The entities on the main list Storage Manager or Provider page"""
table = Table(".//div[@id='list_grid' or @class='miq-data-table']/table")
class StorageManagerDetailsEntities(View):
"""The entities on the Storage Manager or Provider details page"""
breadcrumb = BreadCrumb()
properties = SummaryTable('Properties')
relationships = SummaryTable('Relationships')
smart_management = SummaryTable('Smart Management')
status = SummaryTable('Status')
class StorageManagerDetailsAccordion(View):
"""The accordion on the Storage Manager or Provider details page"""
@View.nested
class properties(Accordion): # noqa
tree = ManageIQTree()
@View.nested
class relationships(Accordion): # noqa
tree = ManageIQTree()
class StorageManagerView(BaseLoggedInPage):
"""A base view for all the Storage Manager or Provider pages"""
title = Text('.//div[@id="center_div" or @id="main-content"]//h1')
@property
def in_manager(self):
navigation_path = self.context['object'].navigation_path
return(
self.logged_in_as_current_user and
self.navigation.currently_selected == navigation_path)
class StorageManagerAllView(StorageManagerView):
"""The all Storage Manager or Provider page"""
@property
def is_displayed(self):
return (
self.in_manager and
self.title.text in ('Storage Managers', self.context['object'].manager_type))
toolbar = View.nested(StorageManagerToolbar)
entities = View.nested(StorageManagerEntities)
paginator = PaginationPane()
class ProviderStorageManagerAllView(StorageManagerAllView):
@property
def is_displayed(self):
return (
self.logged_in_as_current_user and
self.navigation.currently_selected == ['Compute', 'Clouds', 'Providers'] and
self.title.text == '{} (All Storage Managers)'.format(self.context['object'].name)
)
class StorageManagerDetailsView(StorageManagerView):
"""The details page for Storage Manager or Provider"""
@property
def is_displayed(self):
expected_title = '{} (Summary)'.format(self.context['object'].name)
return(
self.title.text == expected_title and
self.entities.breadcrumb.active_location == expected_title)
toolbar = View.nested(StorageManagerDetailsToolbar)
sidebar = View.nested(StorageManagerDetailsAccordion)
entities = View.nested(StorageManagerDetailsEntities)
@attr.s
class StorageManager(BaseEntity, Taggable, PolicyProfileAssignable):
""" Model of an storage manager in cfme
Args:
collection: Instance of collection
name: Name of the object manager.
provider: Provider
"""
name = attr.ib()
provider = attr.ib()
storage_title = 'Storage Manager'
@property
def navigation_path(self):
return self.parent.navigation_path
@property
def manager_type(self):
return self.parent.manager_type
def refresh(self, cancel=False):
"""Refresh storage manager"""
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select('Refresh Relationships and Power States',
handle_alert=not cancel)
if not cancel:
view.flash.assert_no_error()
def delete(self):
"""Delete storage manager"""
view = navigate_to(self, 'Details')
if self.appliance.version < '5.9':
remove_item = 'Remove this {}'.format(self.storage_title)
else:
remove_item = 'Remove this {} from Inventory'.format(self.storage_title)
view.toolbar.configuration.item_select(remove_item, handle_alert=True)
view = self.create_view(StorageManagerDetailsView)
view.flash.assert_no_error()
@property
def exists(self):
try:
navigate_to(self, 'Details')
return True
except ItemNotFound:
return False
@attr.s
class BlockManagerCollection(BaseCollection):
"""Collection object [block manager] for the :py:class:'cfme.storage.manager'"""
ENTITY = StorageManager
manager_type = 'Block Storage Managers'
navigation_path = ['Storage', 'Block Storage', 'Managers']
def all(self):
"""returning all block storage manager objects and support filtering as per provider"""
provider = self.filters.get("provider")
blocks = ("Cinder Manager", "EBS Storage Manager")
prov_db = {prov.id: prov for prov in self.appliance.rest_api.collections.providers.all}
managers = [
prov for prov in prov_db.values() if any(block in prov.name for block in blocks)
]
if provider:
return [
self.instantiate(name=mag.name, provider=provider)
for mag in managers
if provider.id == mag.parent_ems_id
]
else:
return [
self.instantiate(
name=mag.name, provider=get_crud_by_name(prov_db[mag.parent_ems_id].name)
)
for mag in managers
]
@attr.s
class ObjectManagerCollection(BaseCollection):
"""Collection object [object manager] for the :py:class:'cfme.storage.manager'"""
ENTITY = StorageManager
manager_type = 'Object Storage Managers'
navigation_path = ['Storage', 'Object Storage', 'Managers']
def all(self):
"""returning all object storage manager objects and support filtering as per provider"""
provider = self.filters.get("provider")
prov_db = {prov.id: prov for prov in self.appliance.rest_api.collections.providers.all}
managers = [
prov for prov in prov_db.values() if "Swift Manager" in prov.name
]
if provider:
return [
self.instantiate(name=mag.name, provider=provider)
for mag in managers
if provider.id == mag.parent_ems_id
]
else:
return [
self.instantiate(
name=mag.name, provider=get_crud_by_name(prov_db[mag.parent_ems_id].name)
)
for mag in managers
]
@navigator.register(BlockManagerCollection, 'All')
@navigator.register(ObjectManagerCollection, 'All')
class StorageManagerAll(CFMENavigateStep):
VIEW = StorageManagerAllView
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def step(self, *args, **kwargs):
self.prerequisite_view.navigation.select(*self.obj.navigation_path)
@navigator.register(StorageManager, 'Details')
class StorageManagerDetails(CFMENavigateStep):
VIEW = StorageManagerDetailsView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self, *args, **kwargs):
try:
row = self.prerequisite_view.paginator.find_row_on_pages(
self.prerequisite_view.entities.table, Name=self.obj.name)
row.click()
except NoSuchElementException:
raise ItemNotFound('Could not locate {}'.format(self.obj.name))
@navigator.register(StorageManager, 'EditTagsFromDetails')
class StorageManagerDetailEditTag(CFMENavigateStep):
""" This navigation destination help to WidgetasticTaggable"""
VIEW = TagPageView
prerequisite = NavigateToSibling('Details')
def step(self, *args, **kwargs):
self.prerequisite_view.toolbar.policy.item_select('Edit Tags')
|
anurag03/integration_tests
|
cfme/storage/manager.py
|
Python
|
gpl-2.0
| 8,783
|
import numpy as np
import tensorflow as tf
import data_helpers as dh
from datetime import datetime, date, time, timedelta
'''
get pids for training date
get x_train for training date
get y_train for training date
get x_test for testing date
get y_test for testing date
'''
def getTrainingData(curDate, split_frac = 0.8):
pidList = dh.getPids(curDate)
labels = dh.getLabel(curDate)
x_train = [dh.transform_v3(pid, 200) for pid in pidList]
y_train = np.array([dh.isReturn(labels,pid) for pid in pidList])
z_ = sum(y_train == 0)
o_ = sum(y_train == 1)
print("Zero: {0}, One: {1}".format(z_,o_))
print("%age Return: {0}", o_/(o_+z_))
features = np.zeros((len(x_train), 200), dtype=int)
for i, row in enumerate(x_train):
features[i, -len(row):] = np.array(row)
split_idx = int(len(features)*split_frac)
return features[:split_idx], features[split_idx:], y_train[:split_idx], y_train[split_idx:]
def get_batches(x, y, batch_size=100):
n_batches = len(x)//batch_size
x, y = x[:n_batches*batch_size], y[:n_batches*batch_size]
for ii in range(0, len(x), batch_size):
yield x[ii:ii+batch_size], y[ii:ii+batch_size]
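# Sketch of how the batch generator is consumed (illustrative only):
#   for x_batch, y_batch in get_batches(x_train, y_train, batch_size=256):
#       ...  # each batch is fed to the LSTM graph built in createLSTM()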
def createLSTM(lstm_size = 1024, lstm_layers = 4, batch_size = 256, learning_rate = 0.001, kp = 1):
# Create the graph object
graph = tf.Graph()
# Add nodes to the graph
with graph.as_default():
inputs_ = tf.placeholder(tf.int32, [None, None], name='inputs')
labels_ = tf.placeholder(tf.int32, [None, None], name='labels')
keep_prob = tf.placeholder(tf.float32, name='keep_prob')
num_events = 11
embd_size = 16
embedding = tf.Variable(tf.random_uniform((num_events, embd_size), -1, 1))
embed = tf.nn.embedding_lookup(embedding, inputs_)
def make_cell(rnn_size):
lstm_cell = tf.contrib.rnn.LSTMCell(rnn_size,
initializer=tf.random_uniform_initializer(-0.1, 0.1, seed=2))
lstm_cell = tf.contrib.rnn.DropoutWrapper(lstm_cell, output_keep_prob=kp)
return lstm_cell
#enc_cell = tf.contrib.rnn.MultiRNNCell([make_cell(rnn_size) for _ in range(num_layers)])
#lstm = tf.contrib.rnn.BasicLSTMCell(lstm_size)
# # Add dropout to the cell
# drop = tf.contrib.rnn.DropoutWrapper(lstm, output_keep_prob=keep_prob)
# Stack up multiple LSTM layers, for deep learning
#cell = tf.contrib.rnn.MultiRNNCell([drop] * lstm_layers)
cell = tf.contrib.rnn.MultiRNNCell([make_cell(lstm_size) for _ in range(lstm_layers)])
# Getting an initial state of all zeros
initial_state = cell.zero_state(batch_size, tf.float32)
outputs, final_state = tf.nn.dynamic_rnn(cell, embed,
initial_state=initial_state)
predictions = tf.contrib.layers.fully_connected(outputs[:, -1], 1, activation_fn=tf.sigmoid)
cost = tf.losses.mean_squared_error(labels_, predictions)
optimizer = tf.train.AdamOptimizer(learning_rate).minimize(cost)
correct_pred = tf.equal(tf.cast(tf.round(predictions), tf.int32), labels_)
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
saver = tf.train.Saver()
epochs = 10
x_train, x_val, y_train, y_val = getTrainingData(dh.DATE)
with tf.Session(graph=graph) as sess:
sess.run(tf.global_variables_initializer())
iteration = 1
for e in range(epochs):
state = sess.run(initial_state)
for ii, (x, y) in enumerate(get_batches(x_train, y_train, batch_size), 1):
feed = {inputs_: x,
labels_: y[:, None],
keep_prob: 0.5,
initial_state: state}
loss, state, _ = sess.run([cost, final_state, optimizer], feed_dict=feed)
if iteration%5==0:
print("Epoch: {}/{}".format(e, epochs),
"Iteration: {}".format(iteration),
"Train loss: {:.3f}".format(loss))
if iteration%25==0:
val_acc = []
val_state = sess.run(cell.zero_state(batch_size, tf.float32))
for x, y in get_batches(x_val, y_val, batch_size):
feed = {inputs_: x,
labels_: y[:, None],
keep_prob: 1,
initial_state: val_state}
batch_acc, val_state = sess.run([accuracy, final_state], feed_dict=feed)
val_acc.append(batch_acc)
print("Val acc: {:.3f}".format(np.mean(val_acc)))
iteration +=1
saver.save(sess, "checkpoints/isreturn.ckpt")
# x_train, y_train = getTrainingData(dh.DATE)
#
# x_test, y_test = getTrainingData(date(2017,8,7))
createLSTM()
|
ajZiiiN/honest-blackops
|
src/createTrainDataRNN.py
|
Python
|
mit
| 4,969
|
from __future__ import absolute_import
# -*- coding: utf-8 -*-
import sunpy
def test_sysinfo():
output = sunpy.util.get_sys_dict()
assert isinstance(output, dict)
|
Alex-Ian-Hamilton/sunpy
|
sunpy/util/tests/test_sysinfo.py
|
Python
|
bsd-2-clause
| 175
|
# _*_ encoding: utf-8 _*_
"""Demonstrate doubly-linked list in python."""
from linked_list import Node
class DoublyLinked(object):
"""Implement a doubly-linked list from a singly-linked list."""
def __init__(self, val=None):
"""Initialize the list."""
self.head = object()
self._mark = self.head
if val:
self.insert(val)
def size(self):
"""Return the length of the list."""
counter = 0
current_node = self.head
while current_node is not self._mark:
counter += 1
current_node = current_node.get_next()
return counter
    def search(self, val):
        """Return the node containing 'val'; raise IndexError if not found."""
        current_node = self.head
        if current_node is self._mark:
            raise IndexError
        while current_node.get_data() != val:
            current_node = current_node.get_next()
            if current_node is self._mark:
                raise IndexError
        return current_node
    def insert(self, val):
        """Insert value (or each item of a list) at head of list."""
        items = val[::-1] if isinstance(val, list) else [val]
        for item in items:
            new_node = DoubleNode(item, self.head, self._mark)
            try:
                self.head.set_previous(new_node)
            except AttributeError:
                pass
            self.head = new_node
def display(self):
"""Print list represented as Python tuple literal."""
output = """"""
current_node = self.head
while current_node is not self._mark:
output += '{}, '.format(current_node.get_data())
current_node = current_node.get_next()
printable = '(' + output[:-2] + ')'
print(printable)
return printable
def pop(self):
"""Pop the first value off the head of the list and return it."""
item = self.head
if item is self._mark:
raise IndexError
else:
self.head = item.get_next()
try:
self.head.set_previous(self._mark)
except AttributeError:
pass
return item.get_data()
def append(self, val):
"""Append the given item to the tail of the list."""
cur = self.head
if cur == self._mark:
new_node = DoubleNode(val, self._mark)
self.head = new_node
else:
new_node = DoubleNode(val, self._mark)
while cur.next_node != self._mark:
cur = cur.next_node
cur.set_next(new_node)
new_node.set_previous(cur)
def shift(self):
"""Remove and returns the last value from the tail of the list."""
cur = self.head
if cur == self._mark:
raise IndexError
else:
while cur.next_node != self._mark:
cur = cur.next_node
try:
cur.prev_node.next_node = self._mark
except AttributeError:
raise IndexError
return cur.get_data()
    def remove(self, value):
        """Remove the first occurrence of value in the list."""
        previous_node = None
        current_node = self.head
        while current_node is not self._mark and current_node.get_data() != value:
            previous_node = current_node
            current_node = current_node.get_next()
        if current_node is self._mark:
            print('Not Found')
        elif previous_node is None:
            # Value sits at the head, so removing it is a pop.
            self.pop()
        else:
            previous_node.set_next(current_node.get_next())
class DoubleNode(object):
"""Double Node constructor for doubly linked list."""
def __init__(self, data=None, next_node=None, prev_node=None):
"""Initialize the double node."""
self.data = data
self.prev_node = prev_node
self.next_node = next_node
def set_previous(self, prev):
"""Set previous node."""
self.prev_node = prev
def get_data(self):
"""Get data for node."""
return self.data
def get_next(self):
"""Retrieve next node in list."""
return self.next_node
def set_next(self, next_node):
"""Set next node in list."""
self.next_node = next_node
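# Illustrative usage (assumed values; not part of the original module):
#   dll = DoublyLinked([1, 2, 3])  # list reads (1, 2, 3) from the head
#   dll.append(4)                  # (1, 2, 3, 4)
#   dll.pop()                      # returns 1
#   dll.shift()                    # returns 4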
|
palindromed/data-structures2
|
src/doubly_linked.py
|
Python
|
mit
| 4,501
|
##
## For help on setting up your machine and configuring this TestScript go to
## http://docs.bitbar.com/testing/appium/
##
import os
import time
import unittest
from time import sleep
from appium import webdriver
from device_finder import DeviceFinder
def log(msg):
print (time.strftime("%H:%M:%S") + ": " + msg)
class TestdroidIOS(unittest.TestCase):
    def screenshot(self, name):
        """
        Take a screenshot and store it in the screenshot directory, with a numbering prefix.
        :Args:
        - name - files are stored as #_name
        """
screenshot_name = str(self.screenshot_count) + "_" + name + ".png"
log("Taking screenshot: " + screenshot_name)
self.driver.save_screenshot(self.screenshot_dir + "/" + screenshot_name)
self.screenshot_count += 1
def setUp(self):
##
## IMPORTANT: Set the following parameters.
##
testdroid_url = os.environ.get('TESTDROID_URL') or "https://cloud.bitbar.com"
appium_url = os.environ.get('TESTDROID_APPIUM_URL') or 'https://appium.bitbar.com/wd/hub'
testdroid_apiKey = os.environ.get('TESTDROID_APIKEY') or ""
testdroid_project_name = os.environ.get('TESTDROID_PROJECT') or "iOS sample project"
testdroid_testrun_name = os.environ.get('TESTDROID_TESTRUN') or "My testrun"
testdroid_app = os.environ.get('TESTDROID_APP') or ""
testdroid_bundle_id = os.environ.get('TESTDROID_BUNDLE_ID') or "com.bitbar.testdroid.BitbarIOSSample"
new_command_timeout = os.environ.get('TESTDROID_CMD_TIMEOUT') or '60'
testdroid_test_timeout = os.environ.get('TESTDROID_TEST_TIMEOUT') or '600'
testdroid_find_device = os.environ.get('TESTDROID_FINDDEVICE') or "true"
automation_name = os.environ.get('TESTDROID_AUTOMATION_NAME') or "XCUITest"
self.screenshot_dir = os.environ.get('TESTDROID_SCREENSHOTS') or os.getcwd() + "/screenshots"
log("Will save screenshots at: " + self.screenshot_dir)
self.screenshot_count = 1
# Options to select device
# 1) Set environment variable TESTDROID_DEVICE
# 2) Set device name to this python script
# 3) Do not set #1 and #2 and let DeviceFinder to find free device for you
testdroid_device = os.environ.get('TESTDROID_DEVICE') or ""
deviceFinder = DeviceFinder(url=testdroid_url)
if testdroid_device == "":
# Loop will not exit until free device is found
while testdroid_device == "":
testdroid_device = deviceFinder.available_ios_device()
print "Starting Appium test using device '%s'" % testdroid_device
desired_capabilities_cloud = {}
desired_capabilities_cloud['testdroid_apiKey'] = testdroid_apiKey
desired_capabilities_cloud['testdroid_target'] = 'ios'
desired_capabilities_cloud['testdroid_project'] = testdroid_project_name
desired_capabilities_cloud['testdroid_testrun'] = testdroid_testrun_name
desired_capabilities_cloud['testdroid_device'] = testdroid_device
desired_capabilities_cloud['testdroid_app'] = testdroid_app
desired_capabilities_cloud['platformName'] = 'iOS'
desired_capabilities_cloud['deviceName'] = 'iPhone device'
desired_capabilities_cloud['newCommandTimeout'] = new_command_timeout
desired_capabilities_cloud['testdroid_testTimeout'] = testdroid_test_timeout
desired_capabilities_cloud['testdroid_findDevice'] = testdroid_find_device
desired_capabilities_cloud['automationName'] = automation_name
desired_capabilities_cloud['app'] = testdroid_bundle_id
# set up webdriver
log("WebDriver request initiated. Waiting for response, this typically takes 2-3 mins")
self.driver = webdriver.Remote(command_executor=appium_url, desired_capabilities=desired_capabilities_cloud)
log("WebDriver response received")
def tearDown(self):
log("Quitting")
self.driver.quit()
def testSample(self):
# view1
log("view1: Finding buttons")
buttons = self.driver.find_elements_by_class_name('UIAButton')
log("view1: Clicking button [0] - RadioButton 1")
buttons[0].click()
log("view1: Typing in textfield[0]: Testdroid user")
elem = self.driver.find_element_by_class_name('UIATextField')
elem.clear()
elem.send_keys('Testdroid user')
log("view1: Taking screenshot screenshot1.png")
self.screenshot("screenshot1")
log("view1: Hiding Keyboard")
self.driver.find_element_by_xpath("//*[contains(@name, 'Return')]").click()
log("view1: Taking screenshot screenshot2.png")
self.screenshot("screenshot2")
log("view1: Clicking button[6] - OK Button")
buttons[6].click()
log("view2: Taking screenshot screenshot3.png")
self.screenshot("screenshot3")
# view2
log("view2: Finding buttons")
buttons = self.driver.find_elements_by_class_name('UIAButton')
log("view2: Clicking button[0] - Back/OK button")
buttons[0].click()
# view1
log("view1: Finding buttons")
buttons = self.driver.find_elements_by_class_name('UIAButton')
log("view1: Clicking button[2] - RadioButton 2")
buttons[2].click()
log("view1: Clicking button[6] - OK Button")
buttons[6].click()
log("view1: Taking screenshot screenshot4.png")
self.screenshot("screenshot4")
log("view1: Sleeping 3 before quitting webdriver.")
sleep(3)
def initialize():
return TestdroidIOS
if __name__ == "__main__":
suite = unittest.TestLoader().loadTestsFromTestCase(TestdroidIOS)
unittest.TextTestRunner(verbosity=2).run(suite)
|
aknackiron/testdroid-samples
|
appium/sample-scripts/python/testdroid_ios.py
|
Python
|
apache-2.0
| 5,790
|
import cv2
import numpy as np
def skinExtract(img):
    ycbcr = cv2.cvtColor(img, cv2.COLOR_BGR2YCrCb)  # cv2.imread returns BGR, not RGB
    [y, cr, cb] = cv2.split(ycbcr)  # OpenCV orders the channels Y, Cr, Cb
skin = np.zeros((img.shape[:2]), np.uint8)
for x in range(img.shape[0]):
for y in range(img.shape[1]):
            # approximate skin-color range in YCrCb
if(138 <= cr[x, y] and cr[x, y] <=170 and 100 <= cb[x, y] and cb[x, y] <= 127):
skin[x, y] = 255
else:
skin[x, y] = 0
    # dilate to fill holes, then erode to remove small protrusions
skin = cv2.dilate(skin, np.ones((15, 15), np.uint8))
skin = cv2.erode(skin, np.ones((15, 15), np.uint8))
return skin
if __name__ == "__main__":
im = cv2.imread("../Images/hand.jpg", cv2.IMREAD_COLOR)
    if im is None:  # cv2.imread returns None when the file is missing
        print("Image hand.jpg not found")
        exit(1)
im = cv2.GaussianBlur(im, (3, 3), 1)
skin = skinExtract(im)
    im2 = im  # kept for display later
cv2.imshow("skin", skin)
im, contours, hierarchy = cv2.findContours(skin, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
    # find the largest contour
index = 0
maxArea = 0.0
for i in range(len(contours)):
area = cv2.contourArea(contours[i])
if (area > maxArea):
maxArea = area
index = i
    # compute image moments to find the centroid
print ("maxArea:", maxArea)
mo = cv2.moments(skin, True)
center = (int(mo["m10"] / mo["m00"]), int(mo["m01"] / mo["m00"]))
cv2.circle(im2, center, 8, (0, 0, 255), -1)
    # search for fingertips
    cou = contours[index].reshape(-1, 2)  # flatten contour points to (N, 2)
maxM = 0
count = 0
notice = 0
fingerTips = list()
for i in range(20, len(cou) - 20, 2):
q = cou[i - 20]; p = cou[i]; r = cou[i + 20]
pqx = p[0] - q[0]; pqy = p[1] - q[1]
prx = p[0] - r[0]; pry = p[1] - r[1]
        dot = pqx * prx + pqy * pry  # dot product of the two edge vectors
        if (dot < -20 or dot > 20):
            cross = pqx * pry - prx * pqy
if cross > 0:
fingerTips.append(p)
cv2.circle(im2, tuple(p), 5, (255, 0, 0), cv2.FILLED)
                cv2.line(im2, center, tuple(p), (255, 0, 0), 2)
    skin = cv2.drawContours(skin, contours, index, (0, 255, 255))
cv2.imshow("contours", skin)
cv2.imshow("show", im2)
while (cv2.waitKey() & 0xFF != ord('q')):
pass
# return fingerTips
    # failed
|
Ginkgo-Biloba/Misc-Python
|
opencv/009FigerTips.py
|
Python
|
gpl-3.0
| 1,994
|
from os import listdir, open as os_open, close as os_close, write as os_write, O_RDWR, O_NONBLOCK
from fcntl import ioctl
from boxbranding import getBoxType, getBrandOEM
import struct
from config import config, ConfigSubsection, ConfigInteger, ConfigYesNo, ConfigText, ConfigSlider
from Tools.Directories import pathExists
boxtype = getBoxType()
# asm-generic/ioctl.h
IOC_NRBITS = 8L
IOC_TYPEBITS = 8L
IOC_SIZEBITS = 13L
IOC_DIRBITS = 3L
IOC_NRSHIFT = 0L
IOC_TYPESHIFT = IOC_NRSHIFT+IOC_NRBITS
IOC_SIZESHIFT = IOC_TYPESHIFT+IOC_TYPEBITS
IOC_DIRSHIFT = IOC_SIZESHIFT+IOC_SIZEBITS
IOC_READ = 2L
def EVIOCGNAME(length):
return (IOC_READ<<IOC_DIRSHIFT)|(length<<IOC_SIZESHIFT)|(0x45<<IOC_TYPESHIFT)|(0x06<<IOC_NRSHIFT)
class inputDevices:
def __init__(self):
self.Devices = {}
self.currentDevice = ""
self.getInputDevices()
def getInputDevices(self):
devices = listdir("/dev/input/")
for evdev in devices:
try:
buffer = "\0"*512
self.fd = os_open("/dev/input/" + evdev, O_RDWR | O_NONBLOCK)
self.name = ioctl(self.fd, EVIOCGNAME(256), buffer)
self.name = self.name[:self.name.find("\0")]
if str(self.name).find("Keyboard") != -1:
self.name = 'keyboard'
os_close(self.fd)
except (IOError,OSError), err:
print '[iInputDevices] getInputDevices <ERROR: ioctl(EVIOCGNAME): ' + str(err) + ' >'
self.name = None
if self.name:
self.Devices[evdev] = {'name': self.name, 'type': self.getInputDeviceType(self.name),'enabled': False, 'configuredName': None }
if boxtype.startswith('et'):
self.setDefaults(evdev) # load default remote control "delay" and "repeat" values for ETxxxx ("QuickFix Scrollspeed Menues" proposed by Xtrend Support)
def getInputDeviceType(self,name):
if "remote control" in name:
return "remote"
elif "keyboard" in name:
return "keyboard"
elif "mouse" in name:
return "mouse"
else:
print "Unknown device type:",name
return None
def getDeviceName(self, x):
if x in self.Devices.keys():
return self.Devices[x].get("name", x)
else:
return "Unknown device name"
def getDeviceList(self):
return sorted(self.Devices.iterkeys())
def setDeviceAttribute(self, device, attribute, value):
#print "[iInputDevices] setting for device", device, "attribute", attribute, " to value", value
if self.Devices.has_key(device):
self.Devices[device][attribute] = value
def getDeviceAttribute(self, device, attribute):
if self.Devices.has_key(device):
if self.Devices[device].has_key(attribute):
return self.Devices[device][attribute]
return None
def setEnabled(self, device, value):
oldval = self.getDeviceAttribute(device, 'enabled')
#print "[iInputDevices] setEnabled for device %s to %s from %s" % (device,value,oldval)
self.setDeviceAttribute(device, 'enabled', value)
if oldval is True and value is False:
self.setDefaults(device)
def setName(self, device, value):
#print "[iInputDevices] setName for device %s to %s" % (device,value)
self.setDeviceAttribute(device, 'configuredName', value)
#struct input_event {
# struct timeval time; -> ignored
# __u16 type; -> EV_REP (0x14)
# __u16 code; -> REP_DELAY (0x00) or REP_PERIOD (0x01)
# __s32 value; -> DEFAULTS: 700(REP_DELAY) or 100(REP_PERIOD)
#}; -> size = 16
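	# e.g. struct.pack('iihhi', 0, 0, 0x14, 0x00, 700) encodes an EV_REP/REP_DELAY
	# event with a 700 ms delay (the two leading ints stand in for struct timeval).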
def setDefaults(self, device):
print "[iInputDevices] setDefaults for device %s" % device
self.setDeviceAttribute(device, 'configuredName', None)
event_repeat = struct.pack('iihhi', 0, 0, 0x14, 0x01, 100)
event_delay = struct.pack('iihhi', 0, 0, 0x14, 0x00, 700)
fd = os_open("/dev/input/" + device, O_RDWR)
os_write(fd, event_repeat)
os_write(fd, event_delay)
os_close(fd)
def setRepeat(self, device, value): #REP_PERIOD
if self.getDeviceAttribute(device, 'enabled'):
print "[iInputDevices] setRepeat for device %s to %d ms" % (device,value)
event = struct.pack('iihhi', 0, 0, 0x14, 0x01, int(value))
fd = os_open("/dev/input/" + device, O_RDWR)
os_write(fd, event)
os_close(fd)
def setDelay(self, device, value): #REP_DELAY
if self.getDeviceAttribute(device, 'enabled'):
print "[iInputDevices] setDelay for device %s to %d ms" % (device,value)
event = struct.pack('iihhi', 0, 0, 0x14, 0x00, int(value))
fd = os_open("/dev/input/" + device, O_RDWR)
os_write(fd, event)
os_close(fd)
class InitInputDevices:
def __init__(self):
self.currentDevice = ""
self.createConfig()
def createConfig(self, *args):
config.inputDevices = ConfigSubsection()
for device in sorted(iInputDevices.Devices.iterkeys()):
self.currentDevice = device
#print "[InitInputDevices] -> creating config entry for device: %s -> %s " % (self.currentDevice, iInputDevices.Devices[device]["name"])
self.setupConfigEntries(self.currentDevice)
self.currentDevice = ""
def inputDevicesEnabledChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setEnabled(self.currentDevice, configElement.value)
elif iInputDevices.currentDevice != "":
iInputDevices.setEnabled(iInputDevices.currentDevice, configElement.value)
def inputDevicesNameChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setName(self.currentDevice, configElement.value)
if configElement.value != "":
devname = iInputDevices.getDeviceAttribute(self.currentDevice, 'name')
if devname != configElement.value:
cmd = "config.inputDevices." + self.currentDevice + ".enabled.value = False"
exec cmd
cmd = "config.inputDevices." + self.currentDevice + ".enabled.save()"
exec cmd
elif iInputDevices.currentDevice != "":
iInputDevices.setName(iInputDevices.currentDevice, configElement.value)
def inputDevicesRepeatChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setRepeat(self.currentDevice, configElement.value)
elif iInputDevices.currentDevice != "":
iInputDevices.setRepeat(iInputDevices.currentDevice, configElement.value)
def inputDevicesDelayChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setDelay(self.currentDevice, configElement.value)
elif iInputDevices.currentDevice != "":
iInputDevices.setDelay(iInputDevices.currentDevice, configElement.value)
def setupConfigEntries(self,device):
cmd = "config.inputDevices." + device + " = ConfigSubsection()"
exec cmd
if boxtype == 'dm800' or boxtype == 'azboxhd':
cmd = "config.inputDevices." + device + ".enabled = ConfigYesNo(default = True)"
else:
cmd = "config.inputDevices." + device + ".enabled = ConfigYesNo(default = False)"
exec cmd
cmd = "config.inputDevices." + device + ".enabled.addNotifier(self.inputDevicesEnabledChanged,config.inputDevices." + device + ".enabled)"
exec cmd
cmd = "config.inputDevices." + device + '.name = ConfigText(default="")'
exec cmd
cmd = "config.inputDevices." + device + ".name.addNotifier(self.inputDevicesNameChanged,config.inputDevices." + device + ".name)"
exec cmd
if boxtype in ('maram9', 'classm', 'axodin', 'axodinc', 'starsatlx', 'genius', 'evo', 'galaxym6'):
cmd = "config.inputDevices." + device + ".repeat = ConfigSlider(default=400, increment = 10, limits=(0, 500))"
elif boxtype == 'azboxhd':
cmd = "config.inputDevices." + device + ".repeat = ConfigSlider(default=150, increment = 10, limits=(0, 500))"
else:
cmd = "config.inputDevices." + device + ".repeat = ConfigSlider(default=100, increment = 10, limits=(0, 500))"
exec cmd
cmd = "config.inputDevices." + device + ".repeat.addNotifier(self.inputDevicesRepeatChanged,config.inputDevices." + device + ".repeat)"
exec cmd
if boxtype in ('maram9', 'classm', 'axodin', 'axodinc', 'starsatlx', 'genius', 'evo', 'galaxym6'):
cmd = "config.inputDevices." + device + ".delay = ConfigSlider(default=200, increment = 100, limits=(0, 5000))"
else:
cmd = "config.inputDevices." + device + ".delay = ConfigSlider(default=700, increment = 100, limits=(0, 5000))"
exec cmd
cmd = "config.inputDevices." + device + ".delay.addNotifier(self.inputDevicesDelayChanged,config.inputDevices." + device + ".delay)"
exec cmd
iInputDevices = inputDevices()
config.plugins.remotecontroltype = ConfigSubsection()
config.plugins.remotecontroltype.rctype = ConfigInteger(default = 0)
class RcTypeControl():
def __init__(self):
if pathExists('/proc/stb/ir/rc/type') and pathExists('/proc/stb/info/boxtype') and getBrandOEM() not in ('gigablue', 'odin', 'ini', 'entwopia', 'tripledot'):
self.isSupported = True
fd = open('/proc/stb/info/boxtype', 'r')
self.boxType = fd.read()
fd.close()
if config.plugins.remotecontroltype.rctype.value != 0:
self.writeRcType(config.plugins.remotecontroltype.rctype.value)
else:
self.isSupported = False
def multipleRcSupported(self):
return self.isSupported
def getBoxType(self):
return self.boxType
def writeRcType(self, rctype):
fd = open('/proc/stb/ir/rc/type', 'w')
fd.write('%d' % rctype)
fd.close()
iRcTypeControl = RcTypeControl()
|
devclone/enigma2-9f38fd6
|
lib/python/Components/InputDevice.py
|
Python
|
gpl-2.0
| 9,159
|
import re
import json
from xbmcswift2 import xbmc, xbmcgui, xbmcvfs
from meta import plugin
from meta.gui import dialogs
from meta.utils.text import to_unicode
from settings import SETTING_AUTOPATCH, SETTING_AUTOPATCHES
from language import get_string as _
EXTENSION = ".metalliq.json"
HTML_TAGS_REGEX = re.compile(r'\[/?(?:color|b|i|u).*?\]', re.I|re.UNICODE)
class AddonPlayer(object):
def __init__(self, filename, media, meta):
self.media = media
self.title = meta["name"]
self.id = meta.get("id", filename.replace(".metalliq.json", ""))
self.clean_title = HTML_TAGS_REGEX.sub('', self.title)
self.repoid = meta.get("repository")
self.pluginid = meta.get("plugin")
self.order = meta.get("priority") or 1000
self.filters = meta.get("filters", {})
self.commands = meta.get(media, [])
self._postprocess = meta.get("postprocess")
def postprocess(self, link):
code = self._postprocess
if not code or not isinstance(code, basestring) or "__" in code:
return link
link = eval(code, {"__builtins__": {}, "link": link})
return link
def is_empty(self):
if self.pluginid and not xbmc.getCondVisibility('System.HasAddon(%s)' % self.pluginid):
return True
return not bool(self.commands)
def get_players(media, filters = {}):
assert media in ("tvshows", "movies", "musicvideos", "music", "live")
players = []
players_path = "special://profile/addon_data/{0}/players/".format(plugin.id)
files = [x for x in xbmcvfs.listdir(players_path)[1] if x.endswith(EXTENSION)]
for file in files:
path = players_path + file
try:
f = xbmcvfs.File(path)
try:
content = f.read()
meta = json.loads(content)
finally:
f.close()
player = AddonPlayer(file, media, meta)
if not player.is_empty():
players.append(player)
except Exception, e:
plugin.log.error(repr(e))
msg = "player %s is invalid" % file
xbmcgui.Dialog().ok('Invalid player', msg)
raise
return sort_players(players, filters)
def sort_players(players, filters = {}):
result = []
for player in players:
filtered = False
checked = False
for filter_key, filter_value in filters.items():
value = player.filters.get(filter_key)
if value:
checked = True
if to_unicode(value) != to_unicode(filter_value):
filtered = True
if not filtered:
needs_browsing = False
for command_group in player.commands:
for command in command_group:
if command.get('steps'):
needs_browsing = True
break
result.append((not checked, needs_browsing, player.order, player.clean_title.lower(), player))
result.sort()
return [x[-1] for x in result]
def get_needed_langs(players):
languages = set()
for player in players:
for command_group in player.commands:
for command in command_group:
command_lang = command.get("language", "en")
languages.add(command_lang)
return languages
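# Illustrative usage sketch (assumed media/filter values):
#   for player in get_players("movies", {"language": "en"}):
#       print(player.title, player.order)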
ADDON_SELECTOR = AddonPlayer("selector", "any", meta={"name": "Selector"})
ADDON_CONTEXT = AddonPlayer("context", "any", meta={"name": "Context"})
ADDON_DEFAULT = AddonPlayer("default", "any", meta={"name": "Default"})
@plugin.route('/patch/<mode>', options = {"mode": "all"})
def patch(mode):
import xbmcaddon
adir = "special://home/addons/"
AUTOS = eval(plugin.get_setting(SETTING_AUTOPATCHES, unicode))
# try: AUTOS = plugin.get_setting(SETTING_AUTOPATCHES, unicode)
# except: AUTOS = [[], [], [], []]
# return [p for p in get_players() if p.id in AUTOS]
# xbmc.log("QQQQQ AUTOS = {0}".format(str(AUTOS)), xbmc.LOGNOTICE)
INSTALLED = [i for i in xbmcvfs.listdir(adir)[0]]
PATCHES = [[], ["resources/lib/modules/control.py", "pass", "sys.exit()"], ["default.py", "", "\n cool_down_active = kodi.get_setting('cool_down') == 'true'\n if not salts_utils.is_salts() or cool_down_active:\n kodi.notify(msg=i18n('playback_limited'))\n return False"], ["lib/dudehere/routines/scrapers/__init__.py", "", "\n\t\tif self._caller not in ALLOWED_CALLERS and self._caller: \n\t\t\tplugin.log('Caller not allowed')\n\t\t\tplugin.raise_error('Violation', 'This addon is not allowed.', 'Please do not use %s with %s' % (self._caller, ADDON_NAME))\n\t\t\tif return_sources:\n\t\t\t\treturn [], [], []\n\t\t\telse:\n\t\t\t\treturn []"]]
if mode == "auto":
if AUTOS != [[], [], [], []]:
ADDONS = AUTOS
else:
if dialogs.yesno('{0}: Patch'.format(plugin.name), '{0}.[CR]{1} & {2}'.format(_("%s not found") % 'Auto-patches', _("Enable"), _("Continue?"))): return patch("all")
else:
plugin.set_setting(SETTING_AUTOPATCH, "false")
return
else:
ADDONS = [[], [i for i in INSTALLED if i.startswith("plugin.video.") and xbmcvfs.exists("{0}{1}/{2}".format(adir, i, PATCHES[1][0]))], [i for i in INSTALLED if i.startswith("plugin.video.") and xbmcvfs.exists("{0}{1}/{2}".format(adir, i, PATCHES[2][0]))], [i for i in INSTALLED if i.startswith("script.module.") and xbmcvfs.exists("{0}{1}/{2}".format(adir, i, PATCHES[3][0]))]]
count = 0
for i in range(1, len(ADDONS)):
for a in ADDONS[i]:
count = count + 1
b = "{0}{1}/{2}".format(adir, a, PATCHES[i][0])
c = xbmcvfs.File(b)
d = c.read()
c.close()
if PATCHES[i][2] in d:
ADDON = xbmcaddon.Addon(a)
if mode == "auto" or dialogs.yesno('{0}: Patch "{1}"?'.format(plugin.name, ADDON.getAddonInfo("name")), '"{0}" {1} block-code.[CR]{2}'.format(ADDON.getAddonInfo("name"), _("contains"), _("Would you like to remove it from the library?").replace(_("Library").lower(), _("Add-on").lower()))):
h = xbmcvfs.File(b, 'w')
d = d.replace(PATCHES[i][2], PATCHES[i][1])
result = h.write(d)
h.close()
if mode != "auto" and dialogs.yesno("{0}: {1} Patch?".format(plugin.name, _("Auto")), '"{0}"[CR]{1} {2} re-patching?'.format(ADDON.getAddonInfo("name"), _("Enable"), _("Auto").lower())):
if ADDON.getAddonInfo("id") not in AUTOS[i]: AUTOS[i].append(ADDON.getAddonInfo("id"))
if AUTOS != [[], [], [], []] and AUTOS != ADDONS:
plugin.set_setting(SETTING_AUTOPATCHES, AUTOS)
|
TheWardoctor/Wardoctors-repo
|
plugin.video.metalliq/resources/lib/meta/play/players.py
|
Python
|
apache-2.0
| 6,779
|
class Graph:
def __init__(self, data):
self.data = data
    def node(self, i):
        if i in self.data:
            es = self.data[i]
            if es:
                return Node(es)
def nodes(self):
return Nodes(self.data)
class Nodes:
def __init__(self, data):
self.data = data
def __iter__(self):
return NodeIter(self.data, 0)
class NodeIter:
def __init__(self, data, pos):
self.data = data
self.keys = list(data.keys())
self.pos = pos
self.l = len(data)
    def __next__(self):
        p = self.pos
        self.pos += 1
        if p < self.l:
            d = self.data[self.keys[p]]
            if len(d) > 1:
                return Node(d)
        else:
            raise StopIteration
class Node:
    """
    Handle a node
    """
    def __init__(self, data):
        self.data = data
    def typename(self):
        return self.data[0]
    def fields(self):
        return Fields(self.data[1])
class Fields:
def __init__(self, data):
self.data = data
def field(self, i):
return Field(self.data[i])
def __iter__(self):
return FieldIter(self.data, 0)
class FieldIter:
def __init__(self, data, pos):
self.data = data
self.pos = pos
self.l = len(data)
    def __next__(self):
        p = self.pos
        self.pos += 1
        if p < self.l:
            d = self.data[p]
            if len(d) > 1:
                return Field(d)
            else:
                return FieldTodo(d)
        else:
            raise StopIteration
class Field:
def __init__(self, data):
self.data = data
def name(self):
return self.data[0]
def value(self):
return self.data[1]
class FieldTodo:
def __init__(self, data):
self.data = data
def name(self):
return "TODO"
def value(self):
return self.data[0]
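# Illustrative usage (assumed data layout: {node_id: (typename, [(field_name, value), ...])}):
#   g = Graph({1: ('function_decl', [('name', 'main'), ('srcp', 'test.c:1')])})
#   for node in g.nodes():
#       print(node.typename())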
|
h4ck3rm1k3/gcc_py_introspector
|
gcc/tree/graph.py
|
Python
|
gpl-2.0
| 1,994
|
"""
Cache API class.
"""
from __future__ import print_function
import os
import shutil
from dbcollection.core.manager import CacheManager
from dbcollection.utils import nested_lookup
def cache(query=(), delete_cache=False, delete_cache_dir=False, delete_cache_file=False,
reset_cache=False, reset_path_cache=False, reset_path_downloads=False,
set_cache_dir='', set_downloads_dir='', verbose=True):
"""Configures the cache file.
    This method configures some options of the cache file. The available
    options are only a subset of all possible parameters/registries of the
    cache file, covering the most common operations one may need to perform.
    For more specific configurations, manually modifying the cache file or
    using the cache manager methods is the best procedure.
    The most common operations are setting, deleting or resetting the cache
    file itself and/or the existing data. (The input args for these
    operations should be self-explanatory from their names alone.)
Additionally, performing basic queries to the cache is supported via the
'query' input arg. To search for a particular pattern (e.g., a dataset or
task) just assign the 'query' to the pattern you are looking for.
Parameters
----------
query : str/list/tuple, optional
Pattern or list of patterns to search for in the cache file.
delete_cache : bool, optional
Delete/remove the dbcollection cache file + directory.
delete_cache_dir : bool, optional
Delete/remove the dbcollection cache directory.
delete_cache_file : bool, optional
Delete/remove the dbcollection.json cache file.
reset_cache : bool, optional
Reset the cache file.
    reset_path_cache : bool, optional
        Reset the cache dir path to the default path.
    reset_path_downloads : bool, optional
        Reset the downloads dir path to the default path.
    set_cache_dir : str, optional
        New path for the cache dir.
set_downloads_dir : str, optional
New path for the downloads dir.
verbose : bool, optional
Displays text information (if true).
Returns
-------
str/list
Pattern or list of patterns matching the input query pattern.
Examples
--------
Delete the cache by removing the dbcollection.json cache file.
This will NOT remove the file contents in dbcollection/. For that,
you must set the *delete_cache_dir* argument to True.
>>> import dbcollection as dbc
>>> dbc.cache(delete_cache_file=True)
"""
if isinstance(query, str):
query = (query, )
else:
query = tuple(query)
cache = CacheAPI(query=query,
delete_cache=delete_cache,
delete_cache_dir=delete_cache_dir,
delete_cache_file=delete_cache_file,
reset_cache=reset_cache,
reset_path_cache=reset_path_cache,
reset_path_downloads=reset_path_downloads,
set_cache_dir=set_cache_dir,
set_downloads_dir=set_downloads_dir,
verbose=verbose)
return cache.run()
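# A minimal usage sketch of the function above, following the docstring's own
# doctest convention (the 'cifar10' pattern and the paths are hypothetical,
# used only for illustration):
#
# >>> import dbcollection as dbc
# >>> matches = dbc.cache(query='cifar10')       # search the cache registries
# >>> dbc.cache(set_cache_dir='/tmp/dbc_cache')  # relocate the cache dir
# >>> dbc.cache(reset_cache=True)                # reset the cache file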
class CacheAPI(object):
"""Cache configuration API class.
This class contains methods to configure the
cache registry. Also, it can remove the cache
files from disk if needed.
Parameters
----------
query : tuple
List of patterns to search for in the cache file.
delete_cache : bool
Delete/remove the dbcollection cache file + directory.
delete_cache_dir : bool
Delete/remove the dbcollection cache directory.
delete_cache_file : bool
Delete/remove the dbcollection.json cache file.
reset_cache : bool
Reset the cache file.
    reset_path_cache : bool
        Reset the cache dir path to the default path.
    reset_path_downloads : bool
        Reset the downloads dir path to the default path.
    set_cache_dir : str
        New path for the cache dir.
set_downloads_dir : str
New path for the downloads dir.
verbose : bool
Displays text information (if true).
Attributes
----------
query : tuple
List of patterns to search for in the cache file.
delete_cache : bool
Delete/remove the dbcollection cache file + directory.
delete_cache_dir : bool
Delete/remove the dbcollection cache directory.
delete_cache_file : bool
Delete/remove the dbcollection.json cache file.
reset_cache : bool
Reset the cache file.
    reset_path_cache : bool
        Reset the cache dir path to the default path.
    reset_path_downloads : bool
        Reset the downloads dir path to the default path.
    set_cache_dir : str
        New path for the cache dir.
set_downloads_dir : str
New path for the downloads dir.
verbose : bool
Displays text information (if true).
cache_manager : CacheManager
Cache manager object.
"""
def __init__(self, query, delete_cache, delete_cache_dir, delete_cache_file,
reset_cache, reset_path_cache, reset_path_downloads,
set_cache_dir, set_downloads_dir, verbose):
"""Initialize class."""
assert isinstance(query, tuple), "Must input a valid query."
assert isinstance(delete_cache, bool), "Must input a valid boolean for delete_cache."
assert isinstance(delete_cache_dir, bool), "Must input a valid boolean for delete_cache_dir."
assert isinstance(delete_cache_file, bool), "Must input a valid boolean for delete_cache_file."
assert isinstance(reset_cache, bool), "Must input a valid boolean for reset_cache."
assert isinstance(reset_path_cache, bool), "Must input a valid boolean for reset_path_cache."
assert isinstance(reset_path_downloads, bool), "Must input a valid boolean for reset_path_downloads."
assert isinstance(set_cache_dir, str), "Must input a valid string for set_cache_dir."
assert isinstance(set_downloads_dir, str), "Must input a valid string for set_downloads_dir."
assert isinstance(verbose, bool), "Must input a valid boolean for verbose."
self.query = query
self.delete_cache = delete_cache
self.delete_cache_dir = delete_cache_dir
self.delete_cache_file = delete_cache_file
self.reset_cache = reset_cache
self.reset_path_cache = reset_path_cache
self.reset_path_downloads = reset_path_downloads
self.set_cache_dir = set_cache_dir
self.set_downloads_dir = set_downloads_dir
self.verbose = verbose
self.cache_manager = self.get_cache_manager()
def get_cache_manager(self):
return CacheManager()
def run(self):
"""Main method."""
if any(self.query):
result = self.get_matching_metadata_from_cache(self.query)
if self.verbose:
print('==> Patterns found in cache:')
for i, pattern in enumerate(self.query):
print(' - {}: {} found'.format(pattern, len(result[i])))
return result
if self.delete_cache_dir or self.delete_cache:
self.remove_cache_dir_from_disk()
if self.verbose:
print('Deleted {} directory.'.format(self.get_cache_dir()))
if self.delete_cache_file or self.delete_cache:
self.remove_cache_file_from_disk()
if self.verbose:
print('Deleted {} cache file.'.format(self.get_cache_filename()))
        if self.reset_cache:
            self.reset_cache_file()
            if self.verbose:
                print('Cache reset.')
        if self.reset_path_cache and not self.reset_cache:
            self.reset_cache_dir_path()
            if self.verbose:
                print('Reset the cache dir path: {}.'.format(self.get_cache_dir()))
        if self.reset_path_downloads and not self.reset_cache:
            self.reset_download_dir_path()
            if self.verbose:
                print('Reset the download dir path: {}.'.format(self.get_download_dir()))
if any(self.set_cache_dir):
self.set_cache_dir_path()
if self.verbose:
print('New cache dir path: {}.'.format(self.get_cache_dir()))
if any(self.set_downloads_dir):
self.set_download_dir_path()
if self.verbose:
print('New download dir path: {}.'.format(self.get_download_dir()))
def get_matching_metadata_from_cache(self, patterns):
"""Returns a list of matching patterns from the cache."""
found = []
for pattern in patterns:
if any(pattern):
result = self.get_matching_pattern_from_cache(pattern)
else:
result = []
found.append(result)
return found
def get_matching_pattern_from_cache(self, pattern):
"""Returns data from cache that matches the pattern."""
data = self.get_cache_data()
results = self.find_pattern_in_dict(data, pattern)
out = self.add_key_to_results(results, pattern)
return out
def get_cache_data(self):
return self.cache_manager.manager.data
def find_pattern_in_dict(self, data, pattern):
return list(nested_lookup(key=pattern, document=data, wild=True))
def add_key_to_results(self, results, pattern):
return [{pattern: result} for result in results]
def remove_cache_dir_from_disk(self):
cache_dir = self.get_cache_dir()
if os.path.exists(cache_dir):
shutil.rmtree(cache_dir)
def get_cache_dir(self):
return self.cache_manager.manager.cache_dir
def remove_cache_file_from_disk(self):
cache_filename = self.get_cache_filename()
if os.path.exists(cache_filename):
os.remove(cache_filename)
def get_cache_filename(self):
return self.cache_manager.manager.cache_filename
def reset_cache_file(self):
self.cache_manager.manager.reset_cache(force_reset=True)
def reset_cache_dir_path(self):
self.cache_manager.manager.reset_cache_dir()
def reset_download_dir_path(self):
self.cache_manager.manager.reset_download_dir()
def get_download_dir(self):
return self.cache_manager.manager.download_dir
def set_cache_dir_path(self):
self.cache_manager.manager.cache_dir = self.set_cache_dir
def set_download_dir_path(self):
self.cache_manager.manager.download_dir = self.set_downloads_dir
|
dbcollection/dbcollection
|
dbcollection/core/api/cache.py
|
Python
|
mit
| 10,747
|
import sys
print('Python version:')
print(sys.version)
try:
import numpy
print('Numpy: {0}'.format(numpy.__version__))
except:
print('NO Numpy')
try:
import matplotlib
print('Matplotlib: {0}'.format(matplotlib.__version__))
except:
print('NO matplotlib')
try:
import scipy
print('scipy: {0}'.format(scipy.__version__))
except:
print('NO scipy')
try:
import pyCloudy
print('pyCloudy: {0}'.format(pyCloudy.__version__))
except:
print('NO pyCloudy')
try:
import pyneb
print('pyneb: {0}'.format(pyneb.__version__))
except:
print('NO pyneb')
try:
import atpy
print('atpy: {0}'.format(atpy.__version__))
except:
print('NO atpy')
try:
import asciitable
print('asciitable: {0}'.format(asciitable.__version__))
except:
print('NO asciitable')
try:
import pyfits
print('pyfits: {0}'.format(pyfits.__version__))
except:
try:
import astropy.io.fits as pyfits
print('pyfits: from astropy')
except:
pn.log_.error('pyfits not installed')
|
Morisset/PyNeb_devel
|
pyneb/utils/test_config.py
|
Python
|
gpl-3.0
| 1,058
|
from time import sleep
import curses
import os
def plot_border():
for a in [0,79]:
for b in range(0,22):
stdscr.addstr(b,a,"*", curses.color_pair(1))
for a in [0,22]:
for b in range(0,80):
stdscr.addstr(a,b,"*", curses.color_pair(1))
#stdscr.addstr(22,78,"*", curses.color_pair(1))
def check_keys():
charin = stdscr.getch()
if charin == ord('\\'):
return "\\"
if charin == ord('/'):
return "/"
return None
def plot_paddle(input_key):
stdscr.addstr(ball_location[1],ball_location[0],input_key)
scorechange = (1/ball_movement[2])
ball_movement[2] = ball_movement[2]-0.004
return scorechange
def check_paddle_collision():
in_key = stdscr.inch(ball_location[1],ball_location[0])
if in_key == ord("/") and ball_movement[0] == 1 and ball_movement[1] == 0:
ball_movement[0] = 0
ball_movement[1] = -1
move_ball()
elif in_key == ord("/") and ball_movement[0] == -1 and ball_movement[1] == 0:
ball_movement[0] = 0
ball_movement[1] = 1
move_ball()
elif in_key == ord("/") and ball_movement[0] == 0 and ball_movement[1] == 1:
ball_movement[0] = -1
ball_movement[1] = 0
move_ball()
elif in_key == ord("/") and ball_movement[0] == 0 and ball_movement[1] == -1:
ball_movement[0] = 1
ball_movement[1] = 0
move_ball()
elif in_key == ord("\\") and ball_movement[0] == 1 and ball_movement[1] == 0:
ball_movement[0] = 0
ball_movement[1] = 1
move_ball()
elif in_key == ord("\\") and ball_movement[0] == -1 and ball_movement[1] == 0:
ball_movement[0] = 0
ball_movement[1] = -1
move_ball()
elif in_key == ord("\\") and ball_movement[0] == 0 and ball_movement[1] == 1:
ball_movement[0] = 1
ball_movement[1] = 0
move_ball()
elif in_key == ord("\\") and ball_movement[0] == 0 and ball_movement[1] == -1:
ball_movement[0] = -1
ball_movement[1] = 0
move_ball()
return
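# An equivalent, table-driven form of the reflection chain above (a sketch,
# not wired into the game loop below): each paddle character maps an incoming
# (dx, dy) ball direction to its reflected direction.
REFLECT = {
    ord("/"): {(1, 0): (0, -1), (-1, 0): (0, 1), (0, 1): (-1, 0), (0, -1): (1, 0)},
    ord("\\"): {(1, 0): (0, 1), (-1, 0): (0, -1), (0, 1): (1, 0), (0, -1): (-1, 0)},
}
def check_paddle_collision_table():
    in_key = stdscr.inch(ball_location[1], ball_location[0])
    new_direction = REFLECT.get(in_key, {}).get((ball_movement[0], ball_movement[1]))
    if new_direction:
        ball_movement[0], ball_movement[1] = new_direction
        move_ball()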
def check_wall_collision():
if ball_location[0] <= 0 or ball_location[0] >= 79 or ball_location[1] >= 22 or ball_location[1] <= 0:
return True
else:
return None
def move_ball():
ball_location[0] = ball_location[0] + ball_movement[0]
ball_location[1] = ball_location[1] + ball_movement[1]
def plot_ball():
stdscr.addstr(ball_location[1],ball_location[0], "o") # plot ball
stdscr = curses.initscr()
curses.noecho()
curses.curs_set(0)
quit = False
ball_movement = [-1,0, 0.1] # x movement, y movement, delay (seconds)
ball_location = [40,12]
stdscr.nodelay(1)
collide = None
score = 0
curses.start_color()
curses.init_pair(2, curses.COLOR_RED, curses.COLOR_YELLOW)
curses.init_pair(1, curses.COLOR_YELLOW, curses.COLOR_RED)
plot_border()
stdscr.addstr(23,35,"SCORE: " + str(int(score)), curses.color_pair(2))
while not(collide):
plot_ball()
stdscr.refresh()
sleep(ball_movement[2])
stdscr.addstr(ball_location[1],ball_location[0], ' ') # erase ball
move_ball()
input_command = None
collide = None
input_command = check_keys()
if input_command:
scorediff = plot_paddle(input_command)
score = score + scorediff
stdscr.addstr(23,35,"SCORE: " + str(int(score)), curses.color_pair(2))
check_paddle_collision() # is ball about to hit a paddle?
collide = check_wall_collision() # is ball about to hit a wall?
curses.nocbreak()
stdscr.keypad(0)
curses.endwin()
os.system('clear')
print "You hit the wall!"
print ""
print "Final score: " + str(int(score))
|
bmcollier/paddler
|
paddler.py
|
Python
|
unlicense
| 3,712
|
from flask import request
from flask_restful import Resource
import json
from core.bo.clienteBo import ClienteBo
class Cliente(Resource):
def __init__(self):
self.cliente = ClienteBo()
def get(self, parameter=""):
if parameter == "":
return self.cliente.get_all(), 201
else:
parameter = json.loads(parameter)
if parameter.get('id'):
return self.cliente.get_by_id(parameter["id"]), 201
elif parameter.get('document'):
return self.cliente.get_document(parameter["document"], parameter["cliente"]), 201
elif parameter.get('board'):
return self.cliente.get_board(), 201
else:
return self.cliente.get_by_filter(parameter), 201
def put(self, parameter):
cliente = request.json
return self.cliente.update(parameter, cliente), 201
def post(self, parameter):
file = request.files['arquivo']
return self.cliente.upload(parameter, file), 201
|
guigovedovato/python
|
api/clienteApi.py
|
Python
|
gpl-3.0
| 1,042
|
# -*- coding: utf-8 -*-
"""
test/integration
~~~~~~~~~~~~~~~~
This file defines integration-type tests for hyper. These are still not fully
hitting the network, so that's alright.
"""
import base64
import requests
import threading
import time
import hyper
import hyper.http11.connection
import pytest
from socket import timeout as SocketTimeout
from contextlib import contextmanager
from mock import patch
from concurrent.futures import ThreadPoolExecutor, TimeoutError
from h2.frame_buffer import FrameBuffer
from hyper.compat import ssl
from hyper.contrib import HTTP20Adapter
from hyper.common.exceptions import ProxyError
from hyper.common.util import HTTPVersion, to_bytestring
from hyperframe.frame import (
Frame, SettingsFrame, WindowUpdateFrame, DataFrame, HeadersFrame,
GoAwayFrame, RstStreamFrame
)
from hpack.hpack import Encoder
from hpack.huffman import HuffmanEncoder
from hpack.huffman_constants import (
REQUEST_CODES, REQUEST_CODES_LENGTH
)
from hyper.http20.exceptions import ConnectionError, StreamResetError
from server import SocketLevelTest, SocketSecuritySetting
# Turn off certificate verification for the tests.
if ssl is not None:
hyper.tls._context = hyper.tls.init_context()
hyper.tls._context.check_hostname = False
hyper.tls._context.verify_mode = ssl.CERT_NONE
# Cover our bases because NPN doesn't yet work on all our test platforms.
PROTOCOLS = hyper.http20.connection.H2_NPN_PROTOCOLS + ['', None]
def decode_frame(frame_data):
f, length = Frame.parse_frame_header(frame_data[:9])
f.parse_body(memoryview(frame_data[9:9 + length]))
assert 9 + length == len(frame_data)
return f
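# A minimal usage sketch for the helper above (the input is any serialized
# hyperframe frame; an empty SETTINGS frame is used here for illustration):
#
#     f = decode_frame(SettingsFrame(0).serialize())
#     assert isinstance(f, SettingsFrame)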
def build_headers_frame(headers, encoder=None):
f = HeadersFrame(1)
e = encoder
if e is None:
e = Encoder()
e.huffman_coder = HuffmanEncoder(REQUEST_CODES, REQUEST_CODES_LENGTH)
f.data = e.encode(headers)
f.flags.add('END_HEADERS')
return f
@pytest.fixture
def frame_buffer():
buffer = FrameBuffer()
buffer.max_frame_size = 65535
return buffer
@contextmanager
def reusable_frame_buffer(buffer):
    # FrameBuffer does not return a new iterator for each iteration.
data = buffer.data
yield buffer
buffer.data = data
def receive_preamble(sock):
# Receive the HTTP/2 'preamble'.
client_preface = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
got = b''
while len(got) < len(client_preface):
tmp = sock.recv(len(client_preface) - len(got))
assert len(tmp) > 0, "unexpected EOF"
got += tmp
assert got == client_preface, "client preface mismatch"
# Send server side HTTP/2 preface
sock.send(SettingsFrame(0).serialize())
# Drain to let the client proceed.
# Note that in the lower socket level, this method is not
# just doing "receive".
return sock.recv(65535)
@patch('hyper.http20.connection.H2_NPN_PROTOCOLS', PROTOCOLS)
class TestHyperIntegration(SocketLevelTest):
# These are HTTP/2 tests.
h2 = True
def test_connection_string(self):
self.set_up()
# Confirm that we send the connection upgrade string and the initial
# SettingsFrame.
data = []
send_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We should get one big chunk.
first = sock.recv(65535)
data.append(first)
# We need to send back a SettingsFrame.
f = SettingsFrame(0)
sock.send(f.serialize())
send_event.set()
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.connect()
send_event.wait(5)
assert data[0].startswith(b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n')
self.tear_down()
def test_initial_settings(self, frame_buffer):
self.set_up()
# Confirm that we send the connection upgrade string and the initial
# SettingsFrame.
data = []
send_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We get one big chunk.
first = sock.recv(65535)
data.append(first)
# We need to send back a SettingsFrame.
f = SettingsFrame(0)
sock.send(f.serialize())
send_event.set()
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.connect()
send_event.wait(5)
# Get the chunk of data after the preamble and decode it into frames.
# We actually expect two, but only the second one contains ENABLE_PUSH.
preamble_size = len(b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n')
data = data[0][preamble_size:]
frame_buffer.add_data(data)
frames = list(frame_buffer)
f = frames[1]
assert isinstance(f, SettingsFrame)
assert f.stream_id == 0
assert f.settings == {
SettingsFrame.ENABLE_PUSH: 0,
}
self.tear_down()
def test_stream_level_window_management(self):
self.set_up()
data = []
send_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# Dispose of the first packet.
sock.recv(65535)
# Send a Settings frame that reduces the flow-control window to
# 64 bytes.
f = SettingsFrame(0)
f.settings[SettingsFrame.INITIAL_WINDOW_SIZE] = 64
sock.send(f.serialize())
# Grab three frames, the settings ACK, the initial headers frame,
# and the first data frame.
for x in range(0, 3):
data.append(sock.recv(65535))
# Send a WindowUpdate giving more window room to the stream.
f = WindowUpdateFrame(1)
f.window_increment = 64
sock.send(f.serialize())
# Send one that gives more room to the connection.
f = WindowUpdateFrame(0)
f.window_increment = 64
sock.send(f.serialize())
            # Receive the remaining frame.
data.append(sock.recv(65535))
send_event.set()
# We're done.
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.putrequest('GET', '/')
conn.endheaders()
# Send the first data chunk. This is 32 bytes.
sd = b'a' * 32
conn.send(sd)
# Send the second one. This should block until the WindowUpdate comes
# in.
sd = sd * 2
conn.send(sd, final=True)
assert send_event.wait(0.3)
# Decode the frames.
frames = [decode_frame(d) for d in data]
# We care about the last two. The first should be a data frame
# containing 32 bytes.
assert (isinstance(frames[-2], DataFrame) and
not isinstance(frames[-2], HeadersFrame))
assert len(frames[-2].data) == 32
# The second should be a data frame containing 64 bytes.
assert isinstance(frames[-1], DataFrame)
assert len(frames[-1].data) == 64
self.tear_down()
def test_connection_context_manager(self):
self.set_up()
data = []
send_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
first = sock.recv(65535)
data.append(first)
# We need to send back a SettingsFrame.
f = SettingsFrame(0)
sock.send(f.serialize())
sock.recv(65535)
send_event.wait(5)
sock.close()
self._start_server(socket_handler)
with self.get_connection() as conn:
conn.connect()
send_event.set()
# Check that we closed the connection.
assert conn._sock is None
self.tear_down()
def test_closed_responses_remove_their_streams_from_conn(self):
self.set_up()
req_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We're going to get the two messages for the connection open, then
# a headers frame.
receive_preamble(sock)
sock.recv(65535)
# Wait for request
req_event.wait(5)
# Now, send the headers for the response.
f = build_headers_frame([(':status', '200')])
f.stream_id = 1
sock.send(f.serialize())
# Wait for the message from the main thread.
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.request('GET', '/')
req_event.set()
resp = conn.get_response()
# Close the response.
resp.close()
recv_event.set()
assert not conn.streams
self.tear_down()
def test_receiving_responses_with_no_body(self):
self.set_up()
req_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We get two messages for the connection open and then a HEADERS
# frame.
receive_preamble(sock)
sock.recv(65535)
# Wait for request
req_event.wait(5)
# Now, send the headers for the response. This response has no body
f = build_headers_frame(
[(':status', '204'), ('content-length', '0')]
)
f.flags.add('END_STREAM')
f.stream_id = 1
sock.send(f.serialize())
# Wait for the message from the main thread.
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.request('GET', '/')
req_event.set()
resp = conn.get_response()
# Confirm the status code.
assert resp.status == 204
# Confirm that we can read this, but it has no body.
assert resp.read() == b''
assert resp._stream._in_window_manager.document_size == 0
# Awesome, we're done now.
recv_event.set()
self.tear_down()
def test_receiving_trailers(self):
self.set_up()
req_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
e = Encoder()
# We get two messages for the connection open and then a HEADERS
# frame.
receive_preamble(sock)
sock.recv(65535)
# Wait for request
req_event.wait(5)
# Now, send the headers for the response.
f = build_headers_frame(
[(':status', '200'), ('content-length', '14')],
e
)
f.stream_id = 1
sock.send(f.serialize())
# Also send a data frame.
f = DataFrame(1)
f.data = b'have some data'
sock.send(f.serialize())
# Now, send a headers frame again, containing trailing headers.
f = build_headers_frame([
('trialing', 'no'),
('trailing', 'sure')], e)
f.flags.add('END_STREAM')
f.stream_id = 1
sock.send(f.serialize())
# Wait for the message from the main thread.
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.request('GET', '/')
req_event.set()
resp = conn.get_response()
# Confirm the status code.
assert resp.status == 200
# Confirm that we can read this.
assert resp.read() == b'have some data'
assert resp._stream._in_window_manager.document_size == 14
# Confirm that we got the trailing headers, and that they don't contain
# reserved headers.
assert resp.trailers['trailing'] == [b'sure']
assert resp.trailers['trialing'] == [b'no']
assert resp.trailers.get(':res') is None
assert len(resp.headers) == 1
assert len(resp.trailers) == 2
# Awesome, we're done now.
recv_event.set()
self.tear_down()
def test_receiving_trailers_before_reading(self):
self.set_up()
req_event = threading.Event()
wait_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
e = Encoder()
# We get two messages for the connection open and then a HEADERS
# frame.
receive_preamble(sock)
sock.recv(65535)
# Wait for request
req_event.wait(5)
# Now, send the headers for the response.
f = build_headers_frame(
[(':status', '200'), ('content-length', '14')],
e
)
f.stream_id = 1
sock.send(f.serialize())
# Also send a data frame.
f = DataFrame(1)
f.data = b'have some data'
sock.send(f.serialize())
# Wait for the main thread to signal that it wants the trailers,
# then delay slightly.
wait_event.wait(5)
time.sleep(0.5)
# Now, send a headers frame again, containing trailing headers.
f = build_headers_frame([
('trialing', 'no'),
('trailing', 'sure')], e)
f.flags.add('END_STREAM')
f.stream_id = 1
sock.send(f.serialize())
# Wait for the message from the main thread.
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.request('GET', '/')
req_event.set()
resp = conn.get_response()
# Confirm the status code.
assert resp.status == 200
# Ask for the trailers.
wait_event.set()
# Confirm that we got the trailing headers, and that they don't contain
# reserved headers. More importantly, check the trailers *first*,
# before we read from the stream.
assert resp.trailers['trailing'] == [b'sure']
assert resp.trailers['trialing'] == [b'no']
assert len(resp.headers) == 1
assert len(resp.trailers) == 2
# Confirm that the stream is still readable.
assert resp.read() == b'have some data'
assert resp._stream._in_window_manager.document_size == 14
# Awesome, we're done now.
recv_event.set()
self.tear_down()
def test_clean_shut_down(self):
self.set_up()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We should get one packet. Rather than respond to it, send a
# GOAWAY frame with error code 0 indicating clean shutdown.
sock.recv(65535)
# Now, send the shut down.
f = GoAwayFrame(0)
f.error_code = 0
sock.send(f.serialize())
# Wait for the message from the main thread.
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.connect()
# Confirm the connection is closed.
assert conn._sock is None
# Awesome, we're done now.
recv_event.set()
self.tear_down()
def test_unexpected_shut_down(self):
self.set_up()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We should get one packet. Rather than respond to it, send a
# GOAWAY frame with error code 0 indicating clean shutdown.
sock.recv(65535)
# Now, send the shut down.
f = GoAwayFrame(0)
f.error_code = 1
sock.send(f.serialize())
# Wait for the message from the main thread.
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
with pytest.raises(ConnectionError):
conn.connect()
# Confirm the connection is closed.
assert conn._sock is None
# Awesome, we're done now.
recv_event.set()
self.tear_down()
def test_insecure_connection(self):
self.set_up(secure=False)
data = []
req_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
receive_preamble(sock)
data.append(sock.recv(65535))
req_event.wait(5)
h = HeadersFrame(1)
h.data = self.get_encoder().encode(
[
(':status', 200),
('content-type', 'not/real'),
('content-length', 14),
('server', 'socket-level-server')
]
)
h.flags.add('END_HEADERS')
sock.send(h.serialize())
d = DataFrame(1)
d.data = b'nsaislistening'
d.flags.add('END_STREAM')
sock.send(d.serialize())
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
c = self.get_connection()
c.request('GET', '/')
req_event.set()
r = c.get_response()
assert r.status == 200
assert len(r.headers) == 3
assert r.headers[b'server'] == [b'socket-level-server']
assert r.headers[b'content-length'] == [b'14']
assert r.headers[b'content-type'] == [b'not/real']
assert r.read() == b'nsaislistening'
recv_event.set()
self.tear_down()
def test_insecure_proxy_connection(self):
self.set_up(secure=False, proxy=True)
data = []
req_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
receive_preamble(sock)
data.append(sock.recv(65535))
req_event.wait(5)
h = HeadersFrame(1)
h.data = self.get_encoder().encode(
[
(':status', 200),
('content-type', 'not/real'),
('content-length', 12),
('server', 'socket-level-server')
]
)
h.flags.add('END_HEADERS')
sock.send(h.serialize())
d = DataFrame(1)
d.data = b'thisisaproxy'
d.flags.add('END_STREAM')
sock.send(d.serialize())
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
c = self.get_connection()
c.request('GET', '/')
req_event.set()
r = c.get_response()
assert r.status == 200
assert len(r.headers) == 3
assert r.headers[b'server'] == [b'socket-level-server']
assert r.headers[b'content-length'] == [b'12']
assert r.headers[b'content-type'] == [b'not/real']
assert r.read() == b'thisisaproxy'
recv_event.set()
self.tear_down()
def test_secure_proxy_connection(self):
self.set_up(secure=SocketSecuritySetting.SECURE_NO_AUTO_WRAP,
proxy=True)
data = []
connect_request_headers = []
req_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# Read the CONNECT request
while not b''.join(connect_request_headers).endswith(b'\r\n\r\n'):
connect_request_headers.append(sock.recv(65535))
sock.send(b'HTTP/1.0 200 Connection established\r\n\r\n')
sock = self.server_thread.wrap_socket(sock)
receive_preamble(sock)
data.append(sock.recv(65535))
req_event.wait(5)
h = HeadersFrame(1)
h.data = self.get_encoder().encode(
[
(':status', 200),
('content-type', 'not/real'),
('content-length', 12),
('server', 'socket-level-server')
]
)
h.flags.add('END_HEADERS')
sock.send(h.serialize())
d = DataFrame(1)
d.data = b'thisisaproxy'
d.flags.add('END_STREAM')
sock.send(d.serialize())
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
c = self.get_connection()
c.request('GET', '/')
req_event.set()
r = c.get_response()
assert r.status == 200
assert len(r.headers) == 3
assert r.headers[b'server'] == [b'socket-level-server']
assert r.headers[b'content-length'] == [b'12']
assert r.headers[b'content-type'] == [b'not/real']
assert r.read() == b'thisisaproxy'
assert (to_bytestring(
'CONNECT %s:%d HTTP/1.1\r\n\r\n' % (c.host, c.port)) ==
b''.join(connect_request_headers))
recv_event.set()
self.tear_down()
def test_failing_proxy_tunnel(self):
self.set_up(secure=SocketSecuritySetting.SECURE_NO_AUTO_WRAP,
proxy=True)
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# Read the CONNECT request
connect_data = b''
while not connect_data.endswith(b'\r\n\r\n'):
connect_data += sock.recv(65535)
sock.send(b'HTTP/1.0 407 Proxy Authentication Required\r\n\r\n')
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
try:
conn.connect()
assert False, "Exception should have been thrown"
except ProxyError as e:
assert e.response.status == 407
assert e.response.reason == b'Proxy Authentication Required'
# Confirm the connection is closed.
assert conn._sock is None
recv_event.set()
self.tear_down()
def test_resetting_stream_with_frames_in_flight(self):
"""
Hyper emits only one RST_STREAM frame, despite the other frames in
flight.
"""
self.set_up()
req_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We get two messages for the connection open and then a HEADERS
# frame.
receive_preamble(sock)
sock.recv(65535)
# Wait for request
req_event.wait(5)
# Now, send the headers for the response. This response has no
# body.
f = build_headers_frame(
[(':status', '204'), ('content-length', '0')]
)
f.flags.add('END_STREAM')
f.stream_id = 1
sock.send(f.serialize())
# Wait for the message from the main thread.
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
stream_id = conn.request('GET', '/')
req_event.set()
# Now, trigger the RST_STREAM frame by closing the stream.
conn._send_rst_frame(stream_id, 0)
# Now, eat the Headers frame. This should not cause an exception.
conn._recv_cb()
# However, attempting to get the response should.
with pytest.raises(StreamResetError):
conn.get_response(stream_id)
# Awesome, we're done now.
recv_event.set()
self.tear_down()
def test_stream_can_be_reset_multiple_times(self):
"""
Confirm that hyper gracefully handles receiving multiple RST_STREAM
frames.
"""
self.set_up()
req_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We get two messages for the connection open and then a HEADERS
# frame.
receive_preamble(sock)
sock.recv(65535)
# Wait for request
req_event.wait(5)
# Now, send two RST_STREAM frames.
for _ in range(0, 2):
f = RstStreamFrame(1)
sock.send(f.serialize())
# Wait for the message from the main thread.
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.request('GET', '/')
req_event.set()
# Now, eat the Rst frames. These should not cause an exception.
conn._single_read()
conn._single_read()
# However, attempting to get the response should.
with pytest.raises(StreamResetError):
conn.get_response(1)
assert conn.reset_streams == set([1])
# Awesome, we're done now.
recv_event.set()
self.tear_down()
def test_read_chunked_http2(self):
self.set_up()
req_event = threading.Event()
recv_event = threading.Event()
wait_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We get two messages for the connection open and then a HEADERS
# frame.
receive_preamble(sock)
sock.recv(65535)
# Wait for request
req_event.wait(5)
# Now, send the headers for the response. This response has a body.
f = build_headers_frame([(':status', '200')])
f.stream_id = 1
sock.send(f.serialize())
# Send the first two chunks.
f = DataFrame(1)
f.data = b'hello'
sock.sendall(f.serialize())
f = DataFrame(1)
f.data = b'there'
sock.sendall(f.serialize())
# Now, delay a bit. We want to wait a half a second before we send
# the next frame.
wait_event.wait(5)
time.sleep(0.5)
f = DataFrame(1)
f.data = b'world'
f.flags.add('END_STREAM')
sock.sendall(f.serialize())
# Wait for the message from the main thread.
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.request('GET', '/')
req_event.set()
resp = conn.get_response()
# Confirm the status code.
assert resp.status == 200
# Confirm that we can read this, but it has no body. First two chunks
# should be easy, then set the event and read the next one.
chunks = resp.read_chunked()
first_chunk = next(chunks)
second_chunk = next(chunks)
wait_event.set()
third_chunk = next(chunks)
with pytest.raises(StopIteration):
next(chunks)
assert first_chunk == b'hello'
assert second_chunk == b'there'
assert third_chunk == b'world'
# Awesome, we're done now.
recv_event.set()
self.tear_down()
def test_read_delayed(self):
self.set_up()
req_event = threading.Event()
wait_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We get two messages for the connection open and then a HEADERS
# frame.
receive_preamble(sock)
sock.recv(65535)
# Wait for request
req_event.wait(5)
# Now, send the headers for the response. This response has a body.
f = build_headers_frame([(':status', '200')])
f.stream_id = 1
sock.send(f.serialize())
# Send the first two chunks.
f = DataFrame(1)
f.data = b'hello'
sock.sendall(f.serialize())
f = DataFrame(1)
f.data = b'there'
sock.sendall(f.serialize())
# Now, delay a bit. We want to wait a half a second before we send
# the next frame.
wait_event.wait(5)
time.sleep(0.5)
f = DataFrame(1)
f.data = b'world'
f.flags.add('END_STREAM')
sock.sendall(f.serialize())
# Wait for the message from the main thread.
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.request('GET', '/')
req_event.set()
resp = conn.get_response()
# Confirm the status code.
assert resp.status == 200
first_chunk = resp.read(10)
wait_event.set()
second_chunk = resp.read(5)
assert first_chunk == b'hellothere'
assert second_chunk == b'world'
# Awesome, we're done now.
recv_event.set()
self.tear_down()
def test_upgrade(self):
self.set_up(secure=False)
wait_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# First read the HTTP/1.1 request
data = b''
while not data.endswith(b'\r\n\r\n'):
data += sock.recv(65535)
# Check it's an upgrade.
assert b'upgrade: h2c\r\n' in data
# Send back an upgrade message.
data = (
b'HTTP/1.1 101 Switching Protocols\r\n'
b'Server: some-server\r\n'
b'Connection: upgrade\r\n'
b'Upgrade: h2c\r\n'
b'\r\n'
)
sock.sendall(data)
# We get a message for connection open, specifically the preamble.
receive_preamble(sock)
# Now, send the headers for the response. This response has a body.
f = build_headers_frame([(':status', '200')])
f.stream_id = 1
sock.sendall(f.serialize())
# Send the first two chunks.
f = DataFrame(1)
f.data = b'hello'
sock.sendall(f.serialize())
f = DataFrame(1)
f.data = b'there'
sock.sendall(f.serialize())
# Now, delay a bit. We want to wait a half a second before we send
# the next frame.
wait_event.wait(5)
time.sleep(0.5)
f = DataFrame(1)
f.data = b'world'
f.flags.add('END_STREAM')
sock.sendall(f.serialize())
# Wait for the message from the main thread.
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
conn = hyper.HTTPConnection(self.host, self.port, self.secure)
conn.request('GET', '/')
resp = conn.get_response()
# Confirm the status code.
assert resp.status == 200
first_chunk = resp.read(10)
wait_event.set()
second_chunk = resp.read(5)
assert first_chunk == b'hellothere'
assert second_chunk == b'world'
# Awesome, we're done now.
recv_event.set()
self.tear_down()
def test_version_after_tls_upgrade(self, monkeypatch):
self.set_up()
# We need to patch the ssl_wrap_socket method to ensure that we
# forcefully upgrade.
old_wrap_socket = hyper.http11.connection.wrap_socket
def wrap(*args):
sock, _ = old_wrap_socket(*args)
return sock, 'h2'
monkeypatch.setattr(hyper.http11.connection, 'wrap_socket', wrap)
req_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
receive_preamble(sock)
# Wait for the request
req_event.wait(5)
# Send the headers for the response. This response has no body.
f = build_headers_frame(
[(':status', '200'), ('content-length', '0')]
)
f.flags.add('END_STREAM')
f.stream_id = 1
sock.sendall(f.serialize())
# wait for the message from the main thread
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
c = hyper.HTTPConnection(self.host, self.port, secure=True)
assert c.version is HTTPVersion.http11
assert c.version is not HTTPVersion.http20
c.request('GET', '/')
req_event.set()
assert c.version is HTTPVersion.http20
recv_event.set()
self.tear_down()
def test_version_after_http_upgrade(self):
self.set_up()
self.secure = False
req_event = threading.Event()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We should get the initial request.
data = b''
while not data.endswith(b'\r\n\r\n'):
data += sock.recv(65535)
assert b'upgrade: h2c\r\n' in data
req_event.wait(5)
# We need to send back a response.
resp = (
b'HTTP/1.1 101 Upgrade\r\n'
b'Server: socket-level-server\r\n'
b'Content-Length: 0\r\n'
b'Connection: upgrade\r\n'
b'Upgrade: h2c\r\n'
b'\r\n'
)
sock.sendall(resp)
# We get a message for connection open, specifically the preamble.
receive_preamble(sock)
# Send the headers for the response. This response has a body.
f = build_headers_frame(
[(':status', '200'), ('content-length', '0')]
)
f.stream_id = 1
f.flags.add('END_STREAM')
sock.sendall(f.serialize())
# keep the socket open for clean shutdown
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
c = hyper.HTTPConnection(self.host, self.port)
assert c.version is HTTPVersion.http11
c.request('GET', '/')
req_event.set()
resp = c.get_response()
assert c.version is HTTPVersion.http20
assert resp.version is HTTPVersion.http20
recv_event.set()
self.tear_down()
def test_connection_and_send_simultaneously(self):
        # Since the deadlock is probabilistic, a passing run of this test
        # case does not prove the deadlock cannot occur.
self.set_up()
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
receive_preamble(sock)
sock.recv(65535)
recv_event.set()
sock.close()
def do_req(conn):
conn.request('GET', '/')
recv_event.wait()
def do_connect(conn):
conn.connect()
self._start_server(socket_handler)
conn = self.get_connection()
pool = ThreadPoolExecutor(max_workers=2)
pool.submit(do_connect, conn)
f = pool.submit(do_req, conn)
try:
f.result(timeout=10)
except TimeoutError:
assert False
self.tear_down()
def test_connection_timeout(self):
self.set_up(timeout=0.5)
def socket_handler(listener):
time.sleep(1)
self._start_server(socket_handler)
conn = self.get_connection()
with pytest.raises((SocketTimeout, ssl.SSLError)):
# Py2 raises this as a BaseSSLError,
# Py3 raises it as socket timeout.
conn.connect()
self.tear_down()
def test_hyper_connection_timeout(self):
self.set_up(timeout=0.5)
def socket_handler(listener):
time.sleep(1)
self._start_server(socket_handler)
conn = hyper.HTTPConnection(self.host, self.port, self.secure,
timeout=self.timeout)
with pytest.raises((SocketTimeout, ssl.SSLError)):
# Py2 raises this as a BaseSSLError,
# Py3 raises it as socket timeout.
conn.request('GET', '/')
self.tear_down()
def test_read_timeout(self):
self.set_up(timeout=(10, 0.5))
req_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We get two messages for the connection open and then a HEADERS
# frame.
receive_preamble(sock)
sock.recv(65535)
# Wait for request
req_event.wait(5)
# Sleep wait for read timeout
time.sleep(1)
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
conn.request('GET', '/')
req_event.set()
with pytest.raises((SocketTimeout, ssl.SSLError)):
# Py2 raises this as a BaseSSLError,
# Py3 raises it as socket timeout.
conn.get_response()
self.tear_down()
def test_default_connection_timeout(self):
self.set_up(timeout=None)
# Confirm that we send the connection upgrade string and the initial
# SettingsFrame.
data = []
send_event = threading.Event()
def socket_handler(listener):
time.sleep(1)
sock = listener.accept()[0]
# We should get one big chunk.
first = sock.recv(65535)
data.append(first)
# We need to send back a SettingsFrame.
f = SettingsFrame(0)
sock.send(f.serialize())
send_event.set()
sock.close()
self._start_server(socket_handler)
conn = self.get_connection()
try:
conn.connect()
except (SocketTimeout, ssl.SSLError):
# Py2 raises this as a BaseSSLError,
# Py3 raises it as socket timeout.
pytest.fail()
send_event.wait(5)
assert data[0].startswith(b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n')
self.tear_down()
@patch('hyper.http20.connection.H2_NPN_PROTOCOLS', PROTOCOLS)
class TestRequestsAdapter(SocketLevelTest):
# This uses HTTP/2.
h2 = True
def test_adapter_received_values(self, monkeypatch, frame_buffer):
self.set_up()
# We need to patch the ssl_wrap_socket method to ensure that we
# forcefully upgrade.
old_wrap_socket = hyper.http11.connection.wrap_socket
def wrap(*args):
sock, _ = old_wrap_socket(*args)
return sock, 'h2'
monkeypatch.setattr(hyper.http11.connection, 'wrap_socket', wrap)
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# Do the handshake: conn header, settings, send settings, recv ack.
frame_buffer.add_data(receive_preamble(sock))
# Now expect some data. One headers frame.
req_wait = True
while req_wait:
frame_buffer.add_data(sock.recv(65535))
with reusable_frame_buffer(frame_buffer) as fr:
for f in fr:
if isinstance(f, HeadersFrame):
req_wait = False
# Respond!
h = HeadersFrame(1)
h.data = self.get_encoder().encode(
[
(':status', 200),
('content-type', 'not/real'),
('content-length', 20),
]
)
h.flags.add('END_HEADERS')
sock.send(h.serialize())
d = DataFrame(1)
d.data = b'1234567890' * 2
d.flags.add('END_STREAM')
sock.send(d.serialize())
# keep the socket open for clean shutdown
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
s = requests.Session()
s.mount('https://%s' % self.host, HTTP20Adapter())
r = s.get('https://%s:%s/some/path' % (self.host, self.port))
# Assert about the received values.
assert r.status_code == 200
assert r.headers['Content-Type'] == 'not/real'
assert r.content == b'1234567890' * 2
recv_event.set()
self.tear_down()
def test_adapter_sending_values(self, monkeypatch, frame_buffer):
self.set_up()
# We need to patch the ssl_wrap_socket method to ensure that we
# forcefully upgrade.
old_wrap_socket = hyper.http11.connection.wrap_socket
def wrap(*args):
sock, _ = old_wrap_socket(*args)
return sock, 'h2'
monkeypatch.setattr(hyper.http11.connection, 'wrap_socket', wrap)
recv_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# Do the handshake: conn header, settings, send settings, recv ack.
frame_buffer.add_data(receive_preamble(sock))
# Now expect some data. One headers frame and one data frame.
req_wait = True
while req_wait:
frame_buffer.add_data(sock.recv(65535))
with reusable_frame_buffer(frame_buffer) as fr:
for f in fr:
if isinstance(f, DataFrame):
req_wait = False
# Respond!
h = HeadersFrame(1)
h.data = self.get_encoder().encode(
[
(':status', 200),
('content-type', 'not/real'),
('content-length', 20),
]
)
h.flags.add('END_HEADERS')
sock.send(h.serialize())
d = DataFrame(1)
d.data = b'1234567890' * 2
d.flags.add('END_STREAM')
sock.send(d.serialize())
# keep the socket open for clean shutdown
recv_event.wait(5)
sock.close()
self._start_server(socket_handler)
s = requests.Session()
s.mount('https://%s' % self.host, HTTP20Adapter())
r = s.post(
'https://%s:%s/some/path' % (self.host, self.port),
data='hi there',
)
# Assert about the sent values.
assert r.status_code == 200
frames = list(frame_buffer)
assert isinstance(frames[-2], HeadersFrame)
assert isinstance(frames[-1], DataFrame)
assert frames[-1].data == b'hi there'
recv_event.set()
self.tear_down()
def test_adapter_uses_proxies(self):
self.set_up(secure=SocketSecuritySetting.SECURE_NO_AUTO_WRAP,
proxy=True)
send_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# Read the CONNECT request
connect_data = b''
while not connect_data.endswith(b'\r\n\r\n'):
connect_data += sock.recv(65535)
sock.send(b'HTTP/1.0 200 Connection established\r\n\r\n')
sock = self.server_thread.wrap_socket(sock)
# We should get the initial request.
data = b''
while not data.endswith(b'\r\n\r\n'):
data += sock.recv(65535)
send_event.wait()
# We need to send back a response.
resp = (
b'HTTP/1.1 201 No Content\r\n'
b'Server: socket-level-server\r\n'
b'Content-Length: 0\r\n'
b'Connection: close\r\n'
b'\r\n'
)
sock.send(resp)
sock.close()
self._start_server(socket_handler)
s = requests.Session()
s.proxies = {'all': 'http://%s:%s' % (self.host, self.port)}
s.mount('https://', HTTP20Adapter())
send_event.set()
r = s.get('https://foobar/')
assert r.status_code == 201
assert len(r.headers) == 3
assert r.headers['server'] == 'socket-level-server'
assert r.headers['content-length'] == '0'
assert r.headers['connection'] == 'close'
assert r.content == b''
self.tear_down()
def test_adapter_uses_proxy_auth_for_secure(self):
self.set_up(secure=SocketSecuritySetting.SECURE_NO_AUTO_WRAP,
proxy=True)
send_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# Read the CONNECT request
connect_data = b''
while not connect_data.endswith(b'\r\n\r\n'):
connect_data += sock.recv(65535)
# Ensure that request contains the proper Proxy-Authorization
# header
assert (b'CONNECT foobar:443 HTTP/1.1\r\n'
b'Proxy-Authorization: Basic ' +
base64.b64encode(b'foo:bar') + b'\r\n'
b'\r\n') == connect_data
sock.send(b'HTTP/1.0 200 Connection established\r\n\r\n')
sock = self.server_thread.wrap_socket(sock)
# We should get the initial request.
data = b''
while not data.endswith(b'\r\n\r\n'):
data += sock.recv(65535)
# Ensure that proxy headers are not passed via tunnelled connection
assert b'Proxy-Authorization:' not in data
send_event.wait()
# We need to send back a response.
resp = (
b'HTTP/1.1 201 No Content\r\n'
b'Server: socket-level-server\r\n'
b'Content-Length: 0\r\n'
b'Connection: close\r\n'
b'\r\n'
)
sock.send(resp)
sock.close()
self._start_server(socket_handler)
s = requests.Session()
s.proxies = {'all': 'http://foo:bar@%s:%s' % (self.host, self.port)}
s.mount('https://', HTTP20Adapter())
send_event.set()
r = s.get('https://foobar/')
assert r.status_code == 201
assert len(r.headers) == 3
assert r.headers['server'] == 'socket-level-server'
assert r.headers['content-length'] == '0'
assert r.headers['connection'] == 'close'
assert r.content == b''
self.tear_down()
def test_adapter_uses_proxy_auth_for_insecure(self):
self.set_up(secure=False, proxy=True)
send_event = threading.Event()
def socket_handler(listener):
sock = listener.accept()[0]
# We should get the initial request.
connect_data = b''
while not connect_data.endswith(b'\r\n\r\n'):
connect_data += sock.recv(65535)
# Ensure that request contains the proper Proxy-Authorization
# header
assert (b'Proxy-Authorization: Basic ' +
base64.b64encode(b'foo:bar') + b'\r\n'
).lower() in connect_data.lower()
send_event.wait()
# We need to send back a response.
resp = (
b'HTTP/1.1 201 No Content\r\n'
b'Server: socket-level-server\r\n'
b'Content-Length: 0\r\n'
b'Connection: close\r\n'
b'\r\n'
)
sock.send(resp)
sock.close()
self._start_server(socket_handler)
s = requests.Session()
s.proxies = {'all': 'http://foo:bar@%s:%s' % (self.host, self.port)}
s.mount('http://', HTTP20Adapter())
send_event.set()
r = s.get('http://foobar/')
assert r.status_code == 201
assert len(r.headers) == 3
assert r.headers['server'] == 'socket-level-server'
assert r.headers['content-length'] == '0'
assert r.headers['connection'] == 'close'
assert r.content == b''
self.tear_down()
def test_adapter_connection_timeout(self, monkeypatch, frame_buffer):
self.set_up()
# We need to patch the ssl_wrap_socket method to ensure that we
# forcefully upgrade.
old_wrap_socket = hyper.http11.connection.wrap_socket
def wrap(*args):
sock, _ = old_wrap_socket(*args)
return sock, 'h2'
monkeypatch.setattr(hyper.http11.connection, 'wrap_socket', wrap)
def socket_handler(listener):
time.sleep(1)
self._start_server(socket_handler)
s = requests.Session()
s.mount('https://%s' % self.host, HTTP20Adapter())
with pytest.raises((SocketTimeout, ssl.SSLError)):
# Py2 raises this as a BaseSSLError,
# Py3 raises it as socket timeout.
s.get('https://%s:%s/some/path' % (self.host, self.port),
timeout=0.5)
self.tear_down()
def test_adapter_read_timeout(self, monkeypatch, frame_buffer):
self.set_up()
# We need to patch the ssl_wrap_socket method to ensure that we
# forcefully upgrade.
old_wrap_socket = hyper.http11.connection.wrap_socket
def wrap(*args):
sock, _ = old_wrap_socket(*args)
return sock, 'h2'
monkeypatch.setattr(hyper.http11.connection, 'wrap_socket', wrap)
def socket_handler(listener):
sock = listener.accept()[0]
# Do the handshake: conn header, settings, send settings, recv ack.
frame_buffer.add_data(receive_preamble(sock))
# Now expect some data. One headers frame.
req_wait = True
while req_wait:
frame_buffer.add_data(sock.recv(65535))
with reusable_frame_buffer(frame_buffer) as fr:
for f in fr:
if isinstance(f, HeadersFrame):
req_wait = False
# Sleep wait for read timeout
time.sleep(1)
sock.close()
self._start_server(socket_handler)
s = requests.Session()
s.mount('https://%s' % self.host, HTTP20Adapter())
with pytest.raises((SocketTimeout, ssl.SSLError)):
# Py2 raises this as a BaseSSLError,
# Py3 raises it as socket timeout.
s.get('https://%s:%s/some/path' % (self.host, self.port),
timeout=(10, 0.5))
self.tear_down()
def test_adapter_close(self):
self.set_up(secure=False)
def socket_handler(listener):
sock = listener.accept()[0]
# We should get the initial request.
data = b''
while not data.endswith(b'\r\n\r\n'):
data += sock.recv(65535)
# We need to send back a response.
resp = (
b'HTTP/1.1 201 No Content\r\n'
b'Server: socket-level-server\r\n'
b'Content-Length: 0\r\n'
b'Connection: close\r\n'
b'\r\n'
)
sock.send(resp)
sock.close()
self._start_server(socket_handler)
a = HTTP20Adapter()
s = requests.Session()
s.mount('http://', a)
r = s.get('http://%s:%s' % (self.host, self.port))
connections_before_close = list(a.connections.values())
# ensure that we have at least 1 connection
assert connections_before_close
s.close()
# check that connections cache is empty
assert not a.connections
# check that all connections are actually closed
assert all(conn._sock is None for conn in connections_before_close)
assert r.status_code == 201
assert len(r.headers) == 3
assert r.headers['server'] == 'socket-level-server'
assert r.headers['content-length'] == '0'
assert r.headers['connection'] == 'close'
assert r.content == b''
self.tear_down()
def test_adapter_close_context_manager(self):
self.set_up(secure=False)
def socket_handler(listener):
sock = listener.accept()[0]
# We should get the initial request.
data = b''
while not data.endswith(b'\r\n\r\n'):
data += sock.recv(65535)
# We need to send back a response.
resp = (
b'HTTP/1.1 201 No Content\r\n'
b'Server: socket-level-server\r\n'
b'Content-Length: 0\r\n'
b'Connection: close\r\n'
b'\r\n'
)
sock.send(resp)
sock.close()
self._start_server(socket_handler)
with requests.Session() as s:
a = HTTP20Adapter()
s.mount('http://', a)
r = s.get('http://%s:%s' % (self.host, self.port))
connections_before_close = list(a.connections.values())
# ensure that we have at least 1 connection
assert connections_before_close
# check that connections cache is empty
assert not a.connections
# check that all connections are actually closed
assert all(conn._sock is None for conn in connections_before_close)
assert r.status_code == 201
assert len(r.headers) == 3
assert r.headers['server'] == 'socket-level-server'
assert r.headers['content-length'] == '0'
assert r.headers['connection'] == 'close'
assert r.content == b''
self.tear_down()
|
Lukasa/hyper
|
test/test_integration.py
|
Python
|
mit
| 54,512
|
#!/usr/bin/python -u
# Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import array
from datetime import datetime
import locale
import os
import os.path
import random
import StringIO
import sys
import time
import threading
import uuid
import unittest
import urllib
from test import get_config
from swift import Account, AuthenticationFailed, Connection, Container, \
File, ResponseError
config = get_config()
locale.setlocale(locale.LC_COLLATE, config.get('collate', 'C'))
class Base:
pass
def chunks(s, length=3):
i, j = 0, length
while i < len(s):
yield s[i:j]
i, j = j, j + length
def timeout(seconds, method, *args, **kwargs):
class TimeoutThread(threading.Thread):
def __init__ (self, method, *args, **kwargs):
threading.Thread.__init__(self)
self.method = method
self.args = args
self.kwargs = kwargs
self.exception = None
def run(self):
try:
self.method(*self.args, **self.kwargs)
except Exception, e:
self.exception = e
t = TimeoutThread(method, *args, **kwargs)
t.start()
t.join(seconds)
if t.exception:
raise t.exception
if t.isAlive():
t._Thread__stop()
return True
return False
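# A minimal usage sketch for the helper above (`some_file` is hypothetical):
# returns True when `method` was still running after `seconds` and had to be
# force-stopped via the private _Thread__stop API, False if it finished.
#
# hit_timeout = timeout(5, some_file.write_random, 1024)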
class Utils:
@classmethod
def create_ascii_name(cls, length=None):
return uuid.uuid4().hex
@classmethod
def create_utf8_name(cls, length=None):
if length is None:
length = 15
else:
length = int(length)
utf8_chars = u'\uF10F\uD20D\uB30B\u9409\u8508\u5605\u3703\u1801'\
u'\u0900\uF110\uD20E\uB30C\u940A\u8509\u5606\u3704'\
u'\u1802\u0901\uF111\uD20F\uB30D\u940B\u850A\u5607'\
u'\u3705\u1803\u0902\uF112\uD210\uB30E\u940C\u850B'\
u'\u5608\u3706\u1804\u0903\u03A9\u2603'
return ''.join([random.choice(utf8_chars) for x in \
xrange(length)]).encode('utf-8')
create_name = create_ascii_name
class Base(unittest.TestCase):
def setUp(self):
cls = type(self)
if not cls.set_up:
cls.env.setUp()
cls.set_up = True
def assert_body(self, body):
response_body = self.env.conn.response.read()
self.assert_(response_body == body,
'Body returned: %s' % (response_body))
def assert_status(self, status_or_statuses):
self.assert_(self.env.conn.response.status == status_or_statuses or
(hasattr(status_or_statuses, '__iter__') and
self.env.conn.response.status in status_or_statuses),
'Status returned: %d Expected: %s' %
(self.env.conn.response.status, status_or_statuses))
class Base2(object):
def setUp(self):
Utils.create_name = Utils.create_utf8_name
super(Base2, self).setUp()
def tearDown(self):
Utils.create_name = Utils.create_ascii_name
class TestAccountEnv:
@classmethod
def setUp(cls):
cls.conn = Connection(config)
cls.conn.authenticate()
cls.account = Account(cls.conn, config.get('account',
config['username']))
cls.account.delete_containers()
cls.containers = []
for i in range(10):
cont = cls.account.container(Utils.create_name())
if not cont.create():
raise ResponseError(cls.conn.response)
cls.containers.append(cont)
class TestAccountDev(Base):
env = TestAccountEnv
set_up = False
class TestAccountDevUTF8(Base2, TestAccountDev):
set_up = False
class TestAccount(Base):
env = TestAccountEnv
set_up = False
def testNoAuthToken(self):
self.assertRaises(ResponseError, self.env.account.info,
cfg={'no_auth_token':True})
self.assert_status([401, 412])
self.assertRaises(ResponseError, self.env.account.containers,
cfg={'no_auth_token':True})
self.assert_status([401, 412])
def testInvalidUTF8Path(self):
invalid_utf8 = Utils.create_utf8_name()[::-1]
container = self.env.account.container(invalid_utf8)
self.assert_(not container.create(cfg={'no_path_quote':True}))
self.assert_status(412)
self.assert_body('Invalid UTF8')
def testVersionOnlyPath(self):
self.env.account.conn.make_request('PUT',
cfg={'version_only_path':True})
self.assert_status(412)
self.assert_body('Bad URL')
def testInvalidPath(self):
was_url = self.env.account.conn.storage_url
self.env.account.conn.storage_url = "/%s" % was_url
self.env.account.conn.make_request('GET')
try:
self.assert_status(404)
finally:
self.env.account.conn.storage_url = was_url
def testPUT(self):
self.env.account.conn.make_request('PUT')
self.assert_status([403, 405])
def testAccountHead(self):
try_count = 0
while try_count < 5:
try_count += 1
info = self.env.account.info()
for field in ['object_count', 'container_count', 'bytes_used']:
self.assert_(info[field] >= 0)
if info['container_count'] == len(self.env.containers):
break
if try_count < 5:
time.sleep(1)
self.assertEquals(info['container_count'], len(self.env.containers))
self.assert_status(204)
def testContainerSerializedInfo(self):
container_info = {}
for container in self.env.containers:
info = {'bytes': 0}
info['count'] = random.randint(10, 30)
for i in range(info['count']):
file = container.file(Utils.create_name())
bytes = random.randint(1, 32768)
file.write_random(bytes)
info['bytes'] += bytes
container_info[container.name] = info
for format in ['json', 'xml']:
for a in self.env.account.containers(
parms={'format':format}):
self.assert_(a['count'] >= 0)
self.assert_(a['bytes'] >= 0)
headers = dict(self.env.conn.response.getheaders())
if format == 'json':
self.assertEquals(headers['content-type'],
'application/json; charset=utf-8')
elif format == 'xml':
self.assertEquals(headers['content-type'],
'application/xml; charset=utf-8')
def testListingLimit(self):
limit = 10000
for l in (1, 100, limit/2, limit-1, limit, limit+1, limit*2):
p = {'limit':l}
if l <= limit:
self.assert_(len(self.env.account.containers(parms=p)) <= l)
self.assert_status(200)
else:
self.assertRaises(ResponseError,
self.env.account.containers, parms=p)
self.assert_status(412)
def testContainerListing(self):
a = sorted([c.name for c in self.env.containers])
for format in [None, 'json', 'xml']:
b = self.env.account.containers(parms={'format':format})
if isinstance(b[0], dict):
b = [x['name'] for x in b]
self.assertEquals(a, b)
def testInvalidAuthToken(self):
hdrs = {'X-Auth-Token': 'bogus_auth_token'}
self.assertRaises(ResponseError, self.env.account.info, hdrs=hdrs)
self.assert_status(401)
def testLastContainerMarker(self):
for format in [None, 'json', 'xml']:
containers = self.env.account.containers({'format':format})
self.assertEquals(len(containers), len(self.env.containers))
self.assert_status(200)
containers = self.env.account.containers(
parms={'format':format,'marker':containers[-1]})
self.assertEquals(len(containers), 0)
if format is None:
self.assert_status(204)
else:
self.assert_status(200)
def testMarkerLimitContainerList(self):
for format in [None, 'json', 'xml']:
for marker in ['0', 'A', 'I', 'R', 'Z', 'a', 'i', 'r', 'z', \
'abc123', 'mnop', 'xyz']:
limit = random.randint(2, 9)
containers = self.env.account.containers(
parms={'format':format, 'marker':marker, 'limit':limit})
self.assert_(len(containers) <= limit)
if containers:
if isinstance(containers[0], dict):
containers = [x['name'] for x in containers]
self.assert_(locale.strcoll(containers[0], marker) > 0)
def testContainersOrderedByName(self):
for format in [None, 'json', 'xml']:
containers = self.env.account.containers(
parms={'format':format})
if isinstance(containers[0], dict):
containers = [x['name'] for x in containers]
self.assertEquals(sorted(containers, cmp=locale.strcoll),
containers)
class TestAccountUTF8(Base2, TestAccount):
set_up = False
class TestAccountNoContainersEnv:
@classmethod
def setUp(cls):
cls.conn = Connection(config)
cls.conn.authenticate()
cls.account = Account(cls.conn, config.get('account',
config['username']))
cls.account.delete_containers()
class TestAccountNoContainers(Base):
env = TestAccountNoContainersEnv
set_up = False
def testGetRequest(self):
for format in [None, 'json', 'xml']:
self.assert_(not self.env.account.containers(
parms={'format':format}))
if format is None:
self.assert_status(204)
else:
self.assert_status(200)
class TestAccountNoContainersUTF8(Base2, TestAccountNoContainers):
set_up = False
class TestContainerEnv:
@classmethod
def setUp(cls):
cls.conn = Connection(config)
cls.conn.authenticate()
cls.account = Account(cls.conn, config.get('account',
config['username']))
cls.account.delete_containers()
cls.container = cls.account.container(Utils.create_name())
if not cls.container.create():
raise ResponseError(cls.conn.response)
cls.file_count = 10
cls.file_size = 128
cls.files = list()
for x in range(cls.file_count):
file = cls.container.file(Utils.create_name())
file.write_random(cls.file_size)
cls.files.append(file.name)
class TestContainerDev(Base):
env = TestContainerEnv
set_up = False
class TestContainerDevUTF8(Base2, TestContainerDev):
set_up = False
class TestContainer(Base):
env = TestContainerEnv
set_up = False
def testContainerNameLimit(self):
limit = 256
for l in (limit-100, limit-10, limit-1, limit,
limit+1, limit+10, limit+100):
cont = self.env.account.container('a'*l)
if l <= limit:
self.assert_(cont.create())
self.assert_status(201)
else:
self.assert_(not cont.create())
self.assert_status(400)
def testFileThenContainerDelete(self):
cont = self.env.account.container(Utils.create_name())
self.assert_(cont.create())
file = cont.file(Utils.create_name())
self.assert_(file.write_random())
self.assert_(file.delete())
self.assert_status(204)
self.assert_(file.name not in cont.files())
self.assert_(cont.delete())
self.assert_status(204)
self.assert_(cont.name not in self.env.account.containers())
def testFileListingLimitMarkerPrefix(self):
cont = self.env.account.container(Utils.create_name())
self.assert_(cont.create())
files = sorted([Utils.create_name() for x in xrange(10)])
for f in files:
file = cont.file(f)
self.assert_(file.write_random())
for i in xrange(len(files)):
f = files[i]
for j in xrange(1, len(files)-i):
self.assert_(cont.files(parms={'limit':j, 'marker':f}) == files[i+1:i+j+1])
self.assert_(cont.files(parms={'marker':f}) == files[i+1:])
self.assert_(cont.files(parms={'marker': f, 'prefix':f}) == [])
self.assert_(cont.files(parms={'prefix': f}) == [f])
def testPrefixAndLimit(self):
cont = self.env.account.container(Utils.create_name())
self.assert_(cont.create())
prefix_file_count = 10
limit_count = 2
prefixs = ['alpha/', 'beta/', 'kappa/']
prefix_files = {}
all_files = []
for prefix in prefixs:
prefix_files[prefix] = []
for i in range(prefix_file_count):
file = cont.file(prefix + Utils.create_name())
file.write()
prefix_files[prefix].append(file.name)
for format in [None, 'json', 'xml']:
for prefix in prefixs:
files = cont.files(parms={'prefix':prefix})
self.assertEquals(files, sorted(prefix_files[prefix]))
for format in [None, 'json', 'xml']:
for prefix in prefixs:
files = cont.files(parms={'limit':limit_count,
'prefix':prefix})
self.assertEquals(len(files), limit_count)
for file in files:
self.assert_(file.startswith(prefix))
def testCreate(self):
cont = self.env.account.container(Utils.create_name())
self.assert_(cont.create())
self.assert_status(201)
self.assert_(cont.name in self.env.account.containers())
def testContainerFileListOnContainerThatDoesNotExist(self):
for format in [None, 'json', 'xml']:
container = self.env.account.container(Utils.create_name())
self.assertRaises(ResponseError, container.files,
parms={'format':format})
self.assert_status(404)
def testUtf8Container(self):
valid_utf8 = Utils.create_utf8_name()
invalid_utf8 = valid_utf8[::-1]
container = self.env.account.container(valid_utf8)
self.assert_(container.create(cfg={'no_path_quote':True}))
self.assert_(container.name in self.env.account.containers())
self.assertEquals(container.files(), [])
self.assert_(container.delete())
container = self.env.account.container(invalid_utf8)
self.assert_(not container.create(cfg={'no_path_quote':True}))
self.assert_status(412)
self.assertRaises(ResponseError, container.files,
cfg={'no_path_quote':True})
self.assert_status(412)
def testCreateOnExisting(self):
cont = self.env.account.container(Utils.create_name())
self.assert_(cont.create())
self.assert_status(201)
self.assert_(cont.create())
self.assert_status(202)
def testSlashInName(self):
if Utils.create_name == Utils.create_utf8_name:
cont_name = list(unicode(Utils.create_name(), 'utf-8'))
else:
cont_name = list(Utils.create_name())
cont_name[random.randint(2, len(cont_name)-2)] = '/'
cont_name = ''.join(cont_name)
if Utils.create_name == Utils.create_utf8_name:
cont_name = cont_name.encode('utf-8')
cont = self.env.account.container(cont_name)
self.assert_(not cont.create(cfg={'no_path_quote':True}),
'created container with name %s' % (cont_name))
self.assert_status(404)
self.assert_(cont.name not in self.env.account.containers())
def testDelete(self):
cont = self.env.account.container(Utils.create_name())
self.assert_(cont.create())
self.assert_status(201)
self.assert_(cont.delete())
self.assert_status(204)
self.assert_(cont.name not in self.env.account.containers())
def testDeleteOnContainerThatDoesNotExist(self):
cont = self.env.account.container(Utils.create_name())
self.assert_(not cont.delete())
self.assert_status(404)
def testDeleteOnContainerWithFiles(self):
cont = self.env.account.container(Utils.create_name())
self.assert_(cont.create())
file = cont.file(Utils.create_name())
file.write_random(self.env.file_size)
self.assert_(file.name in cont.files())
self.assert_(not cont.delete())
self.assert_status(409)
def testFileCreateInContainerThatDoesNotExist(self):
file = File(self.env.conn, self.env.account, Utils.create_name(),
Utils.create_name())
self.assertRaises(ResponseError, file.write)
self.assert_status(404)
def testLastFileMarker(self):
for format in [None, 'json', 'xml']:
files = self.env.container.files({'format':format})
self.assertEquals(len(files), len(self.env.files))
self.assert_status(200)
files = self.env.container.files(
parms={'format':format,'marker':files[-1]})
self.assertEquals(len(files), 0)
if format is None:
self.assert_status(204)
else:
self.assert_status(200)
def testContainerFileList(self):
for format in [None, 'json', 'xml']:
files = self.env.container.files(parms={'format':format})
self.assert_status(200)
if isinstance(files[0], dict):
files = [x['name'] for x in files]
for file in self.env.files:
self.assert_(file in files)
for file in files:
self.assert_(file in self.env.files)
def testMarkerLimitFileList(self):
for format in [None, 'json', 'xml']:
for marker in ['0', 'A', 'I', 'R', 'Z', 'a', 'i', 'r', 'z', \
'abc123', 'mnop', 'xyz']:
limit = random.randint(2, self.env.file_count-1)
files = self.env.container.files(parms={'format':format, \
'marker':marker, 'limit':limit})
if not files:
continue
if isinstance(files[0], dict):
files = [x['name'] for x in files]
self.assert_(len(files) <= limit)
if files:
if isinstance(files[0], dict):
files = [x['name'] for x in files]
self.assert_(locale.strcoll(files[0], marker) > 0)
def testFileOrder(self):
for format in [None, 'json', 'xml']:
files = self.env.container.files(parms={'format':format})
if isinstance(files[0], dict):
files = [x['name'] for x in files]
self.assertEquals(sorted(files, cmp=locale.strcoll), files)
def testContainerInfo(self):
info = self.env.container.info()
self.assert_status(204)
self.assertEquals(info['object_count'], self.env.file_count)
self.assertEquals(info['bytes_used'],
self.env.file_count*self.env.file_size)
def testContainerInfoOnContainerThatDoesNotExist(self):
container = self.env.account.container(Utils.create_name())
self.assertRaises(ResponseError, container.info)
self.assert_status(404)
def testContainerFileListWithLimit(self):
for format in [None, 'json', 'xml']:
files = self.env.container.files(parms={'format':format,
'limit':2})
self.assertEquals(len(files), 2)
def testTooLongName(self):
cont = self.env.account.container('x'*257)
self.assert_(not cont.create(), 'created container with name %s' % \
(cont.name))
self.assert_status(400)
def testContainerExistenceCachingProblem(self):
cont = self.env.account.container(Utils.create_name())
self.assertRaises(ResponseError, cont.files)
self.assert_(cont.create())
cont.files()
cont = self.env.account.container(Utils.create_name())
self.assertRaises(ResponseError, cont.files)
self.assert_(cont.create())
file = cont.file(Utils.create_name())
file.write_random()
class TestContainerUTF8(Base2, TestContainer):
set_up = False
class TestContainerPathsEnv:
@classmethod
def setUp(cls):
cls.conn = Connection(config)
cls.conn.authenticate()
cls.account = Account(cls.conn, config.get('account',
config['username']))
cls.account.delete_containers()
cls.file_size = 8
cls.container = cls.account.container(Utils.create_name())
if not cls.container.create():
raise ResponseError(cls.conn.response)
cls.files = [
'/file1',
'/file A',
'/dir1/',
'/dir2/',
'/dir1/file2',
'/dir1/subdir1/',
'/dir1/subdir2/',
'/dir1/subdir1/file2',
'/dir1/subdir1/file3',
'/dir1/subdir1/file4',
'/dir1/subdir1/subsubdir1/',
'/dir1/subdir1/subsubdir1/file5',
'/dir1/subdir1/subsubdir1/file6',
'/dir1/subdir1/subsubdir1/file7',
'/dir1/subdir1/subsubdir1/file8',
'/dir1/subdir1/subsubdir2/',
'/dir1/subdir1/subsubdir2/file9',
'/dir1/subdir1/subsubdir2/file0',
'file1',
'dir1/',
'dir2/',
'dir1/file2',
'dir1/subdir1/',
'dir1/subdir2/',
'dir1/subdir1/file2',
'dir1/subdir1/file3',
'dir1/subdir1/file4',
'dir1/subdir1/subsubdir1/',
'dir1/subdir1/subsubdir1/file5',
'dir1/subdir1/subsubdir1/file6',
'dir1/subdir1/subsubdir1/file7',
'dir1/subdir1/subsubdir1/file8',
'dir1/subdir1/subsubdir2/',
'dir1/subdir1/subsubdir2/file9',
'dir1/subdir1/subsubdir2/file0',
'dir1/subdir with spaces/',
'dir1/subdir with spaces/file B',
'dir1/subdir+with{whatever/',
'dir1/subdir+with{whatever/file D',
]
for f in cls.files:
file = cls.container.file(f)
if f.endswith('/'):
file.write(hdrs={'content-type': 'application/directory'})
else:
file.write_random(cls.file_size, hdrs={'content-type': \
'application/directory'})
class TestContainerPaths(Base):
env = TestContainerPathsEnv
set_up = False
def testTraverseContainer(self):
found_files = []
found_dirs = []
def recurse_path(path, count=0):
if count > 10:
raise ValueError('too deep recursion')
for file in self.env.container.files(parms={'path':path}):
self.assert_(file.startswith(path))
if file.endswith('/'):
recurse_path(file, count + 1)
found_dirs.append(file)
else:
found_files.append(file)
recurse_path('')
for file in self.env.files:
if file.startswith('/'):
self.assert_(file not in found_dirs)
self.assert_(file not in found_files)
elif file.endswith('/'):
self.assert_(file in found_dirs)
self.assert_(file not in found_files)
else:
self.assert_(file in found_files)
self.assert_(file not in found_dirs)
found_files = []
found_dirs = []
recurse_path('/')
for file in self.env.files:
if not file.startswith('/'):
self.assert_(file not in found_dirs)
self.assert_(file not in found_files)
elif file.endswith('/'):
self.assert_(file in found_dirs)
self.assert_(file not in found_files)
else:
self.assert_(file in found_files)
self.assert_(file not in found_dirs)
def testContainerListing(self):
for format in (None, 'json', 'xml'):
files = self.env.container.files(parms={'format':format})
if isinstance(files[0], dict):
files = [str(x['name']) for x in files]
self.assertEquals(files, sorted(self.env.files))
for format in ('json', 'xml'):
for file in self.env.container.files(parms={'format':format}):
self.assert_(int(file['bytes']) >= 0)
self.assert_(file.has_key('last_modified'))
if file['name'].endswith('/'):
self.assertEquals(file['content_type'],
'application/directory')
def testStructure(self):
def assert_listing(path, list):
files = self.env.container.files(parms={'path':path})
self.assertEquals(sorted(list, cmp=locale.strcoll), files)
assert_listing('/', ['/dir1/', '/dir2/', '/file1', '/file A'])
assert_listing('/dir1',
['/dir1/file2', '/dir1/subdir1/', '/dir1/subdir2/'])
assert_listing('/dir1/',
['/dir1/file2', '/dir1/subdir1/', '/dir1/subdir2/'])
assert_listing('/dir1/subdir1',
['/dir1/subdir1/subsubdir2/', '/dir1/subdir1/file2',
'/dir1/subdir1/file3', '/dir1/subdir1/file4',
'/dir1/subdir1/subsubdir1/'])
assert_listing('/dir1/subdir2', [])
assert_listing('', ['file1', 'dir1/', 'dir2/'])
assert_listing('dir1', ['dir1/file2', 'dir1/subdir1/',
'dir1/subdir2/', 'dir1/subdir with spaces/',
'dir1/subdir+with{whatever/'])
assert_listing('dir1/subdir1',
['dir1/subdir1/file4', 'dir1/subdir1/subsubdir2/',
'dir1/subdir1/file2', 'dir1/subdir1/file3',
'dir1/subdir1/subsubdir1/'])
assert_listing('dir1/subdir1/subsubdir1',
['dir1/subdir1/subsubdir1/file7',
'dir1/subdir1/subsubdir1/file5',
'dir1/subdir1/subsubdir1/file8',
'dir1/subdir1/subsubdir1/file6'])
assert_listing('dir1/subdir1/subsubdir1/',
['dir1/subdir1/subsubdir1/file7',
'dir1/subdir1/subsubdir1/file5',
'dir1/subdir1/subsubdir1/file8',
'dir1/subdir1/subsubdir1/file6'])
assert_listing('dir1/subdir with spaces/',
['dir1/subdir with spaces/file B'])
class TestFileEnv:
@classmethod
def setUp(cls):
cls.conn = Connection(config)
cls.conn.authenticate()
cls.account = Account(cls.conn, config.get('account',
config['username']))
cls.account.delete_containers()
cls.container = cls.account.container(Utils.create_name())
if not cls.container.create():
raise ResponseError(cls.conn.response)
cls.file_size = 128
class TestFileDev(Base):
env = TestFileEnv
set_up = False
class TestFileDevUTF8(Base2, TestFileDev):
set_up = False
class TestFile(Base):
env = TestFileEnv
set_up = False
def testCopy(self):
# makes sure to test encoded characters"
source_filename = 'dealde%2Fl04 011e%204c8df/flash.png'
file = self.env.container.file(source_filename)
metadata = {}
for i in range(1):
metadata[Utils.create_name()] = Utils.create_name()
data = file.write_random()
file.sync_metadata(metadata)
dest_cont = self.env.account.container(Utils.create_name())
self.assert_(dest_cont.create())
# copy both from within and across containers
for cont in (self.env.container, dest_cont):
# copy both with and without initial slash
for prefix in ('', '/'):
dest_filename = Utils.create_name()
file = self.env.container.file(source_filename)
file.copy('%s%s' % (prefix, cont), dest_filename)
self.assert_(dest_filename in cont.files())
file = cont.file(dest_filename)
self.assert_(data == file.read())
self.assert_(file.initialize())
self.assert_(metadata == file.metadata)
def testCopy404s(self):
source_filename = Utils.create_name()
file = self.env.container.file(source_filename)
file.write_random()
dest_cont = self.env.account.container(Utils.create_name())
self.assert_(dest_cont.create())
for prefix in ('', '/'):
# invalid source container
source_cont = self.env.account.container(Utils.create_name())
file = source_cont.file(source_filename)
self.assert_(not file.copy('%s%s' % (prefix, self.env.container),
Utils.create_name()))
self.assert_status(404)
self.assert_(not file.copy('%s%s' % (prefix, dest_cont),
Utils.create_name()))
self.assert_status(404)
# invalid source object
file = self.env.container.file(Utils.create_name())
self.assert_(not file.copy('%s%s' % (prefix, self.env.container),
Utils.create_name()))
self.assert_status(404)
self.assert_(not file.copy('%s%s' % (prefix, dest_cont),
Utils.create_name()))
self.assert_status(404)
# invalid destination container
file = self.env.container.file(source_filename)
self.assert_(not file.copy('%s%s' % (prefix, Utils.create_name()),
Utils.create_name()))
def testCopyNoDestinationHeader(self):
source_filename = Utils.create_name()
file = self.env.container.file(source_filename)
file.write_random()
file = self.env.container.file(source_filename)
self.assert_(not file.copy(Utils.create_name(), Utils.create_name(),
cfg={'no_destination': True}))
self.assert_status(412)
def testCopyDestinationSlashProblems(self):
source_filename = Utils.create_name()
file = self.env.container.file(source_filename)
file.write_random()
# no slash
self.assert_(not file.copy(Utils.create_name(), Utils.create_name(),
cfg={'destination': Utils.create_name()}))
self.assert_status(412)
def testCopyFromHeader(self):
source_filename = Utils.create_name()
file = self.env.container.file(source_filename)
metadata = {}
for i in range(1):
metadata[Utils.create_name()] = Utils.create_name()
file.metadata = metadata
data = file.write_random()
dest_cont = self.env.account.container(Utils.create_name())
self.assert_(dest_cont.create())
# copy both from within and across containers
for cont in (self.env.container, dest_cont):
# copy both with and without initial slash
for prefix in ('', '/'):
dest_filename = Utils.create_name()
file = cont.file(dest_filename)
file.write(hdrs={'X-Copy-From': '%s%s/%s' % (prefix,
self.env.container.name, source_filename)})
self.assert_(dest_filename in cont.files())
file = cont.file(dest_filename)
self.assert_(data == file.read())
self.assert_(file.initialize())
self.assert_(metadata == file.metadata)
def testCopyFromHeader404s(self):
source_filename = Utils.create_name()
file = self.env.container.file(source_filename)
file.write_random()
for prefix in ('', '/'):
# invalid source container
file = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file.write,
hdrs={'X-Copy-From': '%s%s/%s' % (prefix,
Utils.create_name(), source_filename)})
self.assert_status(404)
# invalid source object
file = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file.write,
hdrs={'X-Copy-From': '%s%s/%s' % (prefix,
self.env.container.name, Utils.create_name())})
self.assert_status(404)
# invalid destination container
dest_cont = self.env.account.container(Utils.create_name())
file = dest_cont.file(Utils.create_name())
self.assertRaises(ResponseError, file.write,
hdrs={'X-Copy-From': '%s%s/%s' % (prefix,
self.env.container.name, source_filename)})
self.assert_status(404)
def testNameLimit(self):
limit = 1024
for l in (1, 10, limit/2, limit-1, limit, limit+1, limit*2):
file = self.env.container.file('a'*l)
if l <= limit:
self.assert_(file.write())
self.assert_status(201)
else:
self.assertRaises(ResponseError, file.write)
self.assert_status(400)
def testQuestionMarkInName(self):
if Utils.create_name == Utils.create_ascii_name:
file_name = list(Utils.create_name())
file_name[random.randint(2, len(file_name)-2)] = '?'
file_name = "".join(file_name)
else:
file_name = Utils.create_name(6) + '?' + Utils.create_name(6)
file = self.env.container.file(file_name)
self.assert_(file.write(cfg={'no_path_quote':True}))
self.assert_(file_name not in self.env.container.files())
self.assert_(file_name.split('?')[0] in self.env.container.files())
def testDeleteThen404s(self):
file = self.env.container.file(Utils.create_name())
self.assert_(file.write_random())
self.assert_status(201)
self.assert_(file.delete())
self.assert_status(204)
file.metadata = {Utils.create_name(): Utils.create_name()}
for method in (file.info, file.read, file.sync_metadata, \
file.delete):
self.assertRaises(ResponseError, method)
self.assert_status(404)
def testBlankMetadataName(self):
file = self.env.container.file(Utils.create_name())
file.metadata = {'': Utils.create_name()}
self.assertRaises(ResponseError, file.write_random)
self.assert_status(400)
def testMetadataNumberLimit(self):
number_limit = 90
for i in (number_limit-10, number_limit-1, number_limit,
number_limit+1, number_limit+10, number_limit+100):
size_limit = 4096
j = size_limit/(i * 2)
size = 0
metadata = {}
while len(metadata.keys()) < i:
key = Utils.create_name()
val = Utils.create_name()
if len(key) > j:
key = key[:j]
val = val[:j]
size += len(key) + len(val)
metadata[key] = val
file = self.env.container.file(Utils.create_name())
file.metadata = metadata
if i <= number_limit:
self.assert_(file.write())
self.assert_status(201)
self.assert_(file.sync_metadata())
self.assert_status((201, 202))
else:
self.assertRaises(ResponseError, file.write)
self.assert_status(400)
file.metadata = {}
self.assert_(file.write())
self.assert_status(201)
file.metadata = metadata
self.assertRaises(ResponseError, file.sync_metadata)
self.assert_status(400)
def testContentTypeGuessing(self):
file_types = {'wav': 'audio/x-wav', 'txt': 'text/plain',
'zip': 'application/zip'}
container = self.env.account.container(Utils.create_name())
self.assert_(container.create())
for i in file_types.keys():
file = container.file(Utils.create_name() + '.' + i)
file.write('', cfg={'no_content_type':True})
file_types_read = {}
for i in container.files(parms={'format': 'json'}):
file_types_read[i['name'].split('.')[1]] = i['content_type']
self.assertEquals(file_types, file_types_read)
def testRangedGets(self):
file_length = 10000
range_size = file_length/10
file = self.env.container.file(Utils.create_name())
data = file.write_random(file_length)
for i in range(0, file_length, range_size):
range_string = 'bytes=%d-%d' % (i, i+range_size-1)
hdrs = {'Range': range_string}
self.assert_(data[i:i+range_size] == file.read(hdrs=hdrs),
range_string)
range_string = 'bytes=-%d' % (i)
hdrs = {'Range': range_string}
self.assert_(file.read(hdrs=hdrs) == data[-i:], range_string)
range_string = 'bytes=%d-' % (i)
hdrs = {'Range': range_string}
self.assert_(file.read(hdrs=hdrs) == data[i-file_length:],
range_string)
range_string = 'bytes=%d-%d' % (file_length+1000, file_length+2000)
hdrs = {'Range': range_string}
self.assertRaises(ResponseError, file.read, hdrs=hdrs)
self.assert_status(416)
range_string = 'bytes=%d-%d' % (file_length-1000, file_length+2000)
hdrs = {'Range': range_string}
self.assert_(file.read(hdrs=hdrs) == data[-1000:], range_string)
        # a Range header without the "bytes=" prefix is invalid and must be
        # ignored, so the full body comes back
        hdrs = {'Range': '0-4'}
        self.assert_(file.read(hdrs=hdrs) == data, 'Range: 0-4')
for r in ('BYTES=0-999', 'bytes = 0-999', 'BYTES = 0 - 999',
'bytes = 0 - 999', 'bytes=0 - 999', 'bytes=0-999 '):
self.assert_(file.read(hdrs={'Range': r}) == data[0:1000])
def testFileSizeLimit(self):
limit = 5*2**30 + 2
tsecs = 3
for i in (limit-100, limit-10, limit-1, limit, limit+1, limit+10,
limit+100):
file = self.env.container.file(Utils.create_name())
if i <= limit:
self.assert_(timeout(tsecs, file.write,
cfg={'set_content_length':i}))
else:
self.assertRaises(ResponseError, timeout, tsecs,
file.write, cfg={'set_content_length':i})
def testNoContentLengthForPut(self):
file = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file.write, 'testing',
cfg={'no_content_length':True})
self.assert_status(411)
def testDelete(self):
file = self.env.container.file(Utils.create_name())
file.write_random(self.env.file_size)
self.assert_(file.name in self.env.container.files())
self.assert_(file.delete())
self.assert_(file.name not in self.env.container.files())
def testBadHeaders(self):
file_length = 100
# no content type on puts should be ok
file = self.env.container.file(Utils.create_name())
file.write_random(file_length, cfg={'no_content_type':True})
self.assert_status(201)
# content length x
self.assertRaises(ResponseError, file.write_random, file_length,
hdrs={'Content-Length':'X'}, cfg={'no_content_length':True})
self.assert_status(400)
# bad request types
#for req in ('LICK', 'GETorHEAD_base', 'container_info', 'best_response'):
for req in ('LICK', 'GETorHEAD_base'):
self.env.account.conn.make_request(req)
self.assert_status(405)
# bad range headers
self.assert_(len(file.read(hdrs={'Range':'parsecs=8-12'})) == \
file_length)
self.assert_status(200)
def testMetadataLengthLimits(self):
key_limit, value_limit = 128, 256
lengths = [[key_limit, value_limit], [key_limit, value_limit+1], \
[key_limit+1, value_limit], [key_limit, 0], \
[key_limit, value_limit*10], [key_limit*10, value_limit]]
for l in lengths:
metadata = {'a'*l[0]: 'b'*l[1]}
file = self.env.container.file(Utils.create_name())
file.metadata = metadata
if l[0] <= key_limit and l[1] <= value_limit:
self.assert_(file.write())
self.assert_status(201)
self.assert_(file.sync_metadata())
else:
self.assertRaises(ResponseError, file.write)
self.assert_status(400)
file.metadata = {}
self.assert_(file.write())
self.assert_status(201)
file.metadata = metadata
self.assertRaises(ResponseError, file.sync_metadata)
self.assert_status(400)
def testEtagWayoff(self):
file = self.env.container.file(Utils.create_name())
hdrs = {'etag': 'reallylonganddefinitelynotavalidetagvalue'}
self.assertRaises(ResponseError, file.write_random, hdrs=hdrs)
self.assert_status(422)
def testFileCreate(self):
for i in range(10):
file = self.env.container.file(Utils.create_name())
data = file.write_random()
self.assert_status(201)
self.assert_(data == file.read())
self.assert_status(200)
def testHead(self):
file_name = Utils.create_name()
content_type = Utils.create_name()
file = self.env.container.file(file_name)
file.content_type = content_type
file.write_random(self.env.file_size)
md5 = file.md5
file = self.env.container.file(file_name)
info = file.info()
self.assert_status(200)
self.assertEquals(info['content_length'], self.env.file_size)
self.assertEquals(info['etag'], md5)
self.assertEquals(info['content_type'], content_type)
self.assert_(info.has_key('last_modified'))
def testDeleteOfFileThatDoesNotExist(self):
# in container that exists
file = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file.delete)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file = container.file(Utils.create_name())
self.assertRaises(ResponseError, file.delete)
self.assert_status(404)
def testHeadOnFileThatDoesNotExist(self):
# in container that exists
file = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file.info)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file = container.file(Utils.create_name())
self.assertRaises(ResponseError, file.info)
self.assert_status(404)
def testMetadataOnPost(self):
file = self.env.container.file(Utils.create_name())
file.write_random(self.env.file_size)
for i in range(10):
metadata = {}
for i in range(10):
metadata[Utils.create_name()] = Utils.create_name()
file.metadata = metadata
self.assert_(file.sync_metadata())
self.assert_status((201, 202))
file = self.env.container.file(file.name)
self.assert_(file.initialize())
self.assert_status(200)
self.assertEquals(file.metadata, metadata)
def testGetContentType(self):
file_name = Utils.create_name()
content_type = Utils.create_name()
file = self.env.container.file(file_name)
file.content_type = content_type
file.write_random()
file = self.env.container.file(file_name)
file.read()
self.assertEquals(content_type, file.content_type)
def testGetOnFileThatDoesNotExist(self):
# in container that exists
file = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file.read)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file = container.file(Utils.create_name())
self.assertRaises(ResponseError, file.read)
self.assert_status(404)
def testPostOnFileThatDoesNotExist(self):
# in container that exists
file = self.env.container.file(Utils.create_name())
file.metadata['Field'] = 'Value'
self.assertRaises(ResponseError, file.sync_metadata)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file = container.file(Utils.create_name())
file.metadata['Field'] = 'Value'
self.assertRaises(ResponseError, file.sync_metadata)
self.assert_status(404)
def testMetadataOnPut(self):
for i in range(10):
metadata = {}
for j in range(10):
metadata[Utils.create_name()] = Utils.create_name()
file = self.env.container.file(Utils.create_name())
file.metadata = metadata
file.write_random(self.env.file_size)
file = self.env.container.file(file.name)
self.assert_(file.initialize())
self.assert_status(200)
self.assertEquals(file.metadata, metadata)
def testSerialization(self):
container = self.env.account.container(Utils.create_name())
self.assert_(container.create())
files = []
for i in (0, 1, 10, 100, 1000, 10000):
files.append({'name': Utils.create_name(), \
'content_type': Utils.create_name(), 'bytes':i})
write_time = time.time()
for f in files:
file = container.file(f['name'])
file.content_type = f['content_type']
file.write_random(f['bytes'])
f['hash'] = file.md5
f['json'] = False
f['xml'] = False
write_time = time.time() - write_time
for format in ['json', 'xml']:
for file in container.files(parms={'format': format}):
found = False
for f in files:
if f['name'] != file['name']:
continue
self.assertEquals(file['content_type'],
f['content_type'])
self.assertEquals(int(file['bytes']), f['bytes'])
d = datetime.strptime(file['last_modified'].\
split('.')[0], "%Y-%m-%dT%H:%M:%S")
lm = time.mktime(d.timetuple())
if f.has_key('last_modified'):
self.assertEquals(f['last_modified'], lm)
else:
f['last_modified'] = lm
f[format] = True
found = True
self.assert_(found, 'Unexpected file %s found in ' \
'%s listing' % (file['name'], format))
headers = dict(self.env.conn.response.getheaders())
if format == 'json':
self.assertEquals(headers['content-type'],
'application/json; charset=utf-8')
elif format == 'xml':
self.assertEquals(headers['content-type'],
'application/xml; charset=utf-8')
lm_diff = max([f['last_modified'] for f in files]) - \
min([f['last_modified'] for f in files])
self.assert_(lm_diff < write_time + 1, 'Diff in last ' + \
'modified times should be less than time to write files')
for f in files:
for format in ['json', 'xml']:
self.assert_(f[format], 'File %s not found in %s listing' \
% (f['name'], format))
def testStackedOverwrite(self):
file = self.env.container.file(Utils.create_name())
for i in range(1, 11):
data = file.write_random(512)
file.write(data)
self.assert_(file.read() == data)
def testTooLongName(self):
file = self.env.container.file('x'*1025)
self.assertRaises(ResponseError, file.write)
self.assert_status(400)
def testZeroByteFile(self):
file = self.env.container.file(Utils.create_name())
self.assert_(file.write(''))
self.assert_(file.name in self.env.container.files())
self.assert_(file.read() == '')
def testEtagResponse(self):
file = self.env.container.file(Utils.create_name())
data = StringIO.StringIO(file.write_random(512))
etag = File.compute_md5sum(data)
headers = dict(self.env.conn.response.getheaders())
self.assert_('etag' in headers.keys())
header_etag = headers['etag'].strip('"')
self.assertEquals(etag, header_etag)
def testChunkedPut(self):
data = File.random_data(10000)
etag = File.compute_md5sum(data)
for i in (1, 10, 100, 1000):
file = self.env.container.file(Utils.create_name())
for j in chunks(data, i):
file.chunked_write(j)
self.assert_(file.chunked_write())
self.assert_(data == file.read())
info = file.info()
self.assertEquals(etag, info['etag'])
class TestFileUTF8(Base2, TestFile):
set_up = False
class TestFileComparisonEnv:
@classmethod
def setUp(cls):
cls.conn = Connection(config)
cls.conn.authenticate()
cls.account = Account(cls.conn, config.get('account',
config['username']))
cls.account.delete_containers()
cls.container = cls.account.container(Utils.create_name())
if not cls.container.create():
raise ResponseError(cls.conn.response)
cls.file_count = 20
cls.file_size = 128
cls.files = list()
for x in range(cls.file_count):
file = cls.container.file(Utils.create_name())
file.write_random(cls.file_size)
cls.files.append(file)
cls.time_old = time.asctime(time.localtime(time.time()-86400))
cls.time_new = time.asctime(time.localtime(time.time()+86400))
class TestFileComparison(Base):
env = TestFileComparisonEnv
set_up = False
def testIfMatch(self):
for file in self.env.files:
hdrs = {'If-Match': file.md5}
self.assert_(file.read(hdrs=hdrs))
hdrs = {'If-Match': 'bogus'}
self.assertRaises(ResponseError, file.read, hdrs=hdrs)
self.assert_status(412)
def testIfNoneMatch(self):
for file in self.env.files:
hdrs = {'If-None-Match': 'bogus'}
self.assert_(file.read(hdrs=hdrs))
hdrs = {'If-None-Match': file.md5}
self.assertRaises(ResponseError, file.read, hdrs=hdrs)
self.assert_status(304)
def testIfModifiedSince(self):
for file in self.env.files:
hdrs = {'If-Modified-Since': self.env.time_old}
self.assert_(file.read(hdrs=hdrs))
hdrs = {'If-Modified-Since': self.env.time_new}
self.assertRaises(ResponseError, file.read, hdrs=hdrs)
self.assert_status(304)
def testIfUnmodifiedSince(self):
for file in self.env.files:
hdrs = {'If-Unmodified-Since': self.env.time_new}
self.assert_(file.read(hdrs=hdrs))
hdrs = {'If-Unmodified-Since': self.env.time_old}
self.assertRaises(ResponseError, file.read, hdrs=hdrs)
self.assert_status(412)
def testIfMatchAndUnmodified(self):
for file in self.env.files:
hdrs = {'If-Match': file.md5, 'If-Unmodified-Since': \
self.env.time_new}
self.assert_(file.read(hdrs=hdrs))
hdrs = {'If-Match': 'bogus', 'If-Unmodified-Since': \
self.env.time_new}
self.assertRaises(ResponseError, file.read, hdrs=hdrs)
self.assert_status(412)
hdrs = {'If-Match': file.md5, 'If-Unmodified-Since': \
self.env.time_old}
self.assertRaises(ResponseError, file.read, hdrs=hdrs)
self.assert_status(412)
class TestFileComparisonUTF8(Base2, TestFileComparison):
set_up = False
if __name__ == '__main__':
unittest.main()
|
pvo/swift
|
test/functional/tests.py
|
Python
|
apache-2.0
| 53,723
|
'''
Created on Jun 6, 2013
@author: sean
'''
from __future__ import unicode_literals
# Standard library imports
from io import BytesIO, StringIO
import codecs
import logging
# Third party imports
import pkg_resources
from requests.packages.urllib3.filepost import choose_boundary, iter_fields
from requests.packages.urllib3.packages import six
import requests
encoder = codecs.lookup('utf-8')[0]
log = logging.getLogger('binstar.requests_ext')
def writer(lst):
    # unused no-op helper; ``encoder`` (the utf-8 stream encoder factory)
    # requires a text argument, so it is never actually invoked here
    pass
try:
long
except NameError:
long = int
try:
unicode
except NameError:
unicode = str
def encode_multipart_formdata_stream(fields, boundary=None):
"""
Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
:param fields:
Dictionary of fields or list of (key, value) or (key, value, MIME type)
field tuples. The key is treated as the field name, and the value as
the body of the form-data bytes. If the value is a tuple of two
elements, then the first element is treated as the filename of the
form-data section and a suitable MIME type is guessed based on the
filename. If the value is a tuple of three elements, then the third
element is treated as an explicit MIME type of the form-data section.
Field names and filenames must be unicode.
:param boundary:
If not specified, then a random boundary will be generated using
:func:`mimetools.choose_boundary`.
"""
body = []
def body_write(item):
if isinstance(item, bytes):
item = BytesIO(item)
elif isinstance(item, (str, unicode)):
item = StringIO(item)
body.append(item)
body_write_encode = lambda item: body.append(BytesIO(item.encode('utf-8')))
if boundary is None:
boundary = choose_boundary()
for fieldname, value in iter_fields(fields):
body_write_encode('--%s\r\n' % (boundary))
if isinstance(value, tuple):
if len(value) == 3:
filename, data, content_type = value
else:
filename, data = value
from mimetypes import guess_type
content_type, _ = guess_type(filename)
if content_type is None:
content_type = 'application/octet-stream'
body_write_encode('Content-Disposition: form-data; name="%s"; '
'filename="%s"\r\n' % (fieldname, filename))
body_write_encode('Content-Type: %s\r\n\r\n' %
(content_type,))
else:
data = value
body_write_encode('Content-Disposition: form-data; name="%s"\r\n'
% (fieldname))
body_write(b'\r\n')
if isinstance(data, (int, long)):
data = str(data) # Backwards compatibility
if isinstance(data, six.text_type):
body_write_encode(data)
else:
body_write(data)
body_write(b'\r\n')
body_write_encode('--%s--\r\n' % (boundary))
content_type = 'multipart/form-data; boundary=%s' % (boundary)
return body, content_type
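def _example_encode_multipart():
    # illustration only, not part of the original module: plain fields are
    # UTF-8 encoded, two-tuples become file sections with a guessed MIME
    # type, and the result is a list of file-like parts plus the header value
    fields = {'package': 'example', 'attachment': ('hello.txt', b'hi there')}
    body, content_type = encode_multipart_formdata_stream(fields,
                                                          boundary='XYZ')
    raw = b''.join(part.read() for part in body)
    assert content_type == 'multipart/form-data; boundary=XYZ'
    assert b'Content-Disposition: form-data; name="package"' in raw
    assert b'Content-Type: text/plain' in raw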
class MultiPartIO(object):
def __init__(self, body, callback=None):
self.to_read = body
self.have_read = []
self._total = 0
self.callback = callback
def read(self, n= -1):
if self.callback:
self.callback(self.tell(), self._total)
        if n == -1:
            # join as bytes: under ``unicode_literals`` a bare '' is unicode
            # and would force an implicit, lossy decode of binary payloads
            return b''.join(fd.read() for fd in self.to_read)
        if not self.to_read:
            return b''
        while self.to_read:
            data = self.to_read[0].read(n)
            if data:
                return data
            fd = self.to_read.pop(0)
            self.have_read.append(fd)
        return b''
def tell(self):
cursor = sum(fd.tell() for fd in self.have_read)
if self.to_read:
cursor += self.to_read[0].tell()
return cursor
def seek(self, pos, mode=0):
assert pos == 0
        if mode == 0:
self.to_read = self.have_read + self.to_read
self.have_read = []
[fd.seek(pos, mode) for fd in self.to_read]
self.cursor = 0
        elif mode == 2:
self.have_read = self.have_read + self.to_read
self.to_read = []
[fd.seek(pos, mode) for fd in self.have_read]
self._total = self.tell()
def stream_multipart(data, files=None, callback=None):
from itertools import chain
if files:
fields = chain(iter_fields(data), iter_fields(files))
else:
fields = data
body, content_type = encode_multipart_formdata_stream(fields)
data = MultiPartIO(body, callback=callback)
headers = {'Content-Type':content_type}
return data, headers
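def _example_stream_multipart():
    # illustration only; the endpoint below is a placeholder, not a real
    # URL. The returned object exposes read()/tell()/seek() so requests can
    # stream it, and the callback receives (bytes_read, total_bytes) as the
    # upload progresses.
    def progress(read, total):
        log.debug('uploaded %s of %s bytes', read, total)
    data, headers = stream_multipart(
        {'version': '1.0'},
        files={'file': ('pkg.tar.bz2', b'\x00\x01')},
        callback=progress)
    # requests.post('https://example.invalid/upload', data=data,
    #               headers=headers)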
try:
import requests.packages.urllib3.contrib.pyopenssl
import OpenSSL.SSL
except ImportError:
HAS_OPENSSL = False
OpenSslError = None
else:
HAS_OPENSSL = True
OpenSslError = OpenSSL.SSL.Error
requests_version = pkg_resources.parse_version(requests.__version__)
# The first version that shipped urllib3 with issue shazow/urllib3#717
min_requests_version = pkg_resources.parse_version('2.8')
# TODO: add max_requests_version when requests ships with a fixed urllib3 to
# limit warning to broken versions
HAS_BROKEN_URLLIB3 = min_requests_version <= requests_version
def warn_openssl():
'''
Output a warning about requests incompatibility
'''
if HAS_OPENSSL and HAS_BROKEN_URLLIB3:
log.error(
'The version of requests you are using is incompatible with '
'PyOpenSSL. Please downgrade requests to requests==2.7.0 or '
'uninstall PyOpenSSL.\n'
'See https://github.com/anaconda-server/anaconda-client/issues/222 '
'for more details.')
|
GiovanniConserva/TestDeploy
|
venv/Lib/site-packages/binstar_client/requests_ext.py
|
Python
|
bsd-3-clause
| 5,894
|
# coding: utf-8
from __future__ import (absolute_import, division, print_function, unicode_literals)
import json
import logging
import boto3
from boto3.dynamodb.conditions import Key, Attr
import os
import sys
import re
# Path to modules needed to package local lambda function for upload
currentdir = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(currentdir, "./vendored"))
# Modules downloaded into the vendored directory
from netaddr import IPNetwork, IPAddress
# Logging for Serverless
log = logging.getLogger()
log.setLevel(logging.DEBUG)
# Initializing AWS services
sns = boto3.client('sns')
dynamodb = boto3.resource('dynamodb')
def handler(event, context):
log.debug("Received event {}".format(json.dumps(event)))
cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
accountIdFound = None
ipAddressFound = None
accountEmailAddressFound = None
# Header validation
try:
print('header:accountCbAlias', re.match("^[a-z]{3,4}-[a-z]{3,5}$", event['params']['header']['accountCbAlias']))
# Test if the accountCbAlias key exists
getCbInfo = cbInfo.get_item(
Key={
'accountCbAlias': event['params']['header']['accountCbAlias']
}
)
# Test if the value of accountCbAlias is valid, it will be if cbInfo returns an entry.
accountCbAlias = getCbInfo['Item']['accountCbAlias']
except Exception as e:
print(e)
print("regex not matching any values passed in request")
raise Exception({"code": "4000", "message": "ERROR: Bad request"})
# accountId validation
try:
if event['context']['resource-path'] == '/accounts' and event['params']['querystring']['accountid']:
if re.match("^[0-9]{12}$", event['params']['querystring']['accountid']) or \
re.match("^[0-9]{4}-[0-9]{4}-[0-9]{4}$", event['params']['querystring']['accountid']):
accountId = re.sub('-', '', event['params']['querystring']['accountid'])
accountIdFound = True
                print('accountIdFound', accountIdFound)
else:
accountIdFound = False
                print('accountIdFound', accountIdFound)
except KeyError as e:
print(e)
print("No accountId or bad accountId passed")
accountIdFound = False
        print('accountIdFound', accountIdFound)
# email address validation
try:
if event['context']['resource-path'] == '/accounts' and event['params']['querystring']['emailaddress']:
if re.match("^([a-zA-Z0-9_\-\.]+)@([a-zA-Z0-9_\-\.]+)\.([a-zA-Z]{2,5})$",
event['params']['querystring']['emailaddress']):
accountEmailAddress = event['params']['querystring']['emailaddress']
accountEmailAddressFound = True
print('accountEmailAddressFound', accountEmailAddressFound)
else:
accountEmailAddressFound = False
print('accountEmailAddressFound', accountEmailAddressFound)
except KeyError as e:
print(e)
print("No emailaddress or bad emailaddress passed")
accountEmailAddressFound = False
print('accountEmailAddressFound', accountEmailAddressFound)
# ip address validation
try:
if event['context']['resource-path'] == '/accounts' and event['params']['querystring']['ipaddress']:
if re.match("^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$",
event['params']['querystring']['ipaddress']):
ipAddress = event['params']['querystring']['ipaddress']
ipAddressFound = True
print('ipAddressFound', ipAddressFound)
else:
ipAddressFound = False
print('ipAddressFound', ipAddressFound)
except KeyError as e:
print(e)
print("No ipaddress or bad ipaddress passed")
ipAddressFound = False
print('ipAddressFound', ipAddressFound)
# test whether no query parameters were passed
if accountIdFound is False and accountEmailAddressFound is False and ipAddressFound is False:
raise Exception({"code": "4000", "message": "ERROR: Bad request"})
elif accountIdFound is True:
getAccountInfo = accountInfo.query(
IndexName='gsiAccountId',
KeyConditionExpression=Key('accountId').eq(accountId)
)
if getAccountInfo['Count'] >= 1:
return {'accountId': getAccountInfo['Items'][0]['accountId'],
'accountStatus': getAccountInfo['Items'][0]['accountStatus'],
'emailAddress': getAccountInfo['Items'][0]['accountEmailAddress'],
'regulated': getAccountInfo['Items'][0]['accountRegulated'],
'accountName': getAccountInfo['Items'][0]['accountTagLongProjectName'],
'costCenter': getAccountInfo['Items'][0]['accountTagCostCenter'],
'environment': getAccountInfo['Items'][0]['accountTagEnvironment'],
'department': getAccountInfo['Items'][0]['requestorDepartment'],
'requestorName': getAccountInfo['Items'][0]['requestorFullName'],
'technicalContactName': getAccountInfo['Items'][0]['accountTechnicalContactFullName']
}
elif getAccountInfo['Count'] == 0:
raise Exception({"code": "4040", "message": "ERROR: Not found"})
elif accountEmailAddressFound is True:
try:
getAccountInfo = accountInfo.get_item(
Key={
'accountEmailAddress': accountEmailAddress
}
)
return {'accountId': getAccountInfo['Item']['accountId'],
'accountStatus': getAccountInfo['Item']['accountStatus'],
'emailAddress': getAccountInfo['Item']['accountEmailAddress'],
'regulated': getAccountInfo['Item']['accountRegulated'],
'accountName': getAccountInfo['Item']['accountTagLongProjectName'],
'costCenter': getAccountInfo['Item']['accountTagCostCenter'],
'environment': getAccountInfo['Item']['accountTagEnvironment'],
'department': getAccountInfo['Item']['requestorDepartment'],
'requestorName': getAccountInfo['Item']['requestorFullName'],
'technicalContactName': getAccountInfo['Item']['accountTechnicalContactFullName']
}
except KeyError as e:
print(e)
print("No account found for given email address")
raise Exception({"code": "4040", "message": "ERROR: Not found"})
elif ipAddressFound is True:
getAccountInfo = accountInfo.scan(
ProjectionExpression='#accountVpcCidr,'
'accountId,accountEmailAddress,'
'accountRegulated,'
'accountStatus,'
'accountTagLongProjectName,'
'requestorFullName,'
'accountTechnicalContactFullName',
FilterExpression='attribute_exists (#accountVpcCidr)',
ExpressionAttributeNames={'#accountVpcCidr': 'accountVpcCidr'}
)
        for i in getAccountInfo['Items']:
            # inspect every region's CIDR: a region key may be missing from
            # the accountVpcCidr map, and the address may fall in any region,
            # so each one must be checked in turn
            for region in ('us-west-1', 'us-west-2', 'us-east-1'):
                cidr = i['accountVpcCidr'].get(region)
                if cidr and IPAddress(ipAddress) in IPNetwork(cidr):
                    return {'accountId': i['accountId'],
                            'accountStatus': i['accountStatus'],
                            'emailAddress': i['accountEmailAddress'],
                            'regulated': i['accountRegulated'],
                            'accountName': i['accountTagLongProjectName'],
                            'requestorName': i['requestorFullName'],
                            'technicalContactName': i['accountTechnicalContactFullName'],
                            'vpcCidr': i['accountVpcCidr']
                            }
if event['context']['resource-path'] == '/accounts/ids':
getAccountInfo = accountInfo.scan(
ProjectionExpression='accountId',
FilterExpression=Attr('accountId').exists() & Attr('accountStatus').eq('ACTIVE')
)
accountIds = list()
for i in getAccountInfo['Items']:
accountIds.append(i['accountId'])
return {'accountCbAlias': accountCbAlias,
'accountIds': accountIds,
'count': getAccountInfo['Count']}
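# Illustration only (hypothetical values): the handler expects an API
# Gateway "method request passthrough" event shaped like
#
#     {
#         "context": {"resource-path": "/accounts"},
#         "params": {
#             "header": {"accountCbAlias": "abc-cloud"},
#             "querystring": {"accountid": "1234-5678-9012"}
#         }
#     }
#
# with exactly one of accountid, emailaddress or ipaddress supplied.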
|
alanwill/aws-tailor
|
sam/functions/talr-inquirer/handler.py
|
Python
|
gpl-3.0
| 10,395
|
# -*- coding: utf-8 -*-
#----------------------------------------------------------
# ir_http modular http routing
#----------------------------------------------------------
import base64
import datetime
import hashlib
import logging
import mimetypes
import os
import re
import sys
import urllib2
import werkzeug
import werkzeug.exceptions
import werkzeug.routing
import werkzeug.urls
import werkzeug.utils
import odoo
from odoo import api, http, models, tools, SUPERUSER_ID
from odoo.exceptions import AccessDenied, AccessError
from odoo.http import request, STATIC_CACHE
from odoo.modules.module import get_resource_path, get_module_path
_logger = logging.getLogger(__name__)
UID_PLACEHOLDER = object()
class ModelConverter(werkzeug.routing.BaseConverter):
def __init__(self, url_map, model=False):
super(ModelConverter, self).__init__(url_map)
self.model = model
self.regex = r'([0-9]+)'
def to_python(self, value):
env = api.Environment(request.cr, UID_PLACEHOLDER, request.context)
return env[self.model].browse(int(value))
def to_url(self, value):
return value.id
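# Illustration only (not part of this module): with the converter above, a
# route can bind a record straight from the URL, e.g.
#
#     @http.route('/partners/<model("res.partner"):partner>', auth='public')
#     def partner_page(self, partner):
#         return partner.name
#
# where "/partners/42" hands the controller res.partner record 42.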
class ModelsConverter(werkzeug.routing.BaseConverter):
def __init__(self, url_map, model=False):
super(ModelsConverter, self).__init__(url_map)
self.model = model
# TODO add support for slug in the form [A-Za-z0-9-] bla-bla-89 -> id 89
self.regex = r'([0-9,]+)'
def to_python(self, value):
env = api.Environment(request.cr, UID_PLACEHOLDER, request.context)
return env[self.model].browse(map(int, value.split(',')))
def to_url(self, value):
return ",".join(value.ids)
class SignedIntConverter(werkzeug.routing.NumberConverter):
regex = r'-?\d+'
num_convert = int
class IrHttp(models.AbstractModel):
_name = 'ir.http'
_description = "HTTP routing"
def _get_converters(self):
return {'model': ModelConverter, 'models': ModelsConverter, 'int': SignedIntConverter}
def _find_handler(self, return_rule=False):
return self.routing_map().bind_to_environ(request.httprequest.environ).match(return_rule=return_rule)
def _auth_method_user(self):
request.uid = request.session.uid
if not request.uid:
raise http.SessionExpiredException("Session expired")
def _auth_method_none(self):
request.uid = None
def _auth_method_public(self):
if not request.session.uid:
request.uid = request.env.ref('base.public_user').id
else:
request.uid = request.session.uid
def _authenticate(self, auth_method='user'):
try:
if request.session.uid:
try:
request.session.check_security()
# what if error in security.check()
# -> res_users.check()
# -> res_users.check_credentials()
except (AccessDenied, http.SessionExpiredException):
# All other exceptions mean undetermined status (e.g. connection pool full),
# let them bubble up
request.session.logout(keep_db=True)
if request.uid is None:
getattr(self, "_auth_method_%s" % auth_method)()
except (AccessDenied, http.SessionExpiredException, werkzeug.exceptions.HTTPException):
raise
except Exception:
_logger.info("Exception during request Authentication.", exc_info=True)
raise AccessDenied()
return auth_method
def _serve_attachment(self):
env = api.Environment(request.cr, SUPERUSER_ID, request.context)
domain = [('type', '=', 'binary'), ('url', '=', request.httprequest.path)]
fields = ['__last_update', 'datas', 'name', 'mimetype', 'checksum']
attach = env['ir.attachment'].search_read(domain, fields)
if attach:
wdate = attach[0]['__last_update']
datas = attach[0]['datas'] or ''
name = attach[0]['name']
checksum = attach[0]['checksum'] or hashlib.sha1(datas).hexdigest()
if (not datas and name != request.httprequest.path and
name.startswith(('http://', 'https://', '/'))):
return werkzeug.utils.redirect(name, 301)
response = werkzeug.wrappers.Response()
server_format = tools.DEFAULT_SERVER_DATETIME_FORMAT
try:
response.last_modified = datetime.datetime.strptime(wdate, server_format + '.%f')
except ValueError:
# just in case we have a timestamp without microseconds
response.last_modified = datetime.datetime.strptime(wdate, server_format)
response.set_etag(checksum)
response.make_conditional(request.httprequest)
if response.status_code == 304:
return response
response.mimetype = attach[0]['mimetype'] or 'application/octet-stream'
response.data = datas.decode('base64')
return response
def _handle_exception(self, exception):
# If handle_exception returns something different than None, it will be used as a response
# This is done first as the attachment path may
# not match any HTTP controller
if isinstance(exception, werkzeug.exceptions.HTTPException) and exception.code == 404:
attach = self._serve_attachment()
if attach:
return attach
# Don't handle exception but use werkeug debugger if server in --dev mode
if tools.config['dev_mode']:
raise
try:
return request._handle_exception(exception)
except AccessDenied:
return werkzeug.exceptions.Forbidden()
def _dispatch(self):
# locate the controller method
try:
rule, arguments = self._find_handler(return_rule=True)
func = rule.endpoint
except werkzeug.exceptions.NotFound, e:
return self._handle_exception(e)
# check authentication level
try:
auth_method = self._authenticate(func.routing["auth"])
except Exception as e:
return self._handle_exception(e)
processing = self._postprocess_args(arguments, rule)
if processing:
return processing
# set and execute handler
try:
request.set_handler(func, arguments, auth_method)
result = request.dispatch()
if isinstance(result, Exception):
raise result
except Exception, e:
return self._handle_exception(e)
return result
def _postprocess_args(self, arguments, rule):
""" post process arg to set uid on browse records """
for name, arg in arguments.items():
if isinstance(arg, models.BaseModel) and arg._uid is UID_PLACEHOLDER:
arguments[name] = arg.sudo(request.uid)
if not arg.exists():
return self._handle_exception(werkzeug.exceptions.NotFound())
def routing_map(self):
if not hasattr(self, '_routing_map'):
_logger.info("Generating routing map")
installed = request.registry._init_modules - {'web'}
if tools.config['test_enable']:
installed.add(odoo.modules.module.current_test)
mods = [''] + odoo.conf.server_wide_modules + sorted(installed)
self._routing_map = http.routing_map(mods, False, converters=self._get_converters())
return self._routing_map
def content_disposition(self, filename):
filename = tools.ustr(filename)
escaped = urllib2.quote(filename.encode('utf8'))
browser = request.httprequest.user_agent.browser
version = int((request.httprequest.user_agent.version or '0').split('.')[0])
if browser == 'msie' and version < 9:
return "attachment; filename=%s" % escaped
elif browser == 'safari' and version < 537:
return u"attachment; filename=%s" % filename.encode('ascii', 'replace')
else:
return "attachment; filename*=UTF-8''%s" % escaped
def binary_content(self, xmlid=None, model='ir.attachment', id=None, field='datas', unique=False, filename=None, filename_field='datas_fname', download=False, mimetype=None, default_mimetype='application/octet-stream', env=None):
""" Get file, attachment or downloadable content
        If both the ``xmlid`` and ``id`` parameters are omitted, fetches the default value for the
binary field (via ``default_get``), otherwise fetches the field for
that precise record.
:param str xmlid: xmlid of the record
:param str model: name of the model to fetch the binary from
:param int id: id of the record from which to fetch the binary
:param str field: binary field
:param bool unique: add a max-age for the cache control
:param str filename: choose a filename
        :param str filename_field: field holding the filename; if absent, a model-id-field name is built
:param bool download: apply headers to download the file
        :param str mimetype: mimetype of the field (for headers)
        :param str default_mimetype: default mimetype if no mimetype is found
:param Environment env: by default use request.env
:returns: (status, headers, content)
"""
env = env or request.env
# get object and content
obj = None
if xmlid:
obj = env.ref(xmlid, False)
elif id and model in env.registry:
obj = env[model].browse(int(id))
# obj exists
if not obj or not obj.exists() or field not in obj:
return (404, [], None)
# check read access
try:
last_update = obj['__last_update']
except AccessError:
return (403, [], None)
status, headers, content = None, [], None
# attachment by url check
module_resource_path = None
if model == 'ir.attachment' and obj.type == 'url' and obj.url:
url_match = re.match("^/(\w+)/(.+)$", obj.url)
if url_match:
module = url_match.group(1)
module_path = get_module_path(module)
module_resource_path = get_resource_path(module, url_match.group(2))
if module_path and module_resource_path:
module_path = os.path.join(os.path.normpath(module_path), '') # join ensures the path ends with '/'
module_resource_path = os.path.normpath(module_resource_path)
if module_resource_path.startswith(module_path):
with open(module_resource_path, 'r') as f:
content = base64.b64encode(f.read())
last_update = str(os.path.getmtime(module_resource_path))
if not module_resource_path:
module_resource_path = obj.url
if not content:
status = 301
content = module_resource_path
else:
content = obj[field] or ''
# filename
if not filename:
if filename_field in obj:
filename = obj[filename_field]
elif module_resource_path:
filename = os.path.basename(module_resource_path)
else:
filename = "%s-%s-%s" % (obj._model._name, obj.id, field)
# mimetype
if not mimetype:
if 'mimetype' in obj and obj.mimetype and obj.mimetype != 'application/octet-stream':
mimetype = obj.mimetype
elif filename:
mimetype = mimetypes.guess_type(filename)[0]
if not mimetype:
mimetype = default_mimetype
headers.append(('Content-Type', mimetype))
# cache
etag = hasattr(request, 'httprequest') and request.httprequest.headers.get('If-None-Match')
retag = hashlib.md5(last_update).hexdigest()
status = status or (304 if etag == retag else 200)
headers.append(('ETag', retag))
headers.append(('Cache-Control', 'max-age=%s' % (STATIC_CACHE if unique else 0)))
# content-disposition default name
if download:
headers.append(('Content-Disposition', self.content_disposition(filename)))
return (status, headers, content)
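# Illustrative sketch, not part of the original file: one way a controller could
# turn binary_content()'s (status, headers, content) triple into an HTTP
# response. The method name is hypothetical and error handling is reduced to
# the 304/301/200 cases.
def _example_serve_binary(self, id):
    status, headers, content = self.binary_content(
        model='ir.attachment', id=id, field='datas', download=True)
    if status == 304:
        response = werkzeug.wrappers.Response(status=304)
    elif status == 301:
        response = werkzeug.utils.redirect(content, code=301)
    else:
        # content is base64-encoded, as returned by binary_content()
        response = request.make_response(content.decode('base64'), headers)
    response.status_code = status
    return response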
def convert_exception_to(to_type, with_message=False):
""" Should only be called from an exception handler. Fetches the current
exception data from sys.exc_info() and creates a new exception of type
``to_type`` with the original traceback.
If ``with_message`` is ``True``, sets the new exception's message to be
the stringification of the original exception. If ``False``, does not
set the new exception's message. Otherwise, uses ``with_message`` as the
new exception's message.
:type with_message: str|bool
"""
etype, original, tb = sys.exc_info()
try:
if with_message is False:
message = None
elif with_message is True:
message = str(original)
else:
message = str(with_message)
raise to_type, message, tb
except to_type as e:
return e
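# Illustrative usage sketch (hypothetical helper, not in the original file):
# rewrap the active exception as a werkzeug InternalServerError while keeping
# the original traceback, as described in the docstring above.
def _example_rewrap_current_exception():
    try:
        raise ValueError("boom")
    except ValueError:
        # returns an InternalServerError carrying str(ValueError("boom"))
        # and the ValueError's traceback
        return convert_exception_to(
            werkzeug.exceptions.InternalServerError, with_message=True)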
|
akhmadMizkat/odoo
|
openerp/addons/base/ir/ir_http.py
|
Python
|
gpl-3.0
| 13,358
|
#!/usr/bin/env python
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Read adapter data while decoding ext2 custom fields
"""
import sys
from json import dumps
from common.utils.json_format import MessageToDict
from voltha.protos import adapter_pb2
adapter = adapter_pb2.Adapter()
binary = sys.stdin.read()
adapter.ParseFromString(binary)
print dumps(MessageToDict(adapter, strict_any_handling=False))
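# Usage sketch (assumed invocation): feed a serialized Adapter protobuf on
# stdin; the decoded message is printed as JSON on stdout.
#   cat adapter.bin | python read_ext2.py > adapter.json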
|
opencord/voltha
|
experiments/extensions/read_ext2.py
|
Python
|
apache-2.0
| 956
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'mr.S'
from kernel.db import Base
from sqlalchemy import Column, Integer, String, ForeignKey
class ChatUser(Base):
__tablename__ = 's_chat_user'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('s_user.usr_id'))
chat_id = Column(Integer, ForeignKey('s_chat.id'))
|
s-tar/just-a-chat
|
entities/s_chat_user.py
|
Python
|
mit
| 367
|
'''
SASMOL: Copyright (C) 2011 Joseph E. Curtis, Ph.D.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from sasmol.test_sasmol.util import env, util
'''
sasio.Files.read_pdb does not seem to get the moltype right
'''
from unittest import main
from mocker import Mocker, MockerTestCase, ANY, ARGS, KWARGS
import sasmol.sasmol as sasmol
import sasmol.sasop as sasop
import sasmol.sascalc as sascalc
import numpy, os, copy
import warnings; warnings.filterwarnings('ignore')
floattype=os.environ['SASSIE_FLOATTYPE']
DataPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),'..','data','pdb_common')+os.path.sep
class Test_intg_sasmol_SasAtm_Type(MockerTestCase):
def setUp(self):
self.o=sasmol.SasAtm(3,'1CRN-3frames.pdb')
def test_energy(self):
expected = 10.0
self.o.setEnergy(expected)
result = self.o.energy()
self.assertEqual(expected, result)
def test_formula(self):
expected = 'fom'
self.o.setFormula(expected)
result = self.o.formula()
self.assertEqual(expected, result)
def test_mass(self):
expected = 100.0
self.o.setMass(expected)
result = self.o.mass()
self.assertEqual(expected, result)
def test_totalmass(self):
expected = 100.0
self.o.read_pdb(DataPath+'1ATM.pdb')
self.o.setTotalmass(expected)
result = self.o.totalmass()
self.assertEqual(expected, result)
def test_unitcell(self):
expected = 'P1'
self.o.setUnitcell(expected)
result = self.o.unitcell()
self.assertEqual(expected, result)
def test_com(self):
expected = [1.,2.,3.]
self.o.setCom(expected)
result = self.o.com()
self.assertEqual(expected, result)
def test_natoms(self):
expected = 100
self.o.setNatoms(expected)
result = self.o.natoms()
self.assertEqual(expected, result)
def test_rg(self):
expected = 100.0
self.o.setRg(expected)
result = self.o.rg()
self.assertEqual(expected, result)
def test_pmi(self):
expected = 100.0
self.o.setPmi(expected)
result = self.o.pmi()
self.assertEqual(expected, result)
def test_minimum(self):
expected = 1.0
self.o.setMinimum(expected)
result = self.o.minimum()
self.assertEqual(expected, result)
def test_maximum(self):
expected = 10000.0
self.o.setMaximum(expected)
result = self.o.maximum()
self.assertEqual(expected, result)
def test_shape(self):
expected = 'sphere'
self.o.setShape(expected)
result = self.o.shape()
self.assertEqual(expected, result)
def test_moltype(self):
expected = 'protein'
self.o.setMoltype(expected)
result = self.o.moltype()
self.assertEqual(expected, result)
def test_number_of_names(self):
expected = 100
self.o.setNumber_of_names(expected)
result = self.o.number_of_names()
self.assertEqual(expected, result)
def test_number_of_resnames(self):
expected = 100
self.o.setNumber_of_resnames(expected)
result = self.o.number_of_resnames()
self.assertEqual(expected, result)
def test_number_of_resids(self):
expected = 100
self.o.setNumber_of_resids(expected)
result = self.o.number_of_resids()
self.assertEqual(expected, result)
def test_number_of_chains(self):
expected = 100
self.o.setNumber_of_chains(expected)
result = self.o.number_of_chains()
self.assertEqual(expected, result)
def test_number_of_segnames(self):
expected = 100
self.o.setNumber_of_segnames(expected)
result = self.o.number_of_segnames()
self.assertEqual(expected, result)
def test_number_of_occupancies(self):
expected = 100
self.o.setNumber_of_occupancies(expected)
result = self.o.number_of_occupancies()
self.assertEqual(expected, result)
def test_number_of_betas(self):
expected = 100
self.o.setNumber_of_betas(expected)
result = self.o.number_of_betas()
self.assertEqual(expected, result)
def test_number_of_elements(self):
expected = 100
self.o.setNumber_of_elements(expected)
result = self.o.number_of_elements()
self.assertEqual(expected, result)
def test_names(self):
expected = ['C','N']
self.o.setNames(expected)
result = self.o.names()
self.assertEqual(expected, result)
def test_resnames(self):
expected = ['Ala','Phe']
self.o.setResnames(expected)
result = self.o.resnames()
self.assertEqual(expected, result)
def test_resids(self):
expected = [1,2,3]
self.o.setResids(expected)
result = self.o.resids()
self.assertEqual(expected, result)
def test_chains(self):
expected = ['A','B']
self.o.setChains(expected)
result = self.o.chains()
self.assertEqual(expected, result)
def test_segnames(self):
expected = ['A','A']
self.o.setSegnames(expected)
result = self.o.segnames()
self.assertEqual(expected, result)
def test_occupancies(self):
expected = ['1.0','1.0']
self.o.setOccupancies(expected)
result = self.o.occupancies()
self.assertEqual(expected, result)
def test_betas(self):
expected = [10.0,10.0]
self.o.setBetas(expected)
result = self.o.betas()
self.assertEqual(expected, result)
def test_elements(self):
expected = ['C','N']
self.o.setElements(expected)
result = self.o.elements()
self.assertEqual(expected, result)
def test_names_mask(self):
expected = [1,0,1]
self.o.setNames_mask(expected)
result = self.o.names_mask()
self.assertEqual(expected, result)
def test_resnames_mask(self):
expected = [1,0]
self.o.setResnames_mask(expected)
result = self.o.resnames_mask()
self.assertEqual(expected, result)
def test_resids_mask(self):
expected = [1,0]
self.o.setResids_mask(expected)
result = self.o.resids_mask()
self.assertEqual(expected, result)
def test_chains_mask(self):
expected = [1,0]
self.o.setChains_mask(expected)
result = self.o.chains_mask()
self.assertEqual(expected, result)
    def test_occupancies_mask(self):
expected = [1,0]
self.o.setOccupancies_mask(expected)
result = self.o.occupancies_mask()
self.assertEqual(expected, result)
def test_betas_mask(self):
expected = [1,0]
self.o.setBetas_mask(expected)
result = self.o.betas_mask()
self.assertEqual(expected, result)
def test_elements_mask(self):
expected = [1,0]
self.o.setElements_mask(expected)
result = self.o.elements_mask()
self.assertEqual(expected, result)
def test_segnames_mask(self):
expected = [1,0]
self.o.setSegnames_mask(expected)
result = self.o.segnames_mask()
self.assertEqual(expected, result)
def tearDown(self):
pass
if __name__ == '__main__':
main()
|
madscatt/sasmol
|
src/python/test_sasmol/test_sasmol/test_intg_sasmol_SasAtm_miscellaneous.py
|
Python
|
gpl-3.0
| 7,692
|
import bee
from bee.segments import *
import libcontext
from libcontext.socketclasses import *
from libcontext.pluginclasses import *
from .matrix import matrix
import Spyder
matrix0 = matrix(Spyder.AxisSystem(), "AxisSystem")
class spawn_actor_or_entity(bee.worker):
actorclassname = antenna("pull", "id")
b_actorclassname = buffer("pull", "id")
connect(actorclassname, b_actorclassname)
v_actorname = variable("id")
v_matrix = variable(("object", "matrix"))
@modifier
def do_spawn(self):
try:
self.actorspawnfunc(self.b_actorclassname, self.v_actorname)
except KeyError:
self.entspawnfunc(self.b_actorclassname, self.v_actorname)
axis = self.v_matrix.get_copy("AxisSystem")
ent = self.get_entity(self.v_actorname)
ent.set_axissystem(axis)
ent.commit()
spawn_matrix = antenna("push", ("id", ("object", "matrix")))
uw = unweaver(("id", ("object", "matrix")), v_actorname, v_matrix)
connect(spawn_matrix, uw)
trigger(v_actorname, b_actorclassname, "input")
trigger(v_matrix, do_spawn, "input")
spawn = antenna("push", "id")
b_spawn = buffer("push", "id")
@modifier
def set_identity_matrix(self):
self.v_matrix = matrix0
connect(spawn, b_spawn)
connect(b_spawn, v_actorname)
trigger(b_spawn, set_identity_matrix, "input")
trigger(b_spawn, b_spawn, "input")
trigger(b_spawn, do_spawn, "input")
def set_actorspawnfunc(self, spawnfunc):
self.actorspawnfunc = spawnfunc
def set_entspawnfunc(self, spawnfunc):
self.entspawnfunc = spawnfunc
def set_get_entity(self, get_entity):
self.get_entity = get_entity
def place(self):
libcontext.socket(("get_entity", "AxisSystem"), socket_single_required(self.set_get_entity))
libcontext.socket(("spawn", "actor"), socket_single_required(self.set_actorspawnfunc))
libcontext.socket(("spawn", "entity"), socket_single_required(self.set_entspawnfunc))
|
agoose77/hivesystem
|
dragonfly/scene/spawn_actor_or_entity.py
|
Python
|
bsd-2-clause
| 2,030
|
###############################################################################
##
## Copyright (C) 2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.wamp import ApplicationSession
class Component(ApplicationSession):
"""
An application component calling the different backend procedures.
"""
@inlineCallbacks
def onJoin(self, details):
yield self.call(u'com.arguments.ping')
print("Pinged!")
res = yield self.call(u'com.arguments.add2', 2, 3)
print("Add2: {}".format(res))
starred = yield self.call(u'com.arguments.stars')
print("Starred 1: {}".format(starred))
starred = yield self.call(u'com.arguments.stars', nick = u'Homer')
print("Starred 2: {}".format(starred))
starred = yield self.call(u'com.arguments.stars', stars = 5)
print("Starred 3: {}".format(starred))
starred = yield self.call(u'com.arguments.stars', nick = u'Homer', stars = 5)
print("Starred 4: {}".format(starred))
orders = yield self.call(u'com.arguments.orders', u'coffee')
print("Orders 1: {}".format(orders))
orders = yield self.call(u'com.arguments.orders', u'coffee', limit = 10)
print("Orders 2: {}".format(orders))
arglengths = yield self.call(u'com.arguments.arglen')
print("Arglen 1: {}".format(arglengths))
arglengths = yield self.call(u'com.arguments.arglen', 1, 2, 3)
print("Arglen 1: {}".format(arglengths))
arglengths = yield self.call(u'com.arguments.arglen', a = 1, b = 2, c = 3)
print("Arglen 2: {}".format(arglengths))
arglengths = yield self.call(u'com.arguments.arglen', 1, 2, 3, a = 1, b = 2, c = 3)
print("Arglen 3: {}".format(arglengths))
self.leave()
def onDisconnect(self):
reactor.stop()
|
ahmedbodi/AutobahnPython
|
examples/twisted/wamp/basic/rpc/arguments/frontend.py
|
Python
|
apache-2.0
| 2,510
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_device_group import ApiParameters
from library.modules.bigip_device_group import ModuleParameters
from library.modules.bigip_device_group import ModuleManager
from library.modules.bigip_device_group import ArgumentSpec
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_device_group import ApiParameters
from ansible.modules.network.f5.bigip_device_group import ModuleParameters
from ansible.modules.network.f5.bigip_device_group import ModuleManager
from ansible.modules.network.f5.bigip_device_group import ArgumentSpec
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
save_on_auto_sync=True,
full_sync=False,
description="my description",
type="sync-failover",
auto_sync=True
)
p = ModuleParameters(params=args)
assert p.save_on_auto_sync is True
assert p.full_sync is False
assert p.description == "my description"
assert p.type == "sync-failover"
assert p.auto_sync is True
def test_api_parameters(self):
args = dict(
asmSync="disabled",
autoSync="enabled",
fullLoadOnSync="false",
incrementalConfigSyncSizeMax=1024,
networkFailover="disabled",
saveOnAutoSync="false",
type="sync-only"
)
p = ApiParameters(params=args)
assert p.auto_sync is True
assert p.full_sync is False
assert p.max_incremental_sync_size == 1024
assert p.save_on_auto_sync is False
assert p.type == 'sync-only'
class TestModuleManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
def test_create_default_device_group(self, *args):
set_module_args(
dict(
name="foo-group",
state="present",
server='localhost',
user='admin',
password='password'
)
)
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.create_on_device = Mock(return_value=True)
mm.exists = Mock(return_value=False)
results = mm.exec_module()
assert results['changed'] is True
def test_update_device_group(self, *args):
set_module_args(
dict(
full_sync=True,
name="foo-group",
state="present",
server='localhost',
user='admin',
password='password'
)
)
current = ApiParameters(params=load_fixture('load_tm_cm_device_group.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.update_on_device = Mock(return_value=True)
mm.exists = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
results = mm.exec_module()
assert results['changed'] is True
def test_delete_device_group(self, *args):
set_module_args(
dict(
name="foo-group",
state="absent",
server='localhost',
user='admin',
password='password'
)
)
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.exists = Mock(side_effect=[True, False])
mm.remove_from_device = Mock(return_value=True)
mm.remove_members_in_group_from_device = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
|
alexlo03/ansible
|
test/units/modules/network/f5/test_bigip_device_group.py
|
Python
|
gpl-3.0
| 5,779
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops.matrix_inverse."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test
def _AddTest(test_class, op_name, testcase_name, fn):
test_name = "_".join(["test", op_name, testcase_name])
if hasattr(test_class, test_name):
raise RuntimeError("Test %s defined more than once" % test_name)
setattr(test_class, test_name, fn)
class QrOpTest(test.TestCase):
def testWrongDimensions(self):
# The input to qr should be a tensor of at least rank 2.
scalar = constant_op.constant(1.)
with self.assertRaisesRegexp(ValueError,
"Shape must be at least rank 2 but is rank 0"):
linalg_ops.qr(scalar)
vector = constant_op.constant([1., 2.])
with self.assertRaisesRegexp(ValueError,
"Shape must be at least rank 2 but is rank 1"):
linalg_ops.qr(vector)
def testConcurrentExecutesWithoutError(self):
with self.test_session(use_gpu=True) as sess:
all_ops = []
for full_matrices_ in True, False:
for rows_ in 4, 5:
for cols_ in 4, 5:
matrix1 = random_ops.random_normal([rows_, cols_], seed=42)
matrix2 = random_ops.random_normal([rows_, cols_], seed=42)
q1, r1 = linalg_ops.qr(matrix1, full_matrices=full_matrices_)
q2, r2 = linalg_ops.qr(matrix2, full_matrices=full_matrices_)
all_ops += [q1, r1, q2, r2]
val = sess.run(all_ops)
for i in range(8):
q = 4 * i
self.assertAllEqual(val[q], val[q + 2]) # q1 == q2
self.assertAllEqual(val[q + 1], val[q + 3]) # r1 == r2
def _GetQrOpTest(dtype_, shape_, full_matrices_, use_static_shape_):
is_complex = dtype_ in (np.complex64, np.complex128)
is_single = dtype_ in (np.float32, np.complex64)
def CompareOrthogonal(self, x, y, rank):
if is_single:
atol = 5e-4
else:
atol = 5e-14
# We only compare the first 'rank' orthogonal vectors since the
# remainder form an arbitrary orthonormal basis for the
# (row- or column-) null space, whose exact value depends on
# implementation details. Notice that since we check that the
# matrices of singular vectors are unitary elsewhere, we do
# implicitly test that the trailing vectors of x and y span the
# same space.
x = x[..., 0:rank]
y = y[..., 0:rank]
# Q is only unique up to sign (complex phase factor for complex matrices),
# so we normalize the sign first.
sum_of_ratios = np.sum(np.divide(y, x), -2, keepdims=True)
phases = np.divide(sum_of_ratios, np.abs(sum_of_ratios))
x *= phases
self.assertAllClose(x, y, atol=atol)
def CheckApproximation(self, a, q, r):
if is_single:
tol = 1e-5
else:
tol = 1e-14
# Tests that a ~= q*r.
a_recon = math_ops.matmul(q, r)
self.assertAllClose(a_recon.eval(), a, rtol=tol, atol=tol)
def CheckUnitary(self, x):
# Tests that x[...,:,:]^H * x[...,:,:] is close to the identity.
xx = math_ops.matmul(x, x, adjoint_a=True)
identity = array_ops.matrix_band_part(array_ops.ones_like(xx), 0, 0)
if is_single:
tol = 1e-5
else:
tol = 1e-14
self.assertAllClose(identity.eval(), xx.eval(), atol=tol)
def Test(self):
np.random.seed(1)
x_np = np.random.uniform(
low=-1.0, high=1.0, size=np.prod(shape_)).reshape(shape_).astype(dtype_)
if is_complex:
x_np += 1j * np.random.uniform(
low=-1.0, high=1.0,
size=np.prod(shape_)).reshape(shape_).astype(dtype_)
with self.test_session(use_gpu=True) as sess:
if use_static_shape_:
x_tf = constant_op.constant(x_np)
else:
x_tf = array_ops.placeholder(dtype_)
q_tf, r_tf = linalg_ops.qr(x_tf, full_matrices=full_matrices_)
if use_static_shape_:
q_tf_val, r_tf_val = sess.run([q_tf, r_tf])
else:
q_tf_val, r_tf_val = sess.run([q_tf, r_tf], feed_dict={x_tf: x_np})
q_dims = q_tf_val.shape
np_q = np.ndarray(q_dims, dtype_)
np_q_reshape = np.reshape(np_q, (-1, q_dims[-2], q_dims[-1]))
new_first_dim = np_q_reshape.shape[0]
x_reshape = np.reshape(x_np, (-1, x_np.shape[-2], x_np.shape[-1]))
for i in range(new_first_dim):
if full_matrices_:
np_q_reshape[i, :, :], _ = np.linalg.qr(
x_reshape[i, :, :], mode="complete")
else:
np_q_reshape[i, :, :], _ = np.linalg.qr(
x_reshape[i, :, :], mode="reduced")
np_q = np.reshape(np_q_reshape, q_dims)
CompareOrthogonal(self, np_q, q_tf_val, min(shape_[-2:]))
CheckApproximation(self, x_np, q_tf_val, r_tf_val)
CheckUnitary(self, q_tf_val)
return Test
class QrGradOpTest(test.TestCase):
pass
def _GetQrGradOpTest(dtype_, shape_, full_matrices_):
def Test(self):
np.random.seed(42)
a = np.random.uniform(low=-1.0, high=1.0, size=shape_).astype(dtype_)
if dtype_ in [np.complex64, np.complex128]:
a += 1j * np.random.uniform(
low=-1.0, high=1.0, size=shape_).astype(dtype_)
# Optimal stepsize for central difference is O(epsilon^{1/3}).
epsilon = np.finfo(dtype_).eps
delta = 0.1 * epsilon**(1.0 / 3.0)
if dtype_ in [np.float32, np.complex64]:
tol = 3e-2
else:
tol = 1e-6
with self.test_session(use_gpu=True):
tf_a = constant_op.constant(a)
tf_b = linalg_ops.qr(tf_a, full_matrices=full_matrices_)
for b in tf_b:
x_init = np.random.uniform(
low=-1.0, high=1.0, size=shape_).astype(dtype_)
if dtype_ in [np.complex64, np.complex128]:
x_init += 1j * np.random.uniform(
low=-1.0, high=1.0, size=shape_).astype(dtype_)
theoretical, numerical = gradient_checker.compute_gradient(
tf_a,
tf_a.get_shape().as_list(),
b,
b.get_shape().as_list(),
x_init_value=x_init,
delta=delta)
self.assertAllClose(theoretical, numerical, atol=tol, rtol=tol)
return Test
if __name__ == "__main__":
for dtype in np.float32, np.float64, np.complex64, np.complex128:
for rows in 1, 2, 5, 10, 32, 100:
for cols in 1, 2, 5, 10, 32, 100:
for full_matrices in False, True:
for batch_dims in [(), (3,)] + [(3, 2)] * (max(rows, cols) < 10):
for use_static_shape in True, False:
shape = batch_dims + (rows, cols)
name = "%s_%s_full_%s_static_%s" % (dtype.__name__,
"_".join(map(str, shape)),
full_matrices,
use_static_shape)
_AddTest(QrOpTest, "Qr", name,
_GetQrOpTest(dtype, shape, full_matrices,
use_static_shape))
# TODO(pfau): Get working with complex types.
# TODO(pfau): Get working with full_matrices when rows != cols
# TODO(pfau): Get working when rows < cols
# TODO(pfau): Get working with placeholders (dynamic shapes)
for full_matrices in False, True:
for dtype in np.float32, np.float64:
for rows in 1, 2, 5, 10:
for cols in 1, 2, 5, 10:
if rows == cols or (not full_matrices and rows > cols):
for batch_dims in [(), (3,)] + [(3, 2)] * (max(rows, cols) < 10):
shape = batch_dims + (rows, cols)
name = "%s_%s_full_%s" % (dtype.__name__,
"_".join(map(str, shape)),
full_matrices)
_AddTest(QrGradOpTest, "QrGrad", name,
_GetQrGradOpTest(dtype, shape, full_matrices))
test.main()
|
drpngx/tensorflow
|
tensorflow/python/kernel_tests/qr_op_test.py
|
Python
|
apache-2.0
| 8,834
|
#!/usr/bin/python
# http://stackoverflow.com/questions/4628122/how-to-construct-a-timedelta-object-from-a-simple-string
import re
line = "Run '/tmp/aggr_aql.txt'"
regex = re.compile(r'^.*Run \'(?P<filename>[^\']+?)\'.*$')
parts = regex.match(line)
print "parts = %s" % str(parts)
print "parts = %s" % str(dir(parts))
parts = parts.groupdict()
print "parts = %s" % str(parts)
filename = parts['filename']
print "filename = %s" % str(filename)
|
jtraver/dev
|
python/re/re4.py
|
Python
|
mit
| 450
|
from .base import ScrollWindow
from settings_inspector.gui import keys
class VariablesWindow(ScrollWindow):
def __init__(self, settings, *args, **kwargs):
super(VariablesWindow, self).__init__(*args, **kwargs)
self.root_settings = settings
self.reset()
self.render()
def reset(self):
self.settings = {}
self.current_line = 0
self.current_column = 0
self.add_variables()
self.refresh()
def add_variables(self):
for name, variable in self.root_settings.variable_registry.variables.items():
self.write(u"%s = %s" % (variable.name, variable.value))
self.next_line()
def on_ch(self, cmd):
if cmd == keys.LOWERCASE_S:
self.parent_ui.show_settings()
else:
super(VariablesWindow, self).on_ch(cmd)
class VariableHistoryWindow(ScrollWindow):
def __init__(self, settings, variable, *args, **kwargs):
super(VariableHistoryWindow, self).__init__(*args, **kwargs)
self.root_settings = settings
self.variable = variable
self.reset()
self.render()
def reset(self):
self.settings = {}
self.current_line = 0
self.current_column = 0
self.add_variable()
self.refresh()
def add_variable(self):
for assignment in self.variable.assignment:
self.write(u"%s" % (assignment))
self.next_line()
|
fcurella/django-settings_inspector
|
settings_inspector/gui/windows/variables.py
|
Python
|
mit
| 1,500
|
#!/usr/bin/python -tt
import pymetar
import sys
import os
if __name__ == "__main__":
if len(sys.argv) > 1:
repdir=sys.argv[1]
else:
repdir=("reports")
if len(sys.argv) > 2:
reports = sys.argv[2:]
else:
reports = os.listdir(repdir)
reports.sort()
count=0
rf=pymetar.ReportFetcher()
for reportfile in reports:
station = reportfile[:-4]
fd = open("%s/%s" % (repdir, reportfile))
report = fd.read()
fd.close()
repo = rf.MakeReport(station, report)
rp = pymetar.ReportParser()
pr = rp.ParseReport(repo)
a=pr.getCloudtype()
        if a is not None:
print "%s: %s"% (station, a)
count += 1
sys.stderr.write("%s station reports check out ok\n" % (count))
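# Usage sketch (assumed invocation): point the script at a directory of raw
# METAR report files, optionally naming individual report files inside it.
#   python testcloud.py reports [KJFK.TXT KLAX.TXT ...]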
|
theswitch/pymetar3
|
testing/smoketest/testcloud.py
|
Python
|
gpl-2.0
| 819
|
# -*- coding: utf-8; -*-
#
# @file __init__.py
# @brief Application Django base url
# @authors Frédéric SCHERMA (INRA UMR1095)
# @date 2017-10-06
# @copyright Copyright (c) 2017 INRA/CIRAD
# @license MIT (see LICENSE file)
# @details
from django.conf.urls import include, url
from django.conf import settings
urlpatterns = []
|
coll-gate/collgate
|
messenger/urls.py
|
Python
|
mit
| 330
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import fasttext as ft
import os
import regex
import sys
def get_parser():
parser = argparse.ArgumentParser(
description="reads text from stdin and outputs normalized, lid-filtered version to stdout"
)
parser.add_argument(
"--fasttext-model",
help="path to fasttext model",
default="lid.187.bin",
)
parser.add_argument("--lang", help="language id", required=True)
parser.add_argument(
"--lid-threshold",
type=float,
help="threshold for this lang id probability",
default=0.4,
)
return parser
def main():
parser = get_parser()
args = parser.parse_args()
filter_r = regex.compile(r"[^\p{L}\p{N}\p{M}\' \-]")
lg = args.lang.lower()
lg_label = f"__label__{lg}"
thresh = args.lid_threshold
if os.path.exists(args.fasttext_model):
model = ft.load_model(args.fasttext_model)
else:
print(
f"fasttext language id model {args.fasttext_model} not found. Proceeding without language filtering. "
f"To enable language filtering, please download the latest language id model "
f"from https://fasttext.cc/docs/en/language-identification.html",
file=sys.stderr,
)
model = None
for line in sys.stdin:
line = line.strip()
line = filter_r.sub(" ", line)
line = " ".join(line.split())
if model is not None:
lid, prob = model.predict(line, k=100)
try:
target_idx = lid.index(lg_label)
except ValueError:
continue
if target_idx == 0 or prob[target_idx] >= thresh:
print(line)
else:
print(line)
if __name__ == "__main__":
main()
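# Usage sketch (assumed invocation): normalize text from stdin and keep only
# lines the fasttext model identifies as the target language.
#   cat raw.txt | python normalize_and_filter_text.py --lang en \
#       --fasttext-model lid.187.bin > filtered.txt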
|
pytorch/fairseq
|
examples/wav2vec/unsupervised/scripts/normalize_and_filter_text.py
|
Python
|
mit
| 1,997
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append('../')
from utils import multi_index_to_single_index
import csv
import json
import pandas as pd
# Read csv from source csv
df = pd.read_csv('REGION_DEMOGR_life_expectancy_and_mortality.csv')
df = df[['TL', 'REG_ID', 'Region', 'VAR', 'SEX', 'Year', 'Value']]
# First remove geos with names that we don't have mappings to dcid for.
regid2dcid = dict(json.loads(open('../regid2dcid.json').read()))
df = df[df['REG_ID'].isin(regid2dcid.keys())]
# Second, replace the names with dcids
df['Region'] = df.apply(lambda row: regid2dcid[row['REG_ID']], axis=1)
# process the source data
df = df[['REG_ID', 'Region', 'VAR', 'SEX', 'Year', 'Value']]
df_clear = df.drop(df[(df['VAR'] == 'INF_SEXDIF') |
(df['VAR'] == 'LIFE_SEXDIF')].index)
df_clear['Year'] = '"' + df_clear['Year'].astype(str) + '"'
df_cleaned = df_clear.pivot_table(values='Value',
index=['REG_ID', 'Region', 'Year'],
columns=['VAR', 'SEX'])
df_cleaned['DEATH_RA'] = df_cleaned['DEATH_RA'] / 1000
df_cleaned['INF_MORT'] = df_cleaned['INF_MORT'] / 1000
df_cleaned['STD_MORT'] = df_cleaned['STD_MORT'] / 1000
df_cleaned['YOU_DEATH_RA'] = df_cleaned['YOU_DEATH_RA'] / 1000
df_cleaned = multi_index_to_single_index(df_cleaned)
VAR_to_statsvars = {
'DEATH_RAT':
'Count_Death_AsAFractionOf_Count_Person',
'DEATH_RAM':
'Count_Death_Male_AsAFractionOf_Count_Person_Male',
'DEATH_RAF':
'Count_Death_Female_AsAFractionOf_Count_Person_Female',
'STD_MORTT':
'Count_Death_AgeAdjusted_AsAFractionOf_Count_Person',
'STD_MORTM':
'Count_Death_Male_AgeAdjusted_AsAFractionOf_Count_Person_Male',
'STD_MORTF':
'Count_Death_Female_AgeAdjusted_AsAFractionOf_Count_Person_Female',
'YOU_DEATH_RAT':
'Count_Death_Upto14Years_AsAFractionOf_Count_Person_Upto14Years',
'YOU_DEATH_RAM':
'Count_Death_Upto14Years_Male_AsAFractionOf_Count_Person_Upto14Years_Male',
'YOU_DEATH_RAF':
'Count_Death_Upto14Years_Female_AsAFractionOf_Count_Person_Upto14Years_Female',
'INF_MORTT':
'Count_Death_LessThan1Year_AsAFractionOf_Count_BirthEvent',
'INF_MORTM':
'Count_Death_LessThan1Year_Male_AsAFractionOf_Count_BirthEvent_Male',
'INF_MORTF':
'Count_Death_LessThan1Year_Female_AsAFractionOf_Count_BirthEvent_Female',
'LIFE_EXPT':
'LifeExpectancy_Person',
'LIFE_EXPF':
'LifeExpectancy_Person_Female',
'LIFE_EXPM':
'LifeExpectancy_Person_Male',
}
df_cleaned.rename(columns=VAR_to_statsvars, inplace=True)
df_cleaned.to_csv('OECD_life_expectancy_and_mortality_cleaned.csv',
index=False,
quoting=csv.QUOTE_NONE)
TEMPLATE_MCF_TEMPLATE = """
Node: E:OECD_life_expectancy_and_mortality_cleaned->E{index}
typeOf: dcs:StatVarObservation
variableMeasured: dcs:{stat_var}
measurementMethod: dcs:OECDRegionalStatistics
observationAbout: C:OECD_life_expectancy_and_mortality_cleaned->Region
observationDate: C:OECD_life_expectancy_and_mortality_cleaned->Year
observationPeriod: "P1Y"
value: C:OECD_life_expectancy_and_mortality_cleaned->{stat_var}
"""
TEMPLATE_MCF_TEMPLATE_YEAR = """
Node: E:OECD_life_expectancy_and_mortality_cleaned->E{index}
typeOf: dcs:StatVarObservation
variableMeasured: dcs:{stat_var}
measurementMethod: dcs:OECDRegionalStatistics
observationAbout: C:OECD_life_expectancy_and_mortality_cleaned->Region
observationDate: C:OECD_life_expectancy_and_mortality_cleaned->Year
observationPeriod: "P1Y"
value: C:OECD_life_expectancy_and_mortality_cleaned->{stat_var}
unit: dcs:Year
"""
stat_vars = df_cleaned.columns[3:]
with open('OECD_life_expectancy_and_mortality.tmcf', 'w', newline='') as f_out:
for i in range(len(stat_vars)):
if stat_vars[i].startswith("LifeExpectancy"):
f_out.write(
TEMPLATE_MCF_TEMPLATE_YEAR.format_map({
'index': i + 1,
'stat_var': stat_vars[i]
}))
else:
f_out.write(
TEMPLATE_MCF_TEMPLATE.format_map({
'index': i + 1,
'stat_var': stat_vars[i]
}))
|
datacommonsorg/data
|
scripts/oecd/regional_demography/life_expectancy_and_mortality/preprocess_csv.py
|
Python
|
apache-2.0
| 4,798
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Currency rate date check module for OpenERP
# Copyright (C) 2012-2013 Akretion (http://www.akretion.com).
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Currency Rate Date Check',
'version': '1.0',
'category': 'Financial Management/Configuration',
'license': 'AGPL-3',
'summary': "Make sure currency rates used are always up-to-update",
'description': """
Currency Rate Date Check
========================
This module adds a check on dates when doing currency conversion in OpenERP. It checks that the currency rate used to make the conversion is not more than N days away from the date of the amount to convert. The maximum number of days of the interval can be configured on the company form.
Please contact Alexis de Lattre from Akretion <alexis.delattre@akretion.com> for any help or question about this module.
""",
'author': 'Akretion',
'website': 'http://www.akretion.com',
'depends': ['base'],
'data': ['company_view.xml'],
'images': [
'images/date_check_error_popup.jpg',
'images/date_check_company_config.jpg',
],
'installable': False,
'active': False,
}
|
yvaucher/account-financial-tools
|
__unported__/currency_rate_date_check/__openerp__.py
|
Python
|
agpl-3.0
| 2,057
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-29 06:13
from __future__ import unicode_literals
import diabetics.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('diabetics', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='severity',
name='result',
field=models.CharField(max_length=20, null=True),
),
migrations.AlterField(
model_name='imagename',
name='image',
field=models.ImageField(null=True, upload_to=diabetics.models.ImageName.get_image_path),
),
]
|
jr55662003/Diabetic-Retinopathy-website
|
website/diabetics/migrations/0002_auto_20161229_0613.py
|
Python
|
mit
| 682
|
import ivyrest
# [outputfolderroot] "/scripts/ivy/ivyoutput/sample_output";
a = ivyrest.IvyObj("localhost")
a.set_output_folder_root(".")
a.set_test_name("demo4_edit_rollup_DF")
a.hosts_luns(hosts = "sun159", select = "serial_number : 83011441")
## The [EditRollup] statement gives you access to the ivy Dynamic Feedback Controller engine's ability to deliver
## parameter updates to workload threads.
## The DFC engine C++ code that issues real-time parameter updates uses exactly the same interface,
## providing a rollup instance set, like "all=all", and parameter settings, such as "IOPS = 100, fractionRead = 75%".
## [EditRollup] "serial_number+LDEV = 83011441+0004" [parameters] "IOPS = 289";
## [EditRollup] "port = { 1A, 3A, 5A, 7A }" [parameters] "fractionRead = 100%"; // sorry, doesn't accept JSON syntax ... yet
a.create_workload(name = "r_steady", select = "", iosequencer = "random_steady", parameters = "fractionread=100%, maxtags=32, IOPS=max, blocksize = 512")
for blocksize in [4, 8, 64 ]:
a.edit_rollup(name="all=all", parameters = "blocksize = " + str(blocksize) + "kib")
a.go(stepname="iops_max_" + str(blocksize) + "KiB_random_read", measure_seconds = 10)
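## Illustrative extra edit (not in the original demo): the same edit_rollup()
## call can target other rollup instances, mirroring the commented ivyscript
## example above.
# a.edit_rollup(name = "port = { 1A, 3A, 5A, 7A }", parameters = "fractionRead = 100%")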
|
Hitachi-Data-Systems/ivy
|
rest_api/samples/DF_demos/demo4_edit_rollup_DF.py
|
Python
|
apache-2.0
| 1,211
|
'''
Copyright (c) 2008 Georgios Giannoudovardis, <vardis.g@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import os
import math
import logging
from pandac.PandaModules import Texture, NodePath
from pandac.PandaModules import TextureAttrib, CullFaceAttrib
from pandac.PandaModules import GeomVertexReader
from pandac.PandaModules import LineSegs
from pandac.PandaModules import Filename
from direct.showbase.PythonUtil import *
from pandac.PandaModules import Mat4, VBase3
from pano.constants import PanoConstants
from pano.view.VideoPlayer import VideoPlayer
from pano.view.sprites import *
class BaseRenderer(object):
def __init__(self, resources):
self.log = logging.getLogger('pano.baseRenderer')
# used for loading resources
self.resources = resources
# the game node that we are rendering
self.node = None
# the root scenegraph node
self.sceneRoot = None
# sprites in format {hotspot_name : (<spriteRenderInterface instance>)}
self.spritesByHotspot = {}
# if True then the debug geometries for the hotspots will be drawn
self.drawHotspots = False
def initialize(self):
self.debugGeomsParent = None
def dispose(self):
'''
Disposes any rendering resources, it assumes that this instance won't be used again.
'''
self.clearScene()
if self.sceneRoot is not None:
self.sceneRoot.removeNode()
self.sceneRoot = None
def clearScene(self):
'''
Clears the scenegraph effectively removing all nodes from rendering.
'''
# remove and destroy debug geometries
self.debugGeomsParent.removeNode()
self.debugGeomsParent = None
# same for hotspots
for sri in self.spritesByHotspot.values():
sri.remove()
self.spritesByHotspot = {}
def displayNode(self, node):
"""
Displays the given node.
"""
self.node = node
def getNode(self):
"""
Returns the Node object that we are currently rendering.
"""
return self.node
def getSceneRoot(self):
'''
Returns the Nodepath that acts as the scene root.
'''
return self.sceneRoot
def getCamera(self):
'''
Returns the Camera instance used for rendering this node.
'''
pass
def render(self, millis):
pass
def pauseAnimations(self):
"""
Stops all node animations.
"""
for sri in self.spritesByHotspot.values():
sri.pause()
def resumeAnimations(self):
"""
Resumes node animations.
"""
for sri in self.spritesByHotspot.values():
sri.play()
def drawDebugHotspots(self, flag):
self.drawHotspots = flag
if flag:
self.debugGeomsParent.show()
else:
self.debugGeomsParent.hide()
def renderHotspot(self, hp, sprite = None):
'''
        Renders the given hotspot using its associated sprite. Details of the rendering
        technique are left to the subclasses.
@param hp: The hotspot to render.
@param sprite: If not None it is used for overriding the hotspot's sprite.
'''
pass
def renderHotspotDebugGeom(self, hp):
'''
Renders a debug geometry for the given hotspot. Details of the nature of this debug geometry
is defined explicitly in subclasses.
@param hp: The hotspot for which to render debug geometry.
'''
pass
def getHotspotSprite(self, hotspot):
"""
Returns a SpriteRenderInterface which can be used to control the hotspot's sprite.
@param hotspot: The hotspot instance
@return: A SpriteRenderInterface instance or None.
"""
return self.spritesByHotspot.get(hotspot.name)
def removeHotspot(self, hotspot):
'''
Removes a hotspot from the render list. The debug geometry and sprite associated with the hotspot
won't be visible anymore.
@param hotspot: A pano.model.Hotspot instance for the hotspot to be removed.
'''
if self.log.isEnabledFor(logging.DEBUG):
self.log.debug('Removing hotspot %s' % hotspot.name)
spr = self.getHotspotSprite(hotspot)
if spr is not None:
spr.remove()
del self.spritesByHotspot[hotspot.name]
# remove the hotspot's debug geometry
if self.debugGeomsParent is not None:
np = self.debugGeomsParent.find('debug_' + hotspot.name)
            if np is not None and not np.isEmpty():
np.removeNode()
def hideHotspot(self, hp):
'''
Hides the scene node that parents the hotspots sprite in the scene.
@param hp: The hotspot that will be hidden.
'''
if self.log.isEnabledFor(logging.DEBUG):
self.log.debug('Hiding hotspot %s' % hp.name)
sri = self.getHotspotSprite(hp)
if sri is not None:
sri.hide()
def showHotspot(self, hp):
'''
Shows the scene node that parents the hotspots sprite in the scene.
@param hp: The hotspot that will be shown.
'''
if self.log.isEnabledFor(logging.DEBUG):
self.log.debug('Showing hotspot %s' % hp.name)
sri = self.getHotspotSprite(hp)
if sri is not None:
sri.show()
def replaceHotspotSprite(self, hotspot, newSprite):
'''
Changes the visual appearance of a hotspot by replacing its sprite with a new one.
@param hp: The hotspot that will have its sprite replaced.
@param newSprite: The name of the new sprite to use for rendering.
'''
if self.log.isEnabledFor(logging.DEBUG):
self.log.debug("Replacing hotspot's %s sprite with %s" % (hotspot.name, newSprite))
self.removeHotspot(hotspot)
self.renderHotspot(hotspot, newSprite)
self.renderHotspotDebugGeom(hotspot)
def raycastHotspots(self):
pass
def getFaceTextureDimensions(self, face):
"""
Returns a tuple containing the width and height of the cubemap textures.
tuple[0] holds the width while tuple[1] holds the height of the textures.
"""
pass
def findFaceFromNormal(self, n):
pass
def getFaceLocalCoords(self, face, point):
pass
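# Illustrative sketch (not part of the original file): the minimal surface a
# concrete renderer subclass is expected to fill in. Names are hypothetical.
class _ExampleRenderer(BaseRenderer):
    def initialize(self):
        BaseRenderer.initialize(self)
        self.camera = None  # a real renderer would create its camera here
    def getCamera(self):
        return self.camera
    def render(self, millis):
        # per-frame work, e.g. advancing sprite animations, would go here
        pass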
|
vardis/pano
|
src/pano/view/BaseRenderer.py
|
Python
|
mit
| 7,689
|
from glob import glob
def load_file(filename):
with open(filename) as f:
return f.read().strip()
def load_fixtures(test_name):
path = "tests/fixtures/{test_name}/*.{direction}.html"
in_files = sorted(glob(path.format(test_name=test_name, direction="in")))
out_files = sorted(glob(path.format(test_name=test_name, direction="out")))
return zip(in_files, out_files)
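# Illustrative usage sketch (hypothetical fixture name "sanitize"): each pair
# maps an ".in.html" input to its expected ".out.html" output.
def _example_fixture_roundtrip():
    for in_file, out_file in load_fixtures("sanitize"):
        raw = load_file(in_file)
        expected = load_file(out_file)
        # a real test would transform `raw` and compare against `expected`
        assert raw is not None and expected is not None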
|
Lukas0907/feeds
|
tests/utils.py
|
Python
|
agpl-3.0
| 396
|
#!/usr/bin/env python3
import sys, requests, json
from housepy import strings, util, log
from mongo import db, ObjectId
print("Sighting deduper")
results = db.features.aggregate([
{ '$match': {
'properties.Expedition': "okavango_16",
'properties.FeatureType': "sighting",
}},
{ '$group': {
'_id': { 'properties.t_utc': "$properties.t_utc", 'properties.Member': "$properties.Member"},
'uniqueIds': { '$addToSet': "$_id" },
'count': { '$sum': 1 }
}},
{ '$match': {
'count': { '$gt': 1 }
}}
], allowDiskUse=True)
results = list(results)
for result in results:
print(result)
print()
for result in results:
feature_ids = result['uniqueIds']
features = [db.features.find_one({'_id': feature_id}) for feature_id in feature_ids]
features.sort(key=lambda f: f['properties']['t_created'])
features.reverse()
    # note that we're taking the last one entered in the db as the gold standard -- if earlier entries have properties that aren't in this one, they won't be checked
gold = features[0]
print(json.dumps(gold, indent=4, default=lambda x: str(x)))
print("%s potential duplicate%s" % (len(features[1:]), "s" if len(features[1:]) > 1 else ""))
for feature in features[1:]:
dup = True
for prop in gold['properties']:
if prop in feature['properties'] and feature['properties'][prop] != gold['properties'][prop]:
if prop == "t_created":
continue
dup = False
print("Not a dup: %s" % prop)
break
if dup:
result = input("Delete %s? [y/n]: " % feature['_id'])
if result == "" or result == "y":
d = db.features.remove({'_id': feature['_id']})
print(d)
else:
print("--> skipped")
"""
Terminal for side by side comparison would be cool
"""
|
O-C-R/intotheokavango
|
tools/deduper.py
|
Python
|
mit
| 1,949
|
class Person(object):
    """Person class"""
    def __init__(self, name):
        super(Person, self).__init__()
        self.name = name
        self.gun = None  # holds a reference to a Gun object
        self.hp = 100
    def anzhuang_zidan(self, dan_jia_temp, zi_dan_temp):
        """Load a bullet into the magazine"""
        # magazine.store_bullet(bullet)
        dan_jia_temp.baocun_zidan(zi_dan_temp)
    def anzhuang_danjia(self, gun_temp, dan_jia_temp):
        """Install the magazine into the gun"""
        # gun.store_magazine(magazine)
        gun_temp.baocun_danjia(dan_jia_temp)
    def naqiang(self, gun_temp):
        """Pick up a gun"""
        self.gun = gun_temp
    def __str__(self):
        if self.gun:
            return "%s has %d HP and carries a gun: %s" % (self.name, self.hp, self.gun)
        else:
            return "%s has %d HP and no gun" % (self.name, self.hp)
class Gun(object):
    """Gun class"""
    def __init__(self, name):
        super(Gun, self).__init__()
        self.name = name  # records the type of the gun
        self.danjia = None  # holds a reference to a Danjia (magazine) object
    def baocun_danjia(self, dan_jia_temp):
        """Keep a reference to the magazine object in an attribute"""
        self.danjia = dan_jia_temp
    def __str__(self):
        if self.danjia:
            return "Gun info: %s, %s" % (self.name, self.danjia)
        else:
            return "Gun info: %s, there is no magazine in this gun" % (self.name)
class Danjia(object):
    """Magazine class"""
    def __init__(self, max_num):
        super(Danjia, self).__init__()
        self.max_num = max_num  # records the maximum capacity of the magazine
        self.zidan_list = []  # records references to all the bullets
    def baocun_zidan(self, zi_dan_temp):
        """Store this bullet"""
        self.zidan_list.append(zi_dan_temp)
    def __str__(self):
        return "Magazine info: %d/%d" % (len(self.zidan_list), self.max_num)
class Zidan(object):
    """Bullet class"""
    def __init__(self, sha_shang_li):
        super(Zidan, self).__init__()
        self.sha_shang_li = sha_shang_li  # the power of this bullet
def main():
    """Controls the overall flow of the program"""
    # 1. Create the Lao Wang object
    laowang = Person("Lao Wang")
    # 2. Create a gun object
    ak47 = Gun("AK47")
    # 3. Create a magazine object
    dan_jia = Danjia(20)
    # 4. Create some bullets
    for i in range(15):
        zi_dan = Zidan(10)
        # 5. Lao Wang loads the bullet into the magazine
        # laowang.load_bullet_into_magazine(magazine, bullet)
        laowang.anzhuang_zidan(dan_jia, zi_dan)
    # 6. Lao Wang installs the magazine into the gun
    # laowang.install_magazine_into_gun(gun, magazine)
    laowang.anzhuang_danjia(ak47, dan_jia)
    # test: inspect the magazine info
    #print(dan_jia)
    # test: inspect the gun info
    #print(ak47)
    # 7. Lao Wang picks up the gun
    # laowang.take_gun(gun)
    laowang.naqiang(ak47)
    # test: inspect the laowang object
    print(laowang)
    # 8. Create an enemy
    # 9. Lao Wang shoots the enemy
if __name__ == '__main__':
    main()
|
jameswatt2008/jameswatt2008.github.io
|
python/Python基础/截图和代码/加强/老王开枪/老王开枪-6-老王拿枪.py
|
Python
|
gpl-2.0
| 2,658
|
from nodeconductor.cost_tracking import CostTrackingStrategy, CostTrackingRegister, ConsumableItem
from . import models
class ExchangeTenantStrategy(CostTrackingStrategy):
resource_class = models.ExchangeTenant
class Types(object):
SUPPORT = 'support'
STORAGE = 'storage'
class Keys(object):
STORAGE = '1 GB'
SUPPORT = 'premium'
@classmethod
def get_consumable_items(cls):
return [
ConsumableItem(item_type=cls.Types.STORAGE, key=cls.Keys.STORAGE, name='1 GB of storage', units='GB'),
ConsumableItem(item_type=cls.Types.SUPPORT, key=cls.Keys.SUPPORT, name='Support: premium'),
]
@classmethod
def get_configuration(cls, tenant):
storage = tenant.quotas.get(name=models.ExchangeTenant.Quotas.mailbox_size).usage
return {
ConsumableItem(item_type=cls.Types.STORAGE, key=cls.Keys.STORAGE): float(storage) / 1024,
ConsumableItem(item_type=cls.Types.SUPPORT, key=cls.Keys.SUPPORT): 1,
}
CostTrackingRegister.register_strategy(ExchangeTenantStrategy)
|
opennode/nodeconductor-saltstack
|
src/nodeconductor_saltstack/exchange/cost_tracking.py
|
Python
|
mit
| 1,101
|
from mock import patch, Mock
from nefertari_guards.nefertari_sqla import ACLType
class TestACLType(object):
@patch.object(ACLType, 'stringify_acl')
@patch.object(ACLType, 'validate_acl')
def test_process_bind_param(self, mock_validate, mock_str):
mock_str.return_value = [[1, 2, [3]]]
obj = ACLType()
obj.process_bind_param([('a', 'b', 'c')], Mock())
mock_str.assert_called_once_with([('a', 'b', 'c')])
mock_validate.assert_called_once_with([[1, 2, [3]]])
|
brandicted/nefertari-guards
|
tests/test_nefertari_sqla.py
|
Python
|
apache-2.0
| 512
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _ # noqa
import horizon
from wildcard.api import keystone
from wildcard.dashboards.admin import dashboard
class Groups(horizon.Panel):
name = _("Groups")
slug = 'groups'
if keystone.VERSIONS.active >= 3:
dashboard.Admin.register(Groups)
|
kickstandproject/wildcard
|
wildcard/dashboards/admin/groups/panel.py
|
Python
|
apache-2.0
| 989
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Pestpp(CMakePackage):
"""PEST++ is a software suite aimed at supporting complex numerical
models in the decision-support context. Much focus has been devoted to
supporting environmental models (groundwater, surface water, etc) but
these tools are readily applicable to any computer model.
"""
homepage = "https://pesthomepage.org"
url = "https://github.com/usgs/pestpp/archive/5.0.5.tar.gz"
version('5.0.5', sha256='b9695724758f69c1199371608b01419973bd1475b1788039a2fab6313f6ed67c')
variant('mpi', default=True, description='Enable MPI support')
depends_on('cmake@3.9:', type='build')
depends_on('mpi', type=('build', 'run'), when='+mpi')
def install(self, spec, prefix):
install_tree('bin', prefix.bin)
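# Usage sketch (assumed): with this package file on the Spack repo path,
#   spack install pestpp +mpi
# builds PEST++ with MPI support via the declared variant.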
|
LLNL/spack
|
var/spack/repos/builtin/packages/pestpp/package.py
|
Python
|
lgpl-2.1
| 996
|
# Django settings for to project.
import os
from local_setting import *
from settings import DEBUG
TEMPLATE_DEBUG = DEBUG
MANAGERS = ADMINS
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),  # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
#STATIC_URL = '/static/'
STATIC_URL = 'http://disi.unitn.it/~tranquillini/static/todo/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'to.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'to.wsgi.application'
TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), '..', '../to/do/templates').replace('\\', '/'),)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'do',
'crispy_forms',
'django.contrib.humanize',
'social.apps.django_app.default',
'pomodoro',
)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'formatters': {
'standard': {
'format': "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)s] %(message)s",
'datefmt': "%d/%b/%Y %H:%M:%S"
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'do': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
'pomodoro': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
}
}
}
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
# 'social_auth.context_processors.social_auth_by_type_backends',
# 'general.context_processors.addProfile',
# 'general.context_processors.addAppName',
'django.core.context_processors.request',
# 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
'social.apps.django_app.context_processors.backends',
'social.apps.django_app.context_processors.login_redirect',
)
AUTHENTICATION_BACKENDS = (
# 'social.backends.open_id.OpenIdAuth',
'social.backends.google.GoogleOpenId',
# 'social.backends.google.GoogleOAuth2',
# 'social.backends.google.GoogleOAuth',
# 'social.backends.twitter.TwitterOAuth',
# 'social.backends.facebook.FacebookOAuth2',
# 'social.backends.yahoo.YahooOpenId',
# ...
# 'django.contrib.auth.backends.ModelBackend',
)
CRISPY_TEMPLATE_PACK = 'bootstrap3'
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
|
esseti/dododo-dadada
|
to/settings_local.py
|
Python
|
mit
| 6,738
|
import unittest
from PySide2.QtWidgets import QWidget, QMainWindow
from helper import UsesQApplication
class QWidgetInherit(QMainWindow):
    def __init__(self):
        # Deliberately calls the wrong base-class __init__ so that
        # instantiation fails; testInheritance checks the TypeError.
        QWidget.__init__(self)
class QWidgetTest(UsesQApplication):
def testInheritance(self):
self.assertRaises(TypeError, QWidgetInherit)
class QWidgetVisible(UsesQApplication):
def testBasic(self):
        # Also related to bug #244, on existence of setVisible
widget = QWidget()
        self.assertTrue(not widget.isVisible())
        widget.setVisible(True)
        self.assertTrue(widget.isVisible())
if __name__ == '__main__':
unittest.main()
|
BadSingleton/pyside2
|
tests/QtWidgets/qwidget_test.py
|
Python
|
lgpl-2.1
| 646
|
# vim:tw=50
""""While" Loops
Recursion is powerful, but not always convenient
or efficient for processing sequences. That's why
Python has **loops**.
A _loop_ is just what it sounds like: you do
something, then you go round and do it again, like
a track: you run around, then you run around again.
Loops let you do repetitive things, like printing
all of the elements of a list, or adding them all
together, without using recursion.
Python supports two kinds. We'll start with
**while loops**.
A |while| statement is like an |if| statement, in
that it executes the indented block if its condition is
|True| (nonzero). But, unlike |if|, it *keeps on
doing it* until the condition becomes |False| or
it hits a |break| statement. Forever.
The code window shows a while loop that prints
every element of a list. There's another one that
adds all of the elements. It does this
without recursion. Check it out.
Exercises
- Look at |print_all|. Why does it eventually
stop? What is the value of |i| when it does?
- Why does |slicing_print_all| stop? How does it
work?
"""
__doc__ = """Use while loops to do things repetitively."""
def print_all(seq):
"""Print all elements of seq."""
i = 0
while i < len(seq):
print("item", i, seq[i])
i = i + 1 # This is also spelled 'i += 1'
def slicing_print_all(seq):
"""Another way of using while - less efficient."""
while seq:
print(seq[0])
seq = seq[1:]
def add_all(seq):
"""Add all of the elements of seq."""
i = 0
s = 0
while i < len(seq):
s += seq[i]
i += 1
return s
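# Added illustration (not part of the original tutorial): the text above
# mentions the |break| statement, but none of the examples use it. This
# hypothetical helper shows break leaving a while loop early.
def find_first(seq, target):
    """Return the index of the first occurrence of target, or -1."""
    i = 0
    found = -1
    while i < len(seq):
        if seq[i] == target:
            found = i
            break  # Exit the loop immediately; no further iterations run.
        i += 1
    return found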
print("Using indices:")
print_all([1, 5, 8, "hello", 9])
print("Using slices:")
slicing_print_all(range(3))
print("Summing:")
print("sum of all:", add_all(range(1,12))) # Should be 66
|
shiblon/pytour
|
3/tutorials/while_loops.py
|
Python
|
apache-2.0
| 1,756
|
"""Functional tests for the Getstatus operation"""
import pytest
from pyxb import BIND
from pyxb.bundles.opengis import oseo_1_0 as oseo
from pyxb.bundles.wssplat import soap12
from pyxb.bundles.wssplat import wsse
import requests
pytestmark = pytest.mark.functional
class TestGetStatus(object):
def test_get_status(self, pyoseo_remote_server, pyoseo_server_user,
pyoseo_server_password, settings):
pass
|
pyoseo/pyoseo
|
tests/functionaltests/test_getstatus.py
|
Python
|
apache-2.0
| 445
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
import six
from six.moves.urllib import parse as urlparse
from sahara import conductor as c
from sahara import context
from sahara.plugins import base as plugin_base
from sahara.service import quotas
from sahara.utils import cluster as c_u
from sahara.utils import general as g
from sahara.utils.notification import sender
from sahara.utils.openstack import base as b
from sahara.utils.openstack import nova
conductor = c.API
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
OPS = None
def setup_service_api(ops):
global OPS
OPS = ops
# Cluster ops
def get_clusters(**kwargs):
return conductor.cluster_get_all(context.ctx(), **kwargs)
def get_cluster(id, show_progress=False):
return conductor.cluster_get(context.ctx(), id, show_progress)
def scale_cluster(id, data):
context.set_current_cluster_id(id)
ctx = context.ctx()
cluster = conductor.cluster_get(ctx, id)
plugin = plugin_base.PLUGINS.get_plugin(cluster.plugin_name)
existing_node_groups = data.get('resize_node_groups', [])
additional_node_groups = data.get('add_node_groups', [])
# the next map is the main object we will work with
# to_be_enlarged : {node_group_id: desired_amount_of_instances}
to_be_enlarged = {}
for ng in existing_node_groups:
ng_id = g.find(cluster.node_groups, name=ng['name'])['id']
to_be_enlarged.update({ng_id: ng['count']})
additional = construct_ngs_for_scaling(cluster, additional_node_groups)
cluster = conductor.cluster_get(ctx, cluster)
_add_ports_for_auto_sg(ctx, cluster, plugin)
try:
cluster = c_u.change_cluster_status(
cluster, c_u.CLUSTER_STATUS_VALIDATING)
quotas.check_scaling(cluster, to_be_enlarged, additional)
plugin.recommend_configs(cluster, scaling=True)
plugin.validate_scaling(cluster, to_be_enlarged, additional)
except Exception as e:
with excutils.save_and_reraise_exception():
c_u.clean_cluster_from_empty_ng(cluster)
c_u.change_cluster_status(
cluster, c_u.CLUSTER_STATUS_ACTIVE, six.text_type(e))
# If we are here validation is successful.
# So let's update to_be_enlarged map:
to_be_enlarged.update(additional)
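    # Node groups that are not being resized keep their current size, so the
    # resulting map covers every node group in the cluster.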
for node_group in cluster.node_groups:
if node_group.id not in to_be_enlarged:
to_be_enlarged[node_group.id] = node_group.count
OPS.provision_scaled_cluster(id, to_be_enlarged)
return cluster
def create_cluster(values):
plugin = plugin_base.PLUGINS.get_plugin(values['plugin_name'])
return _cluster_create(values, plugin)
def create_multiple_clusters(values):
num_of_clusters = values['count']
clusters = []
plugin = plugin_base.PLUGINS.get_plugin(values['plugin_name'])
for counter in range(num_of_clusters):
cluster_dict = values.copy()
cluster_name = cluster_dict['name']
cluster_dict['name'] = get_multiple_cluster_name(num_of_clusters,
cluster_name,
counter + 1)
cluster = _cluster_create(cluster_dict, plugin)
clusters.append(cluster.id)
clusters_dict = {'clusters': clusters}
return clusters_dict
def _cluster_create(values, plugin):
ctx = context.ctx()
cluster = conductor.cluster_create(ctx, values)
context.set_current_cluster_id(cluster.id)
sender.notify(ctx, cluster.id, cluster.name, "New",
"create")
_add_ports_for_auto_sg(ctx, cluster, plugin)
# validating cluster
try:
plugin.recommend_configs(cluster)
cluster = c_u.change_cluster_status(
cluster, c_u.CLUSTER_STATUS_VALIDATING)
quotas.check_cluster(cluster)
plugin.validate(cluster)
except Exception as e:
with excutils.save_and_reraise_exception():
c_u.change_cluster_status(
cluster, c_u.CLUSTER_STATUS_ERROR, six.text_type(e))
OPS.provision_cluster(cluster.id)
return cluster
def get_multiple_cluster_name(num_of_clusters, name, counter):
return "%%s-%%0%dd" % len(str(num_of_clusters)) % (name, counter)
def _add_ports_for_auto_sg(ctx, cluster, plugin):
for ng in cluster.node_groups:
if ng.auto_security_group:
ports = {'open_ports': plugin.get_open_ports(ng)}
conductor.node_group_update(ctx, ng, ports)
def terminate_cluster(id):
context.set_current_cluster_id(id)
cluster = c_u.change_cluster_status(id, c_u.CLUSTER_STATUS_DELETING)
if cluster is None:
return
OPS.terminate_cluster(id)
sender.notify(context.ctx(), cluster.id, cluster.name, cluster.status,
"delete")
def update_cluster(id, values):
return conductor.cluster_update(context.ctx(), id, values)
# ClusterTemplate ops
def get_cluster_templates(**kwargs):
return conductor.cluster_template_get_all(context.ctx(), **kwargs)
def get_cluster_template(id):
return conductor.cluster_template_get(context.ctx(), id)
def create_cluster_template(values):
return conductor.cluster_template_create(context.ctx(), values)
def terminate_cluster_template(id):
return conductor.cluster_template_destroy(context.ctx(), id)
def update_cluster_template(id, values):
return conductor.cluster_template_update(context.ctx(), id, values)
# NodeGroupTemplate ops
def get_node_group_templates(**kwargs):
return conductor.node_group_template_get_all(context.ctx(), **kwargs)
def get_node_group_template(id):
return conductor.node_group_template_get(context.ctx(), id)
def create_node_group_template(values):
return conductor.node_group_template_create(context.ctx(), values)
def terminate_node_group_template(id):
return conductor.node_group_template_destroy(context.ctx(), id)
def update_node_group_template(id, values):
return conductor.node_group_template_update(context.ctx(), id, values)
# Plugins ops
def get_plugins():
return plugin_base.PLUGINS.get_plugins()
def get_plugin(plugin_name, version=None):
plugin = plugin_base.PLUGINS.get_plugin(plugin_name)
if plugin:
res = plugin.as_resource()
if version:
if version in plugin.get_versions():
configs = plugin.get_all_configs(version)
res._info['configs'] = [c.dict for c in configs]
processes = plugin.get_node_processes(version)
res._info['node_processes'] = processes
required_image_tags = plugin.get_required_image_tags(version)
res._info['required_image_tags'] = required_image_tags
else:
return None
return res
def convert_to_cluster_template(plugin_name, version, template_name,
config_file):
plugin = plugin_base.PLUGINS.get_plugin(plugin_name)
return plugin.convert(config_file, plugin_name, version,
urlparse.unquote(template_name),
conductor.cluster_template_create)
def construct_ngs_for_scaling(cluster, additional_node_groups):
ctx = context.ctx()
additional = {}
    for ng in additional_node_groups:
        count = ng['count']
        # Create the node group with zero instances; the desired count is
        # recorded in the returned map and applied later during provisioning.
        ng['count'] = 0
        ng_id = conductor.node_group_add(ctx, cluster, ng)
        additional.update({ng_id: count})
return additional
# Image Registry
def get_images(name, tags):
return b.execute_with_retries(
nova.client().images.list_registered, name, tags)
def get_image(**kwargs):
if len(kwargs) == 1 and 'id' in kwargs:
return b.execute_with_retries(nova.client().images.get, kwargs['id'])
else:
return b.execute_with_retries(nova.client().images.find, **kwargs)
def get_registered_image(id):
return b.execute_with_retries(
nova.client().images.get_registered_image, id)
def register_image(image_id, username, description=None):
client = nova.client()
b.execute_with_retries(
client.images.set_description, image_id, username, description)
return b.execute_with_retries(client.images.get, image_id)
def unregister_image(image_id):
client = nova.client()
b.execute_with_retries(client.images.unset_description, image_id)
return b.execute_with_retries(client.images.get, image_id)
def add_image_tags(image_id, tags):
client = nova.client()
b.execute_with_retries(client.images.tag, image_id, tags)
return b.execute_with_retries(client.images.get, image_id)
def remove_image_tags(image_id, tags):
client = nova.client()
b.execute_with_retries(client.images.untag, image_id, tags)
return b.execute_with_retries(client.images.get, image_id)
|
zhangjunli177/sahara
|
sahara/service/api.py
|
Python
|
apache-2.0
| 9,423
|
'''
This file is part of GEAR.
GEAR is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/lgpl.html>.
Author: Jeremie Passerin geerem@hotmail.com
Company: Studio Nest (TM)
Date: 2010 / 11 / 15
'''
## @package gear.xsi.rig.component.control_02
# @author Jeremie Passerin
#
##########################################################
# GLOBAL
##########################################################
import os
from gear.xsi import xsi, c, dynDispatch, XSIFactory
from gear.xsi.rig.component import MainComponent
import gear.xsi.ppg as ppg
import gear.xsi.parameter as par
import gear.xsi.primitive as pri
##########################################################
# COMPONENT
##########################################################
## The main component class.
class Component(MainComponent):
# =====================================================
# OBJECTS
# =====================================================
## Build the initial hierarchy of the component.\n
# Add the root and if needed the shadow root
# @param self
def initialHierarchy(self):
# Get color
if self.settings["color"] == 0:
color = self.color_ik
elif self.settings["color"] == 1:
color = self.color_fk
else:
color = [self.settings["color_r"], self.settings["color_g"], self.settings["color_b"]]
        # Copy controller from given icon
self.root = self.guide.prim["icon"].create(self.model, self.getName("ctl"), self.guide.tra["icon"], color)
par.setRotOrder(self.root, self.settings["rotOrderSel"])
self.addToCtlGroup(self.root)
# Shd --------------------------------
if self.options["shadowRig"] and self.settings["shadow"]:
self.shd_org = self.rig.shd_org.AddNull(self.getName("shd_org"))
self.addToGroup(self.shd_org, "hidden")
## Add all the objects needed to create the component.
# @param self
def addObjects(self):
if self.settings["shadow"]:
self.addShadow(self.root, 0)
# =====================================================
# PROPERTY
# =====================================================
## Add parameters to the anim and setup properties to control the component.
# @param self
def addParameters(self):
return
## Define the layout of the anim and setup properties.
# @param self
def addLayout(self):
return
## Define the logic of the anim and setup properties.
# @param self
def addLogic(self):
return
# =====================================================
# OPERATORS
# =====================================================
## Apply operators, constraints, expressions to the hierarchy.\n
# In order to keep the code clean and easier to debug,
# we shouldn't create any new object in this method.
# @param self
def addOperators(self):
return
# =====================================================
# CONNECTOR
# =====================================================
## Post connection action
# @param self
def postConnect(self):
        # As the root is the main controller, we need to set the neutral pose and lock the parameters after the connection.
xsi.SetNeutralPose(self.root)
keyables = [name for name in self.local_params if self.settings[name]]
par.setKeyableParameters(self.root, keyables)
|
miquelcampos/GEAR_mc
|
gear/xsi/rig/component/control_02/__init__.py
|
Python
|
lgpl-3.0
| 4,103
|
from redcmd.api import subcmd, Arg
from redlib.api.colors import colorlist
from .base import SourceSubcommand
from ...source.bitmap import BitmapParams
__all__ = ['ColorSubcommand']
class ColorSubcommand(SourceSubcommand):
@subcmd
def color(self, color=Arg(choices=colorlist.keys(), default=None, opt=True)):
'''Solid color
run "wallp list colors" to see a list of all supported colors'''
bp = BitmapParams(color=color)
self.change_wallpaper(bp)
|
amol9/wallp
|
wallp/subcmd/source/color.py
|
Python
|
mit
| 466
|
from core.himesis import Himesis
class HEEnum(Himesis):
def __init__(self):
"""
Creates the himesis graph representing the AToM3 model HEEnum.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HEEnum, self).__init__(name='HEEnum', num_nodes=41, edges=[])
# Add the edges
self.add_edges([[0, 3], [3, 6], [1, 4], [4, 7], [6, 8], [8, 32], [6, 9], [9, 33], [6, 10], [10, 34], [6, 11], [11, 35], [7, 12], [12, 36], [13, 14], [14, 36], [13, 15], [15, 32], [7, 16], [16, 37], [17, 18], [18, 37], [17, 19], [19, 33], [7, 20], [20, 38], [21, 22], [22, 38], [21, 23], [23, 34], [7, 24], [24, 39], [25, 26], [26, 39], [25, 27], [27, 35], [7, 28], [28, 40], [29, 30], [30, 40], [29, 31], [31, 5], [0, 2], [2, 1]])
# Set the graph attributes
self["mm__"] = ['HimesisMM']
self["name"] = """EEnum"""
self["GUID__"] = 7654161894967553874
# Set the node attributes
self.vs[0]["mm__"] = """MatchModel"""
self.vs[0]["GUID__"] = 1307303483444417734
self.vs[1]["mm__"] = """ApplyModel"""
self.vs[1]["GUID__"] = 3504163004540724927
self.vs[2]["mm__"] = """paired_with"""
self.vs[2]["GUID__"] = 5726761322004392716
self.vs[3]["mm__"] = """match_contains"""
self.vs[3]["GUID__"] = 971648018768726432
self.vs[4]["mm__"] = """apply_contains"""
self.vs[4]["GUID__"] = 553989825117412628
self.vs[5]["name"] = """solveRef"""
self.vs[5]["mm__"] = """Constant"""
self.vs[5]["Type"] = """'String'"""
self.vs[5]["GUID__"] = 8129336481298719663
self.vs[6]["name"] = """"""
self.vs[6]["classtype"] = """EEnum"""
self.vs[6]["mm__"] = """EEnum"""
self.vs[6]["cardinality"] = """+"""
self.vs[6]["GUID__"] = 4452859244075915800
self.vs[7]["name"] = """"""
self.vs[7]["classtype"] = """EEnum"""
self.vs[7]["mm__"] = """EEnum"""
self.vs[7]["cardinality"] = """1"""
self.vs[7]["GUID__"] = 7939553005770725094
self.vs[8]["mm__"] = """hasAttribute_S"""
self.vs[8]["GUID__"] = 611489334023751963
self.vs[9]["mm__"] = """hasAttribute_S"""
self.vs[9]["GUID__"] = 8556568753334853994
self.vs[10]["mm__"] = """hasAttribute_S"""
self.vs[10]["GUID__"] = 767130501981851133
self.vs[11]["mm__"] = """hasAttribute_S"""
self.vs[11]["GUID__"] = 3855434591641882583
self.vs[12]["mm__"] = """hasAttribute_T"""
self.vs[12]["GUID__"] = 515184369805836064
self.vs[13]["name"] = """eq_"""
self.vs[13]["mm__"] = """Equation"""
self.vs[13]["GUID__"] = 2948338452213920919
self.vs[14]["mm__"] = """leftExpr"""
self.vs[14]["GUID__"] = 46637178724164708
self.vs[15]["mm__"] = """rightExpr"""
self.vs[15]["GUID__"] = 4338582618187923895
self.vs[16]["mm__"] = """hasAttribute_T"""
self.vs[16]["GUID__"] = 861030570117443596
self.vs[17]["name"] = """eq_"""
self.vs[17]["mm__"] = """Equation"""
self.vs[17]["GUID__"] = 6151982392055898063
self.vs[18]["mm__"] = """leftExpr"""
self.vs[18]["GUID__"] = 1609113787377456741
self.vs[19]["mm__"] = """rightExpr"""
self.vs[19]["GUID__"] = 444037790205565412
self.vs[20]["mm__"] = """hasAttribute_T"""
self.vs[20]["GUID__"] = 481267516306685126
self.vs[21]["name"] = """eq_"""
self.vs[21]["mm__"] = """Equation"""
self.vs[21]["GUID__"] = 4727491185378819236
self.vs[22]["mm__"] = """leftExpr"""
self.vs[22]["GUID__"] = 3037886333904471379
self.vs[23]["mm__"] = """rightExpr"""
self.vs[23]["GUID__"] = 8261777769425074135
self.vs[24]["mm__"] = """hasAttribute_T"""
self.vs[24]["GUID__"] = 1293936003138066239
self.vs[25]["name"] = """eq_"""
self.vs[25]["mm__"] = """Equation"""
self.vs[25]["GUID__"] = 7801521492907750953
self.vs[26]["mm__"] = """leftExpr"""
self.vs[26]["GUID__"] = 4846782441602656687
self.vs[27]["mm__"] = """rightExpr"""
self.vs[27]["GUID__"] = 6787192741022691846
self.vs[28]["mm__"] = """hasAttribute_T"""
self.vs[28]["GUID__"] = 3746637733830018153
self.vs[29]["name"] = """eq_"""
self.vs[29]["mm__"] = """Equation"""
self.vs[29]["GUID__"] = 3412135983838302268
self.vs[30]["mm__"] = """leftExpr"""
self.vs[30]["GUID__"] = 5978595871180659885
self.vs[31]["mm__"] = """rightExpr"""
self.vs[31]["GUID__"] = 4472642673142270151
self.vs[32]["name"] = """name"""
self.vs[32]["mm__"] = """Attribute"""
self.vs[32]["Type"] = """'String'"""
self.vs[32]["GUID__"] = 7264718490474641399
self.vs[33]["name"] = """instanceClassName"""
self.vs[33]["mm__"] = """Attribute"""
self.vs[33]["Type"] = """'String'"""
self.vs[33]["GUID__"] = 8783955521896071225
self.vs[34]["name"] = """instanceTypeName"""
self.vs[34]["mm__"] = """Attribute"""
self.vs[34]["Type"] = """'String'"""
self.vs[34]["GUID__"] = 3186768817090548018
self.vs[35]["name"] = """serializable"""
self.vs[35]["mm__"] = """Attribute"""
self.vs[35]["Type"] = """'String'"""
self.vs[35]["GUID__"] = 7526899069599200399
self.vs[36]["name"] = """name"""
self.vs[36]["mm__"] = """Attribute"""
self.vs[36]["Type"] = """'String'"""
self.vs[36]["GUID__"] = 3008455561639940624
self.vs[37]["name"] = """instanceClassName"""
self.vs[37]["mm__"] = """Attribute"""
self.vs[37]["Type"] = """'String'"""
self.vs[37]["GUID__"] = 4528943186717645528
self.vs[38]["name"] = """instanceTypeName"""
self.vs[38]["mm__"] = """Attribute"""
self.vs[38]["Type"] = """'String'"""
self.vs[38]["GUID__"] = 5356737330693104171
self.vs[39]["name"] = """serializable"""
self.vs[39]["mm__"] = """Attribute"""
self.vs[39]["Type"] = """'String'"""
self.vs[39]["GUID__"] = 3090344177434721216
self.vs[40]["name"] = """ApplyAttribute"""
self.vs[40]["mm__"] = """Attribute"""
self.vs[40]["Type"] = """'String'"""
self.vs[40]["GUID__"] = 1205462576710215199
|
levilucio/SyVOLT
|
ECore_Copier_MM/transformation-Large/HEEnum.py
|
Python
|
mit
| 6,426
|
#!/usr/bin/python3
# vim: ts=4:sw=4:expandtab
# BleachBit
# Copyright (C) 2008-2021 Andrew Ziem
# https://www.bleachbit.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
_DIRECTORY_TO_WALK = 'dist'
_DIRECTORY_TO_SEPARATE = r'dist\share\locale'
# NSIS script needs a prefix in the paths included in order to access them.
_DIRECTORY_PREFIX_FOR_NSIS = '..\\'
_FILES_TO_INSTALL_PATH = r'windows\NsisInclude\FilesToInstall.nsh'
_FILES_TO_UNINSTALL_PATH = r'windows\NsisInclude\FilesToUninstall.nsh'
_LOCALE_TO_INSTALL_PATH = r'windows\NsisInclude\LocaleToInstall.nsh'
_LOCALE_TO_UNINSTALL_PATH = r'windows\NsisInclude\LocaleToUninstall.nsh'
_REBOOTOK_FILE_EXTENSIONS = ['exe', 'pyd', 'dll']
def write_nsis_expressions_to_files():
"""Generates files containing NSIS expressions for add and remove files."""
(install_expressions,
uninstall_expressions) = _generate_add_remove_nsis_expressions(
_DIRECTORY_TO_WALK, directory_to_separate=_DIRECTORY_TO_SEPARATE
)
(install_locale_expressions,
uninstall_locale_expressions) = _generate_add_remove_nsis_expressions(
_DIRECTORY_TO_SEPARATE, parent_directory=os.path.relpath(
_DIRECTORY_TO_SEPARATE, _DIRECTORY_TO_WALK)
)
nsisexpressions_filename = [
(install_expressions, _FILES_TO_INSTALL_PATH),
(uninstall_expressions, _FILES_TO_UNINSTALL_PATH),
(install_locale_expressions, _LOCALE_TO_INSTALL_PATH),
(uninstall_locale_expressions, _LOCALE_TO_UNINSTALL_PATH),
]
for nsis_expressions, filename in nsisexpressions_filename:
with open(filename, 'w') as f:
f.write(nsis_expressions)
def _generate_add_remove_nsis_expressions(directory_to_walk, parent_directory=None, directory_to_separate=None):
"""Generates NSIS expressions for copy and delete the files and folders from given folder."""
install_expressions = ''
uninstall_expressions = ''
for relative_folder_path, full_filepaths, file_names in _walk_with_parent_directory_and_filepaths(directory_to_walk, directory_to_separate, parent_directory):
install_expressions += 'SetOutPath "$INSTDIR\\{}"\n'.format(
relative_folder_path)
uninstall_expressions = 'RMDir "$INSTDIR\\{}"'.format(
relative_folder_path) + '\n' + uninstall_expressions
if full_filepaths:
install_expressions += 'File '
install_expressions += ' '.join(
['"{}{}"'.format(_DIRECTORY_PREFIX_FOR_NSIS, filepath)
for filepath in full_filepaths]
)
install_expressions += '\n'
folder_path = '$INSTDIR' if relative_folder_path == '.' else '$INSTDIR\\{}'.format(
relative_folder_path)
delete_expressions = _generate_delete_expressions(
file_names, folder_path)
uninstall_expressions = '\n'.join(
delete_expressions) + '\n' + uninstall_expressions
return install_expressions, uninstall_expressions[:-1]
def _generate_delete_expressions(file_names, folder_path):
delete_expressions = []
for file_name in file_names:
file_extension = os.path.splitext(file_name)[1][1:]
reboot_ok = '/REBOOTOK ' if file_extension in _REBOOTOK_FILE_EXTENSIONS else ''
delete_expressions.append(r'Delete {}"{}\{}"'.format(
reboot_ok, folder_path, file_name))
return delete_expressions
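# Illustrative example (hypothetical inputs, not part of the original file):
# _generate_delete_expressions(['app.exe', 'readme.txt'], '$INSTDIR') returns
# ['Delete /REBOOTOK "$INSTDIR\app.exe"', 'Delete "$INSTDIR\readme.txt"'],
# because .exe is listed in _REBOOTOK_FILE_EXTENSIONS and .txt is not.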
def _walk_with_parent_directory_and_filepaths(directory_to_walk, directory_to_separate=None, parent_directory=None):
for root, _dirs, files in os.walk(directory_to_walk):
if directory_to_separate is not None and root.startswith(directory_to_separate):
continue
filepaths = [os.path.join(root, file) for file in files]
if parent_directory is not None:
rel_directory_path = os.path.relpath(root, directory_to_walk)
if rel_directory_path == '.':
yield (parent_directory, filepaths, files)
else:
yield (
os.path.join(parent_directory,
rel_directory_path), filepaths, files
)
else:
yield (
os.path.relpath(root, directory_to_walk), filepaths, files
)
|
bleachbit/bleachbit
|
windows/NsisUtilities.py
|
Python
|
gpl-3.0
| 4,903
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.core.urlresolvers import reverse
from django.test import TestCase, override_settings
from wagtail.tests.utils import WagtailTestUtils
from wagtail.wagtailcore.models import Page, Site
from wagtail.wagtailredirects import models
@override_settings(ALLOWED_HOSTS=['testserver', 'localhost', 'test.example.com', 'other.example.com'])
class TestRedirects(TestCase):
fixtures = ['test.json']
def test_path_normalisation(self):
# Shortcut to normalise function (to keep things tidy)
normalise_path = models.Redirect.normalise_path
# Create a path
path = normalise_path('/Hello/world.html;fizz=three;buzz=five?foo=Bar&Baz=quux2')
        # Test against equivalent paths
self.assertEqual(path, normalise_path( # The exact same URL
'/Hello/world.html;fizz=three;buzz=five?foo=Bar&Baz=quux2'
))
self.assertEqual(path, normalise_path( # Scheme, hostname and port ignored
'http://mywebsite.com:8000/Hello/world.html;fizz=three;buzz=five?foo=Bar&Baz=quux2'
))
self.assertEqual(path, normalise_path( # Leading slash can be omitted
'Hello/world.html;fizz=three;buzz=five?foo=Bar&Baz=quux2'
))
self.assertEqual(path, normalise_path( # Trailing slashes are ignored
'Hello/world.html/;fizz=three;buzz=five?foo=Bar&Baz=quux2'
))
self.assertEqual(path, normalise_path( # Fragments are ignored
'/Hello/world.html;fizz=three;buzz=five?foo=Bar&Baz=quux2#cool'
))
self.assertEqual(path, normalise_path( # Order of query string parameters is ignored
'/Hello/world.html;fizz=three;buzz=five?Baz=quux2&foo=Bar'
))
self.assertEqual(path, normalise_path( # Order of parameters is ignored
'/Hello/world.html;buzz=five;fizz=three?foo=Bar&Baz=quux2'
))
self.assertEqual(path, normalise_path( # Leading whitespace
' /Hello/world.html;fizz=three;buzz=five?foo=Bar&Baz=quux2'
))
self.assertEqual(path, normalise_path( # Trailing whitespace
'/Hello/world.html;fizz=three;buzz=five?foo=Bar&Baz=quux2 '
))
# Test against different paths
self.assertNotEqual(path, normalise_path( # 'hello' is lowercase
'/hello/world.html;fizz=three;buzz=five?foo=Bar&Baz=quux2'
))
self.assertNotEqual(path, normalise_path( # No '.html'
'/Hello/world;fizz=three;buzz=five?foo=Bar&Baz=quux2'
))
self.assertNotEqual(path, normalise_path( # Query string parameter value has wrong case
'/Hello/world.html;fizz=three;buzz=five?foo=bar&Baz=Quux2'
))
self.assertNotEqual(path, normalise_path( # Query string parameter name has wrong case
'/Hello/world.html;fizz=three;buzz=five?foo=Bar&baz=quux2'
))
self.assertNotEqual(path, normalise_path( # Parameter value has wrong case
'/Hello/world.html;fizz=three;buzz=Five?foo=Bar&Baz=quux2'
))
self.assertNotEqual(path, normalise_path( # Parameter name has wrong case
'/Hello/world.html;Fizz=three;buzz=five?foo=Bar&Baz=quux2'
))
self.assertNotEqual(path, normalise_path( # Missing params
'/Hello/world.html?foo=Bar&Baz=quux2'
))
self.assertNotEqual(path, normalise_path( # 'WORLD' is uppercase
'/Hello/WORLD.html;fizz=three;buzz=five?foo=Bar&Baz=quux2'
))
self.assertNotEqual(path, normalise_path( # '.htm' is not the same as '.html'
'/Hello/world.htm;fizz=three;buzz=five?foo=Bar&Baz=quux2'
))
self.assertEqual('/', normalise_path('/')) # '/' should stay '/'
# Normalise some rubbish to make sure it doesn't crash
normalise_path('This is not a URL')
normalise_path('//////hello/world')
normalise_path('!#@%$*')
normalise_path('C:\\Program Files (x86)\\Some random program\\file.txt')
def test_unicode_path_normalisation(self):
normalise_path = models.Redirect.normalise_path
self.assertEqual(
'/here/tésting-ünicode', # stays the same
normalise_path('/here/tésting-ünicode')
)
self.assertNotEqual( # Doesn't remove unicode characters
'/here/testing-unicode',
normalise_path('/here/tésting-ünicode')
)
def test_basic_redirect(self):
# Create a redirect
redirect = models.Redirect(old_path='/redirectme', redirect_link='/redirectto')
redirect.save()
# Navigate to it
response = self.client.get('/redirectme/')
# Check that we were redirected
self.assertRedirects(response, '/redirectto', status_code=301, fetch_redirect_response=False)
def test_temporary_redirect(self):
# Create a redirect
redirect = models.Redirect(old_path='/redirectme', redirect_link='/redirectto', is_permanent=False)
redirect.save()
# Navigate to it
response = self.client.get('/redirectme/')
# Check that we were redirected temporarily
self.assertRedirects(response, '/redirectto', status_code=302, fetch_redirect_response=False)
def test_redirect_stripping_query_string(self):
# Create a redirect which includes a query string
redirect_with_query_string = models.Redirect(
old_path='/redirectme?foo=Bar', redirect_link='/with-query-string-only'
)
redirect_with_query_string.save()
# ... and another redirect without the query string
redirect_without_query_string = models.Redirect(old_path='/redirectme', redirect_link='/without-query-string')
redirect_without_query_string.save()
# Navigate to the redirect with the query string
r_matching_qs = self.client.get('/redirectme/?foo=Bar')
self.assertRedirects(r_matching_qs, '/with-query-string-only', status_code=301, fetch_redirect_response=False)
# Navigate to the redirect with a different query string
# This should strip out the query string and match redirect_without_query_string
r_no_qs = self.client.get('/redirectme/?utm_source=irrelevant')
self.assertRedirects(r_no_qs, '/without-query-string', status_code=301, fetch_redirect_response=False)
def test_redirect_to_page(self):
christmas_page = Page.objects.get(url_path='/home/events/christmas/')
models.Redirect.objects.create(old_path='/xmas', redirect_page=christmas_page)
response = self.client.get('/xmas/', HTTP_HOST='test.example.com')
self.assertRedirects(response, 'http://test.example.com/events/christmas/', status_code=301, fetch_redirect_response=False)
def test_redirect_from_any_site(self):
contact_page = Page.objects.get(url_path='/home/contact-us/')
Site.objects.create(hostname='other.example.com', port=80, root_page=contact_page)
christmas_page = Page.objects.get(url_path='/home/events/christmas/')
models.Redirect.objects.create(old_path='/xmas', redirect_page=christmas_page)
# no site was specified on the redirect, so it should redirect regardless of hostname
response = self.client.get('/xmas/', HTTP_HOST='localhost')
self.assertRedirects(response, 'http://localhost/events/christmas/', status_code=301, fetch_redirect_response=False)
response = self.client.get('/xmas/', HTTP_HOST='other.example.com')
self.assertRedirects(response, 'http://localhost/events/christmas/', status_code=301, fetch_redirect_response=False)
def test_redirect_from_specific_site(self):
contact_page = Page.objects.get(url_path='/home/contact-us/')
other_site = Site.objects.create(hostname='other.example.com', port=80, root_page=contact_page)
christmas_page = Page.objects.get(url_path='/home/events/christmas/')
models.Redirect.objects.create(old_path='/xmas', redirect_page=christmas_page, site=other_site)
# redirect should only respond when site is other_site
response = self.client.get('/xmas/', HTTP_HOST='other.example.com')
self.assertRedirects(response, 'http://localhost/events/christmas/', status_code=301, fetch_redirect_response=False)
response = self.client.get('/xmas/', HTTP_HOST='localhost')
self.assertEqual(response.status_code, 404)
def test_duplicate_redirects_when_match_is_for_generic(self):
contact_page = Page.objects.get(url_path='/home/contact-us/')
site = Site.objects.create(hostname='other.example.com', port=80, root_page=contact_page)
# two redirects, one for any site, one for specific
models.Redirect.objects.create(old_path='/xmas', redirect_link='/generic')
models.Redirect.objects.create(site=site, old_path='/xmas', redirect_link='/site-specific')
response = self.client.get('/xmas/')
# the redirect which matched was /generic
self.assertRedirects(response, '/generic', status_code=301, fetch_redirect_response=False)
def test_duplicate_redirects_with_query_string_when_match_is_for_generic(self):
contact_page = Page.objects.get(url_path='/home/contact-us/')
site = Site.objects.create(hostname='other.example.com', port=80, root_page=contact_page)
# two redirects, one for any site, one for specific, both with query string
models.Redirect.objects.create(old_path='/xmas?foo=Bar', redirect_link='/generic-with-query-string')
models.Redirect.objects.create(site=site, old_path='/xmas?foo=Bar', redirect_link='/site-specific-with-query-string')
# and two redirects, one for any site, one for specific, without query strings
models.Redirect.objects.create(old_path='/xmas', redirect_link='/generic')
models.Redirect.objects.create(site=site, old_path='/xmas', redirect_link='/site-specific')
response = self.client.get('/xmas/?foo=Bar')
# the redirect which matched was /generic-with-query-string
self.assertRedirects(response, '/generic-with-query-string', status_code=301, fetch_redirect_response=False)
# now use a non-matching query string
response = self.client.get('/xmas/?foo=Baz')
# the redirect which matched was /generic
self.assertRedirects(response, '/generic', status_code=301, fetch_redirect_response=False)
def test_duplicate_redirects_when_match_is_for_specific(self):
contact_page = Page.objects.get(url_path='/home/contact-us/')
site = Site.objects.create(hostname='other.example.com', port=80, root_page=contact_page)
# two redirects, one for any site, one for specific
models.Redirect.objects.create(old_path='/xmas', redirect_link='/generic')
models.Redirect.objects.create(site=site, old_path='/xmas', redirect_link='/site-specific')
response = self.client.get('/xmas/', HTTP_HOST='other.example.com')
# the redirect which matched was /site-specific
self.assertRedirects(response, 'http://other.example.com/site-specific', status_code=301, fetch_redirect_response=False)
def test_duplicate_redirects_with_query_string_when_match_is_for_specific_with_qs(self):
contact_page = Page.objects.get(url_path='/home/contact-us/')
site = Site.objects.create(hostname='other.example.com', port=80, root_page=contact_page)
# two redirects, one for any site, one for specific, both with query string
models.Redirect.objects.create(old_path='/xmas?foo=Bar', redirect_link='/generic-with-query-string')
models.Redirect.objects.create(site=site, old_path='/xmas?foo=Bar', redirect_link='/site-specific-with-query-string')
# and two redirects, one for any site, one for specific, without query strings
models.Redirect.objects.create(old_path='/xmas', redirect_link='/generic')
models.Redirect.objects.create(site=site, old_path='/xmas', redirect_link='/site-specific')
response = self.client.get('/xmas/?foo=Bar', HTTP_HOST='other.example.com')
# the redirect which matched was /site-specific-with-query-string
self.assertRedirects(response, 'http://other.example.com/site-specific-with-query-string', status_code=301, fetch_redirect_response=False)
# now use a non-matching query string
response = self.client.get('/xmas/?foo=Baz', HTTP_HOST='other.example.com')
# the redirect which matched was /site-specific
self.assertRedirects(response, 'http://other.example.com/site-specific', status_code=301, fetch_redirect_response=False)
def test_duplicate_page_redirects_when_match_is_for_specific(self):
contact_page = Page.objects.get(url_path='/home/contact-us/')
site = Site.objects.create(hostname='other.example.com', port=80, root_page=contact_page)
christmas_page = Page.objects.get(url_path='/home/events/christmas/')
# two redirects, one for any site, one for specific
models.Redirect.objects.create(old_path='/xmas', redirect_page=contact_page)
models.Redirect.objects.create(site=site, old_path='/xmas', redirect_page=christmas_page)
        # the site-specific redirect wins; christmas_page is not served on other.example.com, so its URL resolves on localhost
response = self.client.get('/xmas/', HTTP_HOST='other.example.com')
self.assertRedirects(response, 'http://localhost/events/christmas/', status_code=301, fetch_redirect_response=False)
def test_redirect_with_unicode_in_url(self):
redirect = models.Redirect(old_path='/tésting-ünicode', redirect_link='/redirectto')
redirect.save()
# Navigate to it
response = self.client.get('/tésting-ünicode/')
self.assertRedirects(response, '/redirectto', status_code=301, fetch_redirect_response=False)
def test_redirect_with_encoded_url(self):
redirect = models.Redirect(old_path='/t%C3%A9sting-%C3%BCnicode', redirect_link='/redirectto')
redirect.save()
# Navigate to it
response = self.client.get('/t%C3%A9sting-%C3%BCnicode/')
self.assertRedirects(response, '/redirectto', status_code=301, fetch_redirect_response=False)
class TestRedirectsIndexView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailredirects:index'), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailredirects/index.html')
def test_search(self):
response = self.get({'q': "Hello"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], "Hello")
def test_pagination(self):
pages = ['0', '1', '-1', '9999', 'Not a page']
for page in pages:
response = self.get({'p': page})
self.assertEqual(response.status_code, 200)
def test_listing_order(self):
for i in range(0, 10):
models.Redirect.objects.create(old_path="/redirect%d" % i, redirect_link="http://torchbox.com/")
models.Redirect.objects.create(old_path="/aaargh", redirect_link="http://torchbox.com/")
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['redirects'][0].old_path, "/aaargh")
class TestRedirectsAddView(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailredirects:add'), params)
def post(self, post_data={}):
return self.client.post(reverse('wagtailredirects:add'), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailredirects/add.html')
def test_add(self):
response = self.post({
'old_path': '/test',
'site': '',
'is_permanent': 'on',
'redirect_link': 'http://www.test.com/',
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailredirects:index'))
# Check that the redirect was created
redirects = models.Redirect.objects.filter(old_path='/test')
self.assertEqual(redirects.count(), 1)
self.assertEqual(redirects.first().redirect_link, 'http://www.test.com/')
self.assertEqual(redirects.first().site, None)
def test_add_with_site(self):
localhost = Site.objects.get(hostname='localhost')
response = self.post({
'old_path': '/test',
'site': localhost.id,
'is_permanent': 'on',
'redirect_link': 'http://www.test.com/',
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailredirects:index'))
# Check that the redirect was created
redirects = models.Redirect.objects.filter(old_path='/test')
self.assertEqual(redirects.count(), 1)
self.assertEqual(redirects.first().redirect_link, 'http://www.test.com/')
self.assertEqual(redirects.first().site, localhost)
def test_add_validation_error(self):
response = self.post({
'old_path': '',
'site': '',
'is_permanent': 'on',
'redirect_link': 'http://www.test.com/',
})
# Should not redirect to index
self.assertEqual(response.status_code, 200)
def test_cannot_add_duplicate_with_no_site(self):
models.Redirect.objects.create(old_path='/test', site=None, redirect_link='http://elsewhere.com/')
response = self.post({
'old_path': '/test',
'site': '',
'is_permanent': 'on',
'redirect_link': 'http://www.test.com/',
})
# Should not redirect to index
self.assertEqual(response.status_code, 200)
def test_cannot_add_duplicate_on_same_site(self):
localhost = Site.objects.get(hostname='localhost')
models.Redirect.objects.create(old_path='/test', site=localhost, redirect_link='http://elsewhere.com/')
response = self.post({
'old_path': '/test',
'site': localhost.pk,
'is_permanent': 'on',
'redirect_link': 'http://www.test.com/',
})
# Should not redirect to index
self.assertEqual(response.status_code, 200)
def test_can_reuse_path_on_other_site(self):
localhost = Site.objects.get(hostname='localhost')
contact_page = Page.objects.get(url_path='/home/contact-us/')
other_site = Site.objects.create(hostname='other.example.com', port=80, root_page=contact_page)
models.Redirect.objects.create(old_path='/test', site=localhost, redirect_link='http://elsewhere.com/')
response = self.post({
'old_path': '/test',
'site': other_site.pk,
'is_permanent': 'on',
'redirect_link': 'http://www.test.com/',
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailredirects:index'))
# Check that the redirect was created
redirects = models.Redirect.objects.filter(redirect_link='http://www.test.com/')
self.assertEqual(redirects.count(), 1)
class TestRedirectsEditView(TestCase, WagtailTestUtils):
def setUp(self):
# Create a redirect to edit
self.redirect = models.Redirect(old_path='/test', redirect_link='http://www.test.com/')
self.redirect.save()
# Login
self.login()
def get(self, params={}, redirect_id=None):
return self.client.get(reverse('wagtailredirects:edit', args=(redirect_id or self.redirect.id, )), params)
def post(self, post_data={}, redirect_id=None):
return self.client.post(reverse('wagtailredirects:edit', args=(redirect_id or self.redirect.id, )), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailredirects/edit.html')
    def test_nonexistent_redirect(self):
        self.assertEqual(self.get(redirect_id=100000).status_code, 404)
def test_edit(self):
response = self.post({
'old_path': '/test',
'is_permanent': 'on',
'site': '',
'redirect_link': 'http://www.test.com/ive-been-edited',
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailredirects:index'))
# Check that the redirect was edited
redirects = models.Redirect.objects.filter(old_path='/test')
self.assertEqual(redirects.count(), 1)
self.assertEqual(redirects.first().redirect_link, 'http://www.test.com/ive-been-edited')
self.assertEqual(redirects.first().site, None)
def test_edit_with_site(self):
localhost = Site.objects.get(hostname='localhost')
response = self.post({
'old_path': '/test',
'is_permanent': 'on',
'site': localhost.id,
'redirect_link': 'http://www.test.com/ive-been-edited',
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailredirects:index'))
# Check that the redirect was edited
redirects = models.Redirect.objects.filter(old_path='/test')
self.assertEqual(redirects.count(), 1)
self.assertEqual(redirects.first().redirect_link, 'http://www.test.com/ive-been-edited')
self.assertEqual(redirects.first().site, localhost)
def test_edit_validation_error(self):
response = self.post({
'old_path': '',
'is_permanent': 'on',
'site': '',
'redirect_link': 'http://www.test.com/ive-been-edited',
})
# Should not redirect to index
self.assertEqual(response.status_code, 200)
def test_edit_duplicate(self):
models.Redirect.objects.create(old_path='/othertest', site=None, redirect_link='http://elsewhere.com/')
response = self.post({
'old_path': '/othertest',
'is_permanent': 'on',
'site': '',
'redirect_link': 'http://www.test.com/ive-been-edited',
})
# Should not redirect to index
self.assertEqual(response.status_code, 200)
class TestRedirectsDeleteView(TestCase, WagtailTestUtils):
def setUp(self):
# Create a redirect to edit
self.redirect = models.Redirect(old_path='/test', redirect_link='http://www.test.com/')
self.redirect.save()
# Login
self.login()
def get(self, params={}, redirect_id=None):
return self.client.get(reverse('wagtailredirects:delete', args=(redirect_id or self.redirect.id, )), params)
def post(self, redirect_id=None):
return self.client.post(reverse(
'wagtailredirects:delete', args=(redirect_id or self.redirect.id, )
))
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailredirects/confirm_delete.html')
    def test_nonexistent_redirect(self):
        self.assertEqual(self.get(redirect_id=100000).status_code, 404)
def test_delete(self):
response = self.post()
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailredirects:index'))
# Check that the redirect was deleted
redirects = models.Redirect.objects.filter(old_path='/test')
self.assertEqual(redirects.count(), 0)
|
iansprice/wagtail
|
wagtail/wagtailredirects/tests.py
|
Python
|
bsd-3-clause
| 23,927
|
def numeral(n):
numerals = (n//1000) * "M"
numerals += _numeral((n%1000)//100, "C", "D", "M")
numerals += _numeral((n%100)//10, "X", "L", "C")
numerals += _numeral(n%10, "I", "V", "X")
return numerals
def _numeral(num, i, v, x):
    # Encode one decimal digit (0-9) using the one (i), five (v) and
    # ten (x) symbols for this place value.
    n, m = divmod(num, 5)
    if n == 1:
        # 5-8 become V plus repeated I; 9 becomes the subtractive IX.
        return i + x if m == 4 else v + m * i
    else:
        # 0-3 become repeated I; 4 becomes the subtractive IV.
        return i + v if m == 4 else m * i
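# Illustrative self-check (not part of the original solution); these asserts
# exercise both the additive and subtractive branches of _numeral.
if __name__ == "__main__":
    assert numeral(1994) == "MCMXCIV"
    assert numeral(2023) == "MMXXIII"
    assert numeral(9) == "IX"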
|
CubicComet/exercism-python-solutions
|
roman-numerals/roman_numerals.py
|
Python
|
agpl-3.0
| 390
|
from xml.dom import minidom
#from lxml import objectify
import struct
import base64
import binascii
from xml.etree.ElementTree import parse
import datetime
import time
hrvlist = "1.0;2.01;3.02;4.05;3.02394"
hrvstr = hrvlist.split(';')
def getVLFp():
    # Very-low-frequency power as a percentage, treating hrvstr[0] as the total.
    VLFp = ((float(hrvstr[1]) + float(hrvstr[2])) / float(hrvstr[0])) * 100
    return VLFp
def getLFp():
    # Low-frequency power as a percentage of hrvstr[0].
    LFp = float(hrvstr[3]) / float(hrvstr[0]) * 100
    return LFp
def getHFp():
    # High-frequency power as a percentage of hrvstr[0].
    HFp = float(hrvstr[4]) / float(hrvstr[0]) * 100
    return HFp
globalTime = time.time()
RRIlist = []
cnt = 0
timelist = []
def xmlParse(xmlString):
#global RRIlist()
rawdata, HR, tag, Rpeak, HQ, F1, F2, Y = "","","","","","","",""
xmlDoc = minidom.parseString(xmlString)#objectify.fromstring(xml)
try :
HR = xmlDoc.getElementsByTagName('H')
if HR != "":
print (HR[0].firstChild.data)
except :
HR = "0"
modulename = xmlDoc.getElementsByTagName('M')
rawdata = xmlDoc.getElementsByTagName('D')
tag = xmlDoc.getElementsByTagName('T')
Rpeak = xmlDoc.getElementsByTagName('P')
HQ = xmlDoc.getElementsByTagName('S')
F1 = xmlDoc.getElementsByTagName('F1')
F2 = xmlDoc.getElementsByTagName('F2')
Y = xmlDoc.getElementsByTagName('Y')
    samplerate = xmlDoc.getElementsByTagName('R')
# print (HQ[0].firstChild.data)
if HQ[0].firstChild.data == "1":
RRI = ""
try:
RRI = xmlDoc.getElementsByTagName('I')
if RRI != "":
dataRRI = RRI[0].firstChild.data.split(',')
curTime = time.time()
for i in dataRRI:
global globalTime
#print (curTime-globalTime)
RRIlist.append(i)
timelist.append(str(round((curTime-globalTime)/1000,6)))
# print(dataRRI)
except:
RRI = ""
#print(getVLFp(),getLFp(),getHFp())
string = "<B><E><M>B57A7</M><R>255</R><D>goKBgYKCgICBgYGBg4SDg4SFhIOEhYOBgoSEg4SGhoWFh4qIh4eIh4aGiIiHhoiIh4eJiYiIiYqJiYqMjI2Oj4+OjY2Mi4qKi4mJiYmIiImKiYmJiomIiYqLiomKi4uNkpulscDMz8WxlHRiYGdvdXp/goOFh4qKi4yOjo6PkJGPj5CRkZCSlJOUlpeXl5iZmZmZmpubnJ6gn5+foJ6dnZ+cmpqamZeXl5mXlpaWlZKRkZCOjI6Oi4qKiomIiImIh4aJiIeFh4iHh4mKiIeIioiHiIqJh4aIiIaFhoaDgYGDg4GCg4GAfn9+fHx/fnx6fH17ent8enh5e3p7fH59</D><S>1</S><Z>4</Z><T>856</T><H>85</H><I>196,193</I><P>91,102</P></E><USER>5207588497703702855</USER><TIMESTAMP>1</TIMESTAMP></B>"
xmlParse(string)
print (RRIlist)
print (timelist)
#target = "goKBgYKCgICBgYGBg4SDg4SFhIOEhYOBgoSEg4SGhoWFh4qIh4eIh4aGiIiHhoiIh4eJiYiIiYqJiYqMjI2Oj4+OjY2Mi4qKi4mJiYmIiImKiYmJiomIiYqLiomKi4uNkpulscDMz8WxlHRiYGdvdXp/goOFh4qKi4yOjo6PkJGPj5CRkZCSlJOUlpeXl5iZmZmZmpubnJ6gn5+foJ6dnZ+cmpqamZeXl5mXlpaWlZKRkZCOjI6Oi4qKiomIiImIh4aJiIeFh4iHh4mKiIeIioiHiIqJh4aIiIaFhoaDgYGDg4GCg4GAfn9+fHx/fnx6fH17ent8enh5e3p7fH59"
#c , d = hello(string)
#print (c,d)
#dtgt = binascii.a2b_base64(target)
#for i in dtgt:
# print (str(dtgt[i]))
#print (dtgt.("ASCII"))
#format = ">ff"
#for i in range(100):
# print (struct.unpack_from(format,dtgt,8*i))
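# Illustrative sketch (an assumption, not part of the original script): the
# <D> payload appears to be base64-encoded unsigned bytes, so one plausible
# decode of the commented-out experiment above is:
# waveform = list(binascii.a2b_base64(target)) # ints 0-255, one per sample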
|
sonicyang/TIE
|
tools/parse.py
|
Python
|
gpl-2.0
| 2,829
|
#!/usr/bin/env python
import unittest
from lib.ll_string import String
class StringTests(unittest.TestCase):
def testExtractHelloWorld(self):
sv = String('$:+ -:-+ -|-: .. :| -*-:X -:. *-+X +. -|* |. -+-| .-: -:*X .+')
result = sv.extract_string()
self.assertEquals(result, "Hello World!")
class LlamaTests(unittest.TestCase):
""" Basic unit test class to check the above Fibonacci generator """
def setUp(self):
pass
# def testStopping(self):
# # Check the generator stopped when it should have
# self.assertEqual(FIB_STOP, len(self.fibs))
#
# def testNumbers(self):
# # Check the generated list against our known correct list
# for i in range(len(self.correct)):
# self.assertEqual(self.fibs[i], self.correct[i])
if __name__ == '__main__':
unittest.main()
|
autowitch/llama
|
llama_tests.py
|
Python
|
mit
| 862
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib import messages as Msg
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from forms import EmailAddressForm
from models import EmailAddress
from utils import send_activation, get_template, sort_email
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from signals import user_added_email, user_sent_activation, user_activated_email
@login_required
def email_add(request):
"""
User is logged and has a primary email address already
    This will add an additional email address to this User
"""
if request.method == 'POST':
form = EmailAddressForm(user=request.user, data=request.POST)
if form.is_valid():
email = form.save()
user_added_email.send(sender=EmailAddress, email_address=email)
Msg.add_message (request, Msg.SUCCESS, _('email address added'))
form = EmailAddressForm(user=request.user)
else:
form = EmailAddressForm(user=request.user)
emails_list = EmailAddress.objects.filter(user=request.user).order_by(*sort_email())
return render_to_response(get_template('emailmgr_email_list.html'),
{
'email_list': emails_list,
'email_form': form
},
context_instance=RequestContext(request)
)
@login_required
def email_make_primary(request, identifier="somekey"):
"""
User is logged in, has a second email that is already activated and
wants to make that the primary email address.
    The User object's email address will also be replaced with the new
    primary email address, so Django internals work with the new primary address too
"""
email = get_object_or_404(EmailAddress, identifier__iexact=identifier.lower())
if email.is_active:
if email.is_primary:
Msg.add_message (request, Msg.SUCCESS, _('email address is already primary'))
else:
emails = EmailAddress.objects.filter(user=email.user)
for e in emails:
e.is_primary = False
e.save()
email.user.email = email.email
email.user.save()
email.is_primary = True
email.save()
Msg.add_message (request, Msg.SUCCESS, _('primary address changed'))
else:
Msg.add_message (request, Msg.SUCCESS, _('email address must be activated first'))
return HttpResponseRedirect(reverse('emailmgr_email_list'))
@login_required
def email_send_activation(request, identifier="somekey"):
"""
    The user is logged in and has added a new email address to his/her account.
    The user cannot do anything with the newly added email unless it is first activated.
    This function will send an activation email for the newly added address
    associated with the User's account
"""
email = get_object_or_404(EmailAddress, identifier__iexact=identifier.lower())
if email.is_active:
Msg.add_message (request, Msg.SUCCESS, _('email address already activated'))
else:
send_activation(email, request.is_secure())
email.is_activation_sent = True
email.save()
user_sent_activation.send(sender=EmailAddress, email_address=email)
Msg.add_message (request, Msg.SUCCESS, _('activation email sent'))
return HttpResponseRedirect(reverse('emailmgr_email_list'))
@login_required
def email_activate(request, identifier="somekey"):
"""
User is already logged in and the activation link will trigger the email address
in question to be activated. If the account is already active, then a message is
put in the message buffer indicating that the email is already active
"""
try:
email = EmailAddress.objects.get(identifier__iexact=identifier.lower())
except EmailAddress.DoesNotExist:
Msg.add_message (request, Msg.ERROR, _('email address not found'))
else:
if email.is_active:
Msg.add_message (request, Msg.SUCCESS, _('email address already active'))
else:
email.is_active = True
if not email.user.email:
email.user.email = email.email
email.is_primary = True
email.user.save()
email.save()
user_activated_email.send(sender=EmailAddress, email_address=email)
Msg.add_message (request, Msg.SUCCESS, _('email address is now active'))
return HttpResponseRedirect(reverse('emailmgr_email_list'))
@login_required
def email_delete(request, identifier="somekey"):
"""
    Remove an email address from the User's account; the primary email address cannot be removed
"""
email = get_object_or_404(EmailAddress, identifier__iexact=identifier.lower())
if email.email == request.user.email:
Msg.add_message (request, Msg.ERROR, _('cannot remove primary email address'))
elif email.user != request.user:
Msg.add_message (request, Msg.ERROR, _('email address is not associated with this account'))
else:
email.delete()
Msg.add_message (request, Msg.SUCCESS, _('email address removed'))
return HttpResponseRedirect(reverse('emailmgr_email_list'))
@login_required
def email_list(request):
"""
    All email addresses associated with the User's account will be passed into the template as a list
    An ``add`` email form will be passed into the template so the user can add a new email inline
"""
form = EmailAddressForm(user=request.user)
emails_list = EmailAddress.objects.filter(user=request.user).order_by(*sort_email())
return render_to_response(get_template('emailmgr_email_list.html'),
{
'email_list': emails_list,
'email_form': form
},
context_instance=RequestContext(request)
)
|
eahneahn/free
|
lib/python2.7/site-packages/emailmgr/views.py
|
Python
|
agpl-3.0
| 6,307
|
"""
Some shared functions
.. deprecated:: 0.6.3
Should be moved to different places and this file removed,
but it needs refactoring.
"""
from __future__ import division
# Libraries.
import hashlib
import os
import stat
import subprocess
import sys
from binascii import hexlify
# Project imports.
import highlevelcrypto
import state
from addresses import decodeAddress, encodeVarint
from bmconfigparser import BMConfigParser
from debug import logger
from helper_sql import sqlQuery
from pyelliptic import arithmetic
myECCryptorObjects = {}
MyECSubscriptionCryptorObjects = {}
# The key in this dictionary is the RIPE hash which is encoded
# in an address and value is the address itself.
myAddressesByHash = {}
# The key in this dictionary is the tag generated from the address.
myAddressesByTag = {}
broadcastSendersForWhichImWatching = {}
def isAddressInMyAddressBook(address):
"""Is address in my addressbook?"""
queryreturn = sqlQuery(
'''select address from addressbook where address=?''',
address)
return queryreturn != []
# At this point we should really just have a isAddressInMy(book, address)...
def isAddressInMySubscriptionsList(address):
"""Am I subscribed to this address?"""
queryreturn = sqlQuery(
'''select * from subscriptions where address=?''',
str(address))
return queryreturn != []
def isAddressInMyAddressBookSubscriptionsListOrWhitelist(address):
"""
Am I subscribed to this address, is it in my addressbook or whitelist?
"""
if isAddressInMyAddressBook(address):
return True
queryreturn = sqlQuery(
'''SELECT address FROM whitelist where address=?'''
''' and enabled = '1' ''',
address)
if queryreturn != []:
return True
queryreturn = sqlQuery(
'''select address from subscriptions where address=?'''
''' and enabled = '1' ''',
address)
if queryreturn != []:
return True
return False
def decodeWalletImportFormat(WIFstring):
# pylint: disable=inconsistent-return-statements
"""
    Convert a private key from the base58 (WIF) form used in the config
    file to an 8-bit binary string
"""
fullString = arithmetic.changebase(WIFstring, 58, 256)
privkey = fullString[:-4]
if fullString[-4:] != \
hashlib.sha256(hashlib.sha256(privkey).digest()).digest()[:4]:
logger.critical(
'Major problem! When trying to decode one of your'
' private keys, the checksum failed. Here are the first'
' 6 characters of the PRIVATE key: %s',
str(WIFstring)[:6]
)
os._exit(0) # pylint: disable=protected-access
# return ""
elif privkey[0] == '\x80': # checksum passed
return privkey[1:]
logger.critical(
'Major problem! When trying to decode one of your private keys,'
' the checksum passed but the key doesn\'t begin with hex 80.'
' Here is the PRIVATE key: %s', WIFstring
)
os._exit(0) # pylint: disable=protected-access
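# For reference, a rough sketch of the WIF layout handled above
# (illustrative, inferred from the checks in the function itself):
#   base58( 0x80 || 32-byte-privkey || sha256(sha256(0x80 || privkey))[:4] )
# decodeWalletImportFormat() strips the leading 0x80 and aborts if the
# 4-byte double-SHA256 checksum does not match.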
def reloadMyAddressHashes():
"""Reload keys for user's addresses from the config file"""
logger.debug('reloading keys from keys.dat file')
myECCryptorObjects.clear()
myAddressesByHash.clear()
myAddressesByTag.clear()
# myPrivateKeys.clear()
keyfileSecure = checkSensitiveFilePermissions(os.path.join(
state.appdata, 'keys.dat'))
hasEnabledKeys = False
for addressInKeysFile in BMConfigParser().addresses():
isEnabled = BMConfigParser().getboolean(addressInKeysFile, 'enabled')
if isEnabled:
hasEnabledKeys = True
# status
addressVersionNumber, streamNumber, hashobj = decodeAddress(addressInKeysFile)[1:]
if addressVersionNumber in (2, 3, 4):
                # decodeWalletImportFormat returns the raw 32-byte key,
                # which hexlify encodes as 64 hex characters.
privEncryptionKey = hexlify(decodeWalletImportFormat(
BMConfigParser().get(addressInKeysFile, 'privencryptionkey')))
# It is 32 bytes encoded as 64 hex characters
if len(privEncryptionKey) == 64:
myECCryptorObjects[hashobj] = \
highlevelcrypto.makeCryptor(privEncryptionKey)
myAddressesByHash[hashobj] = addressInKeysFile
tag = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber) +
encodeVarint(streamNumber) + hashobj).digest()).digest()[32:]
myAddressesByTag[tag] = addressInKeysFile
else:
logger.error(
'Error in reloadMyAddressHashes: Can\'t handle'
' address versions other than 2, 3, or 4.\n'
)
if not keyfileSecure:
fixSensitiveFilePermissions(os.path.join(
state.appdata, 'keys.dat'), hasEnabledKeys)
def reloadBroadcastSendersForWhichImWatching():
"""
Reinitialize runtime data for the broadcasts I'm subscribed to
from the config file
"""
broadcastSendersForWhichImWatching.clear()
MyECSubscriptionCryptorObjects.clear()
queryreturn = sqlQuery('SELECT address FROM subscriptions where enabled=1')
logger.debug('reloading subscriptions...')
for row in queryreturn:
address, = row
# status
addressVersionNumber, streamNumber, hashobj = decodeAddress(address)[1:]
if addressVersionNumber == 2:
broadcastSendersForWhichImWatching[hashobj] = 0
# Now, for all addresses, even version 2 addresses,
# we should create Cryptor objects in a dictionary which we will
# use to attempt to decrypt encrypted broadcast messages.
if addressVersionNumber <= 3:
privEncryptionKey = hashlib.sha512(
encodeVarint(addressVersionNumber) +
encodeVarint(streamNumber) + hashobj
).digest()[:32]
MyECSubscriptionCryptorObjects[hashobj] = \
highlevelcrypto.makeCryptor(hexlify(privEncryptionKey))
else:
doubleHashOfAddressData = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber) +
encodeVarint(streamNumber) + hashobj
).digest()).digest()
tag = doubleHashOfAddressData[32:]
privEncryptionKey = doubleHashOfAddressData[:32]
MyECSubscriptionCryptorObjects[tag] = \
highlevelcrypto.makeCryptor(hexlify(privEncryptionKey))
def fixPotentiallyInvalidUTF8Data(text):
"""Sanitise invalid UTF-8 strings"""
try:
unicode(text, 'utf-8')
return text
    except Exception:
return 'Part of the message is corrupt. The message cannot be' \
' displayed the normal way.\n\n' + repr(text)
def checkSensitiveFilePermissions(filename):
"""
:param str filename: path to the file
:return: True if file appears to have appropriate permissions.
"""
if sys.platform == 'win32':
# .. todo:: This might deserve extra checks by someone familiar with
# Windows systems.
return True
elif sys.platform[:7] == 'freebsd':
# FreeBSD file systems are the same as major Linux file systems
present_permissions = os.stat(filename)[0]
disallowed_permissions = stat.S_IRWXG | stat.S_IRWXO
return present_permissions & disallowed_permissions == 0
try:
# Skip known problems for non-Win32 filesystems
# without POSIX permissions.
fstype = subprocess.check_output(
'stat -f -c "%%T" %s' % (filename),
shell=True,
stderr=subprocess.STDOUT
)
if 'fuseblk' in fstype:
logger.info(
'Skipping file permissions check for %s.'
' Filesystem fuseblk detected.', filename)
return True
    except Exception:
# Swallow exception here, but we might run into trouble later!
logger.error('Could not determine filesystem type. %s', filename)
present_permissions = os.stat(filename)[0]
disallowed_permissions = stat.S_IRWXG | stat.S_IRWXO
return present_permissions & disallowed_permissions == 0
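# A quick sketch of the permission check above (illustrative modes):
#   0o600 & (stat.S_IRWXG | stat.S_IRWXO) == 0  -> considered secure
#   0o640 & (stat.S_IRWXG | stat.S_IRWXO) != 0  -> group-readable, insecure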
# Fixes permissions on a sensitive file.
def fixSensitiveFilePermissions(filename, hasEnabledKeys):
"""Try to change file permissions to be more restrictive"""
if hasEnabledKeys:
logger.warning(
'Keyfile had insecure permissions, and there were enabled'
' keys. The truly paranoid should stop using them immediately.')
else:
logger.warning(
'Keyfile had insecure permissions, but there were no enabled keys.'
)
try:
present_permissions = os.stat(filename)[0]
disallowed_permissions = stat.S_IRWXG | stat.S_IRWXO
allowed_permissions = ((1 << 32) - 1) ^ disallowed_permissions
new_permissions = (
allowed_permissions & present_permissions)
os.chmod(filename, new_permissions)
logger.info('Keyfile permissions automatically fixed.')
except Exception:
logger.exception('Keyfile permissions could not be fixed.')
raise
|
PeterSurda/PyBitmessage
|
src/shared.py
|
Python
|
mit
| 9,293
|
#4! = 4*3*2*1
#5! = 5*4*3*2*1
'''
i = 1
result = 1
while i<=4:
result = result * i
i+=1
print(result)
'''
#5! => 5*4!
#4! => 4*3!
'''
def xxx(num):
num * xxxx(num-1)
def xx(num):
num * xxx(num-1)
def getNums(num):
num * xx(num-1)
getNums(4)
'''
def getNums(num):
if num>1:
return num * getNums(num-1)
else:
return num
result = getNums(4)
print(result)
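# Rough trace of the recursion above (illustrative):
#   getNums(4) -> 4 * getNums(3) -> 4 * 3 * getNums(2)
#              -> 4 * 3 * 2 * getNums(1) -> 4 * 3 * 2 * 1 = 24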
|
jameswatt2008/jameswatt2008.github.io
|
python/Python基础/截图和代码/函数-下/11-递归.py
|
Python
|
gpl-2.0
| 419
|
import socket
import struct
def expect_packet(sock, name, expected):
if len(expected) > 0:
rlen = len(expected)
else:
rlen = 1
packet_recvd = sock.recv(rlen)
return packet_matches(name, packet_recvd, expected)
def packet_matches(name, recvd, expected):
if recvd != expected:
print("FAIL: Received incorrect "+name+".")
try:
print("Received: "+to_string(recvd))
except struct.error:
print("Received (not decoded): "+recvd)
try:
print("Expected: "+to_string(expected))
except struct.error:
print("Expected (not decoded): "+expected)
return 0
else:
return 1
def do_client_connect(connect_packet, connack_packet, hostname="localhost", port=1888, timeout=60, connack_error="connack"):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(timeout)
sock.connect((hostname, port))
sock.send(connect_packet)
if expect_packet(sock, connack_error, connack_packet):
return sock
else:
sock.close()
raise ValueError
def remaining_length(packet):
l = min(5, len(packet))
all_bytes = struct.unpack("!"+"B"*l, packet[:l])
mult = 1
rl = 0
    for i in range(1, l):  # the length varint occupies at most bytes 1..4
byte = all_bytes[i]
rl += (byte & 127) * mult
mult *= 128
if byte & 128 == 0:
packet = packet[i+1:]
break
return (packet, rl)
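# Decoding sketch (illustrative packet): for b'\x30\xc1\x02' + payload,
# the varint bytes 0xC1 0x02 give 0x41 + 0x02*128 = 321, so
# remaining_length() returns (payload, 321).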
def to_string(packet):
if len(packet) == 0:
return ""
packet0 = struct.unpack("!B", packet[0])
packet0 = packet0[0]
cmd = packet0 & 0xF0
if cmd == 0x00:
# Reserved
return "0x00"
elif cmd == 0x10:
# CONNECT
(packet, rl) = remaining_length(packet)
pack_format = "!H" + str(len(packet)-2) + 's'
(slen, packet) = struct.unpack(pack_format, packet)
pack_format = "!" + str(slen)+'sBBH' + str(len(packet)-slen-4) + 's'
(protocol, proto_ver, flags, keepalive, packet) = struct.unpack(pack_format, packet)
s = "CONNECT, proto="+protocol+str(proto_ver)+", keepalive="+str(keepalive)
if flags&2:
s = s+", clean-session"
else:
s = s+", durable"
pack_format = "!H" + str(len(packet)-2) + 's'
(slen, packet) = struct.unpack(pack_format, packet)
pack_format = "!" + str(slen)+'s' + str(len(packet)-slen) + 's'
(client_id, packet) = struct.unpack(pack_format, packet)
s = s+", id="+client_id
if flags&4:
pack_format = "!H" + str(len(packet)-2) + 's'
(slen, packet) = struct.unpack(pack_format, packet)
pack_format = "!" + str(slen)+'s' + str(len(packet)-slen) + 's'
(will_topic, packet) = struct.unpack(pack_format, packet)
s = s+", will-topic="+will_topic
pack_format = "!H" + str(len(packet)-2) + 's'
(slen, packet) = struct.unpack(pack_format, packet)
pack_format = "!" + str(slen)+'s' + str(len(packet)-slen) + 's'
(will_message, packet) = struct.unpack(pack_format, packet)
s = s+", will-message="+will_message
s = s+", will-qos="+str((flags&24)>>3)
s = s+", will-retain="+str((flags&32)>>5)
if flags&128:
pack_format = "!H" + str(len(packet)-2) + 's'
(slen, packet) = struct.unpack(pack_format, packet)
pack_format = "!" + str(slen)+'s' + str(len(packet)-slen) + 's'
(username, packet) = struct.unpack(pack_format, packet)
s = s+", username="+username
if flags&64:
pack_format = "!H" + str(len(packet)-2) + 's'
(slen, packet) = struct.unpack(pack_format, packet)
pack_format = "!" + str(slen)+'s' + str(len(packet)-slen) + 's'
(password, packet) = struct.unpack(pack_format, packet)
s = s+", password="+password
return s
elif cmd == 0x20:
# CONNACK
(cmd, rl, resv, rc) = struct.unpack('!BBBB', packet)
return "CONNACK, rl="+str(rl)+", res="+str(resv)+", rc="+str(rc)
elif cmd == 0x30:
# PUBLISH
dup = (packet0 & 0x08)>>3
qos = (packet0 & 0x06)>>1
retain = (packet0 & 0x01)
(packet, rl) = remaining_length(packet)
pack_format = "!H" + str(len(packet)-2) + 's'
(tlen, packet) = struct.unpack(pack_format, packet)
pack_format = "!" + str(tlen)+'s' + str(len(packet)-tlen) + 's'
(topic, packet) = struct.unpack(pack_format, packet)
s = "PUBLISH, rl="+str(rl)+", topic="+topic+", qos="+str(qos)+", retain="+str(retain)+", dup="+str(dup)
if qos > 0:
pack_format = "!H" + str(len(packet)-2) + 's'
(mid, packet) = struct.unpack(pack_format, packet)
s = s + ", mid="+str(mid)
s = s + ", payload="+packet
return s
elif cmd == 0x40:
# PUBACK
(cmd, rl, mid) = struct.unpack('!BBH', packet)
return "PUBACK, rl="+str(rl)+", mid="+str(mid)
elif cmd == 0x50:
# PUBREC
(cmd, rl, mid) = struct.unpack('!BBH', packet)
return "PUBREC, rl="+str(rl)+", mid="+str(mid)
elif cmd == 0x60:
# PUBREL
dup = (packet0 & 0x08)>>3
(cmd, rl, mid) = struct.unpack('!BBH', packet)
return "PUBREL, rl="+str(rl)+", mid="+str(mid)+", dup="+str(dup)
elif cmd == 0x70:
# PUBCOMP
(cmd, rl, mid) = struct.unpack('!BBH', packet)
return "PUBCOMP, rl="+str(rl)+", mid="+str(mid)
elif cmd == 0x80:
# SUBSCRIBE
(packet, rl) = remaining_length(packet)
pack_format = "!H" + str(len(packet)-2) + 's'
(mid, packet) = struct.unpack(pack_format, packet)
s = "SUBSCRIBE, rl="+str(rl)+", mid="+str(mid)
topic_index = 0
while len(packet) > 0:
pack_format = "!H" + str(len(packet)-2) + 's'
(tlen, packet) = struct.unpack(pack_format, packet)
pack_format = "!" + str(tlen)+'sB' + str(len(packet)-tlen-1) + 's'
(topic, qos, packet) = struct.unpack(pack_format, packet)
s = s + ", topic"+str(topic_index)+"="+topic+","+str(qos)
return s
elif cmd == 0x90:
# SUBACK
(packet, rl) = remaining_length(packet)
pack_format = "!H" + str(len(packet)-2) + 's'
(mid, packet) = struct.unpack(pack_format, packet)
pack_format = "!" + "B"*len(packet)
granted_qos = struct.unpack(pack_format, packet)
s = "SUBACK, rl="+str(rl)+", mid="+str(mid)+", granted_qos="+str(granted_qos[0])
        for i in range(1, len(granted_qos)):
s = s+", "+str(granted_qos[i])
return s
elif cmd == 0xA0:
# UNSUBSCRIBE
(packet, rl) = remaining_length(packet)
pack_format = "!H" + str(len(packet)-2) + 's'
(mid, packet) = struct.unpack(pack_format, packet)
s = "UNSUBSCRIBE, rl="+str(rl)+", mid="+str(mid)
topic_index = 0
while len(packet) > 0:
pack_format = "!H" + str(len(packet)-2) + 's'
(tlen, packet) = struct.unpack(pack_format, packet)
pack_format = "!" + str(tlen)+'s' + str(len(packet)-tlen) + 's'
(topic, packet) = struct.unpack(pack_format, packet)
s = s + ", topic"+str(topic_index)+"="+topic
return s
elif cmd == 0xB0:
# UNSUBACK
(cmd, rl, mid) = struct.unpack('!BBH', packet)
return "UNSUBACK, rl="+str(rl)+", mid="+str(mid)
elif cmd == 0xC0:
# PINGREQ
(cmd, rl) = struct.unpack('!BB', packet)
return "PINGREQ, rl="+str(rl)
elif cmd == 0xD0:
# PINGRESP
(cmd, rl) = struct.unpack('!BB', packet)
return "PINGRESP, rl="+str(rl)
elif cmd == 0xE0:
# DISCONNECT
(cmd, rl) = struct.unpack('!BB', packet)
return "DISCONNECT, rl="+str(rl)
elif cmd == 0xF0:
# Reserved
return "0xF0"
def gen_connect(client_id, clean_session=True, keepalive=60, username=None, password=None, will_topic=None, will_qos=0, will_retain=False, will_payload="", proto_ver=3):
if (proto_ver&0x7F) == 3 or proto_ver == 0:
remaining_length = 12
elif (proto_ver&0x7F) == 4:
remaining_length = 10
else:
raise ValueError
if client_id != None:
remaining_length = remaining_length + 2+len(client_id)
connect_flags = 0
if clean_session:
connect_flags = connect_flags | 0x02
if will_topic != None:
remaining_length = remaining_length + 2+len(will_topic) + 2+len(will_payload)
connect_flags = connect_flags | 0x04 | ((will_qos&0x03) << 3)
if will_retain:
connect_flags = connect_flags | 32
if username != None:
remaining_length = remaining_length + 2+len(username)
connect_flags = connect_flags | 0x80
if password != None:
connect_flags = connect_flags | 0x40
remaining_length = remaining_length + 2+len(password)
rl = pack_remaining_length(remaining_length)
packet = struct.pack("!B"+str(len(rl))+"s", 0x10, rl)
if (proto_ver&0x7F) == 3 or proto_ver == 0:
packet = packet + struct.pack("!H6sBBH", len("MQIsdp"), "MQIsdp", proto_ver, connect_flags, keepalive)
elif (proto_ver&0x7F) == 4:
packet = packet + struct.pack("!H4sBBH", len("MQTT"), "MQTT", proto_ver, connect_flags, keepalive)
if client_id != None:
packet = packet + struct.pack("!H"+str(len(client_id))+"s", len(client_id), client_id)
if will_topic != None:
packet = packet + struct.pack("!H"+str(len(will_topic))+"s", len(will_topic), will_topic)
if len(will_payload) > 0:
packet = packet + struct.pack("!H"+str(len(will_payload))+"s", len(will_payload), will_payload)
else:
packet = packet + struct.pack("!H", 0)
if username != None:
packet = packet + struct.pack("!H"+str(len(username))+"s", len(username), username)
if password != None:
packet = packet + struct.pack("!H"+str(len(password))+"s", len(password), password)
return packet
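# Usage sketch (hypothetical client id; broker assumed on port 1888 as in
# the defaults above):
#   connect = gen_connect("test-client", keepalive=60, proto_ver=4)
#   sock = do_client_connect(connect, gen_connack(rc=0), port=1888)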
def gen_connack(resv=0, rc=0):
    return struct.pack('!BBBB', 32, 2, resv, rc)
def gen_publish(topic, qos, payload=None, retain=False, dup=False, mid=0):
rl = 2+len(topic)
pack_format = "!BBH"+str(len(topic))+"s"
if qos > 0:
rl = rl + 2
pack_format = pack_format + "H"
if payload != None:
rl = rl + len(payload)
pack_format = pack_format + str(len(payload))+"s"
else:
payload = ""
pack_format = pack_format + "0s"
cmd = 48 | (qos<<1)
if retain:
cmd = cmd + 1
if dup:
cmd = cmd + 8
if qos > 0:
return struct.pack(pack_format, cmd, rl, len(topic), topic, mid, payload)
else:
return struct.pack(pack_format, cmd, rl, len(topic), topic, payload)
def gen_puback(mid):
return struct.pack('!BBH', 64, 2, mid)
def gen_pubrec(mid):
return struct.pack('!BBH', 80, 2, mid)
def gen_pubrel(mid, dup=False):
if dup:
cmd = 96+8+2
else:
cmd = 96+2
return struct.pack('!BBH', cmd, 2, mid)
def gen_pubcomp(mid):
return struct.pack('!BBH', 112, 2, mid)
def gen_subscribe(mid, topic, qos):
pack_format = "!BBHH"+str(len(topic))+"sB"
return struct.pack(pack_format, 130, 2+2+len(topic)+1, mid, len(topic), topic, qos)
def gen_suback(mid, qos):
return struct.pack('!BBHB', 144, 2+1, mid, qos)
def gen_unsubscribe(mid, topic):
pack_format = "!BBHH"+str(len(topic))+"s"
return struct.pack(pack_format, 162, 2+2+len(topic), mid, len(topic), topic)
def gen_unsuback(mid):
return struct.pack('!BBH', 176, 2, mid)
def gen_pingreq():
return struct.pack('!BB', 192, 0)
def gen_pingresp():
return struct.pack('!BB', 208, 0)
def gen_disconnect():
return struct.pack('!BB', 224, 0)
def pack_remaining_length(remaining_length):
s = ""
while True:
byte = remaining_length % 128
remaining_length = remaining_length // 128
# If there are more digits to encode, set the top bit of this digit
if remaining_length > 0:
byte = byte | 0x80
s = s + struct.pack("!B", byte)
if remaining_length == 0:
return s
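# Encoding sketch (illustrative): pack_remaining_length(321) yields
# b'\xc1\x02' -- 321 % 128 = 65 with the continuation bit set, then
# 321 // 128 = 2 as the final byte.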
|
lrr-tum/fast-lib
|
vendor/mosquitto-1.3.5/test/mosq_test.py
|
Python
|
lgpl-3.0
| 12,355
|
from flask_table import Table, Col, LinkCol
class Results(Table):
id = Col('Id', show=False)
email = Col('Email')
password = Col('Password', show=False)
registered_on = Col('Registered Date', show=False)
admin = Col('Admin Role', show=False)
confirmed = Col('Confirmed Email', show=False)
confirmed_on = Col('Confirmation Date', show=False)
password_reset_token = Col('Password Reset Token', show=False)
probot_control = Col('Can control a ProBot?')
edit = LinkCol('Edit', 'edit', url_kwargs=dict(id='id'))
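# Usage sketch (hypothetical; `users` would be an iterable of objects or
# dicts carrying the attributes declared above):
#   table = Results(users)
#   html = table.__html__()  # or render {{ table }} in a Jinja template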
|
VitorHugoAguiar/ProBot
|
ProBot_Server/ProbotProject/project/table.py
|
Python
|
agpl-3.0
| 549
|
import json
import os
import sys
import asyncio
DATAFILENAME="data"
def set_user_id(new_id):
_local_data["user_id"]=new_id
def set_login_token(token):
_local_data["login_token"]=token
def load_data():
global _local_data
if(os.path.exists(os.path.join(get_current_path(),DATAFILENAME))):
with open(os.path.join(get_current_path(),DATAFILENAME), 'r') as f:
try:
_local_data=json.loads(f.read())
            except ValueError:
                _local_data = {}
else:_local_data={}
def save_data():
with open(os.path.join(get_current_path(),DATAFILENAME), 'w') as f:
f.write(json.dumps(_local_data))
def get_user_id():
return _local_data.get("user_id")
def get_login_token():
return _local_data.get("login_token")
def get_template_path():
return os.path.join(get_current_path(),"templates")
def get_current_path():
if getattr(sys, 'frozen', False):
# we are running in a bundle
f = sys.executable
else:
# we are running in a normal Python environment
f = __file__
return os.path.dirname(os.path.abspath(f))
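# Note on the branch above: when frozen (e.g. by PyInstaller), sys.frozen
# is set and sys.executable points at the bundle, so data files resolve
# next to the executable rather than next to this source file.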
def get_client_version():
VERSIONFILE="client_version"
with open(os.path.join(get_current_path(),VERSIONFILE), 'r') as f:
return float(f.read().strip())
def get_sync_path():
return _local_data.get("sync_path",None)
def set_sync_path(path):
_local_data["sync_path"]=path
record=None
from contextlib import closing
import aiohttp # $ pip install aiohttp
download_semaphore = asyncio.Semaphore(5)
async def download_file(url,path):
chunk_size=1<<15
async with download_semaphore:
with closing(aiohttp.ClientSession()) as session:
filename = str(path)
response = await session.get(url)
with closing(response), open(filename, 'wb') as file:
while True: # save file
chunk = await response.content.read(chunk_size)
if not chunk:
break
file.write(chunk)
return filename
upload_semaphore = asyncio.Semaphore(5)
async def upload_file(url,data):
async with upload_semaphore:
with closing(aiohttp.ClientSession()) as session:
return await session.post(url, data=data)
import hashlib
def file_md5(filename):
h = hashlib.md5()
with open(filename, 'rb', buffering=0) as f:
for b in iter(lambda : f.read(128*1024), b''):
h.update(b)
return h.hexdigest()
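# Usage sketch (hypothetical path): hash a file in 128 KiB chunks without
# loading it fully into memory:
#   digest = file_md5('/tmp/example.bin')  # 32-char hex string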
|
dandfmd/Linfilesync
|
utils.py
|
Python
|
apache-2.0
| 2,486
|
# Spark BOT
access_token='prettylongstringofjibberjabber'
webhook_name = 'thehook'
webhook_url='http://someexposed.url.to.your.server.io'
# UCM AXL API
AXL_username='ucm_application_user_with_axl_rights'
AXL_password=''
# UCM EMAPI
EMAPI_username='ucm_application_user_with_em_proxy_rights'
EMAPI_password=''
# UCM Clusters to query
# name (just a reference), server where AXL is running, server where EMAPI is running
clusters = [
["Server1","axlserver.myorg.net","emserver.myorg.net"],
["Server2","axlserver2.myorg.net","emserver2.myorg.net"],
]
# Help message (markdown)
help_message = """
# I can log you in into your phone
Just say "log me in into <number>" and I'll do my best to do so.
When you are done for the day, just say "log me out" ...
### commands
- hello
- log me in into <number>
- log me out
- help
### limitations
- no EMCC
- Uses your primary profile only (cannot choose profile yet)
"""
|
jseynaev-cisco/em-login-bot
|
config.sample.py
|
Python
|
mit
| 957
|
# -*- coding:utf-8 -*-
import sys
import os
import unittest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
import dfcapi
amounts = [10,10,10,10,10,10,10,10,10,10,10,10]
class DfcapiTestCase(unittest.TestCase):
# CHECKKEY
def test_checkkey(self):
dfcapi.setCheckKeyUrl('http://httpbin.org/get')
response = dfcapi.checkApiKey('TEST-TEST-TEST-TEST','fee78bd3bf59bfb36238b3f67de0a6ea103de130')
self.assertEqual(response.code, 200)
self.assertEqual(response.body['headers']['Authorization'], "Basic VEVTVC1URVNULVRFU1QtVEVTVDpmZWU3OGJkM2JmNTliZmIzNjIzOGIzZjY3ZGUwYTZlYTEwM2RlMTMw")
# View Direct debits
def test_ViewDirectDebits(self):
dfcapi.setViewDirectDebitUrl('http://httpbin.org/get')
response = dfcapi.ViewDirectDebits('TEST-TEST-TEST-TEST','fee78bd3bf59bfb36238b3f67de0a6ea103de130','000101AA0001')
self.assertEqual(response.code, 200)
self.assertEqual(response.body['headers']['Authorization'], "Basic VEVTVC1URVNULVRFU1QtVEVTVDpmZWU3OGJkM2JmNTliZmIzNjIzOGIzZjY3ZGUwYTZlYTEwM2RlMTMw")
self.assertEqual(response.body['args']['dfc_reference'], "000101AA0001")
# View Direct debits Breakdown
def test_ViewDirectDebitsBreakdown(self):
dfcapi.setViewDirectDebitBreakdownUrl('http://httpbin.org/get')
response = dfcapi.ViewDirectDebitsBreakdown('TEST-TEST-TEST-TEST','fee78bd3bf59bfb36238b3f67de0a6ea103de130','000101AA0001')
self.assertEqual(response.code, 200)
self.assertEqual(response.body['headers']['Authorization'], "Basic VEVTVC1URVNULVRFU1QtVEVTVDpmZWU3OGJkM2JmNTliZmIzNjIzOGIzZjY3ZGUwYTZlYTEwM2RlMTMw")
self.assertEqual(response.body['args']['dfc_reference'], "000101AA0001")
# Create Direct Debits
def test_createDirectDebit(self):
dfcapi.setCreateDirectDebitUrl('http://httpbin.org/post')
response = dfcapi.createDirectDebit('TEST-TEST-TEST-TEST','fee78bd3bf59bfb36238b3f67de0a6ea103de130','0001','ABC00001','Mr','Joe','Bloggs','1 Park Lane','','','London','','E15 2JG',amounts,'joebloggs@email.com','00000000','000000','2015-01-01',12,1,'MONTH','Y','1970-01-01','01234567890','07777777777','Y','Gym Membership','',False)
self.assertEqual(response.code, 200)
self.assertEqual(response.body['data'], '{"payer": {"birth_date": "1970-01-01", "first_name": "Joe", "last_name": "Bloggs", "title": "Mr"}, "authentication": {"apikey": "TEST-TEST-TEST-TEST", "apisecret": "fee78bd3bf59bfb36238b3f67de0a6ea103de130", "club_ref_no": "0001"}, "contact": {"phone_number": "07777777777", "no_email": "Y", "email": "joebloggs@email.com", "mobile_number": "01234567890"}, "address": {"town": "London", "address1": "1 Park Lane", "address2": "", "address3": "", "county": "", "postcode": "E15 2JG", "skip_check": false}, "bank": {"sort_code": "000000", "account_number": "00000000"}, "subscription": {"reference": "ABC00001", "amounts": [10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10], "bacs_reference": "", "start_from": "2015-01-01", "roll_status": "Y", "installments": 12, "service_description": "Gym Membership"}}')
# Update Direct Debits
def test_updateDirectDebit(self):
dfcapi.setUpdateDirectDebitUrl('http://httpbin.org/post')
response = dfcapi.UpdateDirectDebit('TEST-TEST-TEST-TEST','fee78bd3bf59bfb36238b3f67de0a6ea103de130','000101AA0001','','','','','','','','','','','','','','','','','15','012015','','','','','')
self.assertEqual(response.code, 200)
self.assertEqual(response.body['data'], '{"payer": {"birth_date": "", "first_name": "", "last_name": "", "title": ""}, "general": {"installmentamount": "", "newamount": "", "applyfrom_paydate": "012015", "applyfrom": "", "paymentdate": "15", "yourref": "", "latepayment": "", "installmentduedate": ""}, "authentication": {"dfc_ref": "000101AA0001", "apikey": "TEST-TEST-TEST-TEST", "apisecret": "fee78bd3bf59bfb36238b3f67de0a6ea103de130"}, "contact": {"mobile": "", "phone": "", "email": ""}, "address": {"town": "", "address1": "", "address2": "", "address3": "", "county": "", "postcode": ""}, "bank": {"sort_code": "", "account_number": ""}}')
# Cancel Direct debits
def test_cancelDirectDebit(self):
dfcapi.setCancelDirectDebitUrl('http://httpbin.org/post')
response = dfcapi.CancelDirectDebit('TEST-TEST-TEST-TEST','fee78bd3bf59bfb36238b3f67de0a6ea103de130','000101AA0001','2015-01-01')
self.assertEqual(response.code, 200)
self.assertEqual(response.body['data'], '{"cancel": {"apply_from": "2015-01-01"}, "authentication": {"dfc_ref": "000101AA0001", "apikey": "TEST-TEST-TEST-TEST", "apisecret": "fee78bd3bf59bfb36238b3f67de0a6ea103de130"}}' )
if __name__ == '__main__':
unittest.main()
|
dfcplc/dfcapi-python
|
dfcapi/test/test_dfcapi.py
|
Python
|
mit
| 4,575
|
from django.conf.urls.defaults import *
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^$', 'django.contrib.auth.views.login'),
(r'^admin/', include(admin.site.urls)),
(r'^files/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_DOC_ROOT}),
(r'^login/$', 'django.contrib.auth.views.login'),
(r'^login$', 'django.contrib.auth.views.login'),
(r'^register/$', 'datastore.views.register'),
(r'^register$', 'datastore.views.register'),
(r'^profile/$', 'datastore.views.profile'),
(r'^profile$', 'datastore.views.profile'),
(r'^logout/$', 'datastore.views.logout_view'),
(r'^logout$', 'datastore.views.logout_view'),
(r'^deleteall/$', 'datastore.views.deleteall'),
(r'^deleteall$', 'datastore.views.deleteall'),
(r'^upload/$', 'datastore.views.upload'),
(r'^upload$', 'datastore.views.upload'),
(r'^query/$', 'datastore.views.query'),
(r'^query$', 'datastore.views.query'),
(r'^status/$', 'datastore.views.status'),
(r'^status$', 'datastore.views.status'),
(r'^display/$', 'datastore.views.display'),
(r'^display$', 'datastore.views.display'),
(r'^uploadrules/$', 'datastore.views.uploadrules'),
(r'^uploadrules$', 'datastore.views.uploadrules'),
(r'^deleterules/$', 'datastore.views.deleterules'),
(r'^deleterules$', 'datastore.views.deleterules'),
(r'^getrules/$', 'datastore.views.getrules'),
(r'^getrules$', 'datastore.views.getrules'),
#(r'^datastore/', include('datastore.urls')),
    (r'^privacyrules/$', 'datastore.views.privacyrules'),
    (r'^privacyrules$', 'datastore.views.privacyrules'),
    (r'^locationlabel/$', 'datastore.views.locationlabel'),
    (r'^locationlabel$', 'datastore.views.locationlabel'),
    (r'^search_rules/$', 'datastore.views.search_rules'),
    (r'^search_rules$', 'datastore.views.search_rules'),
    (r'^test/$', 'datastore.views.test'),
    (r'^test$', 'datastore.views.test'),
)
|
nesl/SensorSafeV1-DataStore
|
urls.py
|
Python
|
bsd-3-clause
| 1,987
|
import logging
import os
import ffmpeg
from metadata import Episode
LOGGER = logging.getLogger(__name__)
logging.getLogger('requests').setLevel(logging.WARNING)
def download_episode(episode: Episode, download_dir: str, extension: str) -> None:
"""Download episode
:param download_dir: Path to download directory
    :param episode: Episode to download
    :param extension: File extension
    """
download_path = generate_file_path(episode, download_dir, extension)
LOGGER.info('Downloading episode: %s', episode)
LOGGER.debug('Playlist URL: %s', episode.best_playlist_url)
ffmpeg.input(episode.best_playlist_url).output(download_path).run()
def generate_file_path(episode: Episode, download_dir: str, extension: str) -> str:
"""Generate full file path from path name and file name
:param download_dir: Path to download directory
:param episode: Episode to download
    :param extension: File extension
    :return: Full file path
"""
if not download_dir:
return '{}.{}'.format(episode.file_name, extension)
os.makedirs(download_dir, exist_ok=True)
return '{}/{}.{}'.format(download_dir, episode.file_name, extension)
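# Usage sketch (hypothetical values): with download_dir='downloads' and
# extension='mp4', an episode whose file_name is 'show-s01e01' maps to
# 'downloads/show-s01e01.mp4' (the directory is created if missing).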
|
TheSimoms/NRK-Downloader
|
src/download.py
|
Python
|
gpl-2.0
| 1,158
|
import hail as hl
def densify(sparse_mt):
"""Convert sparse matrix table to a dense VCF-like representation by expanding reference blocks.
Parameters
----------
sparse_mt : :class:`.MatrixTable`
Sparse MatrixTable to densify. The first row key field must
be named ``locus`` and have type ``locus``. Must have an
``END`` entry field of type ``int32``.
Returns
-------
:class:`.MatrixTable`
The densified MatrixTable. The ``END`` entry field is dropped.
While computationally expensive, this
operation is necessary for many downstream analyses, and should be thought of as
roughly costing as much as reading a matrix table created by importing a dense
project VCF.
"""
if list(sparse_mt.row_key)[0] != 'locus' or not isinstance(sparse_mt.locus.dtype, hl.tlocus):
raise ValueError("first row key field must be named 'locus' and have type 'locus'")
if 'END' not in sparse_mt.entry or sparse_mt.END.dtype != hl.tint32:
raise ValueError("'densify' requires 'END' entry field of type 'int32'")
col_key_fields = list(sparse_mt.col_key)
contigs = sparse_mt.locus.dtype.reference_genome.contigs
contig_idx_map = hl.literal({contigs[i]: i for i in range(len(contigs))}, 'dict<str, int32>')
mt = sparse_mt.annotate_rows(__contig_idx=contig_idx_map[sparse_mt.locus.contig])
mt = mt.annotate_entries(__contig=mt.__contig_idx)
t = mt._localize_entries('__entries', '__cols')
t = t.annotate(
__entries=hl.rbind(
hl.scan.array_agg(
lambda entry: hl.scan._prev_nonnull(hl.or_missing(hl.is_defined(entry.END), entry)),
t.__entries),
lambda prev_entries: hl.map(
lambda i:
hl.rbind(
prev_entries[i], t.__entries[i],
lambda prev_entry, entry:
hl.if_else(
(~hl.is_defined(entry)
& (prev_entry.END >= t.locus.position)
& (prev_entry.__contig == t.__contig_idx)),
prev_entry,
entry)),
hl.range(0, hl.len(t.__entries)))))
mt = t._unlocalize_entries('__entries', '__cols', col_key_fields)
mt = mt.drop('__contig_idx', '__contig', 'END')
return mt
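# Usage sketch (hypothetical path):
#   sparse = hl.read_matrix_table('gs://bucket/dataset.sparse.mt')
#   dense = densify(sparse)  # expands reference blocks, drops END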
|
danking/hail
|
hail/python/hail/experimental/vcf_combiner/densify.py
|
Python
|
mit
| 2,369
|
import argparse
import subprocess
import os
here = os.path.abspath(os.path.dirname(__file__))
wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
def build(*args, **kwargs):
subprocess.check_call(["docker",
"build",
"--tag", "wpt:local",
here])
def parser_run():
parser = argparse.ArgumentParser()
parser.add_argument("--rebuild", action="store_true", help="Force rebuild of image")
parser.add_argument("--checkout", action="store",
help="Revision to checkout in the image. "
"If this is not supplied we mount the wpt checkout on the host as "
"/home/test/web-platform-tests/")
parser.add_argument("--privileged", action="store_true",
help="Run the image in priviledged mode (required for emulators)")
return parser
def run(*args, **kwargs):
if kwargs["rebuild"]:
build()
args = ["docker", "run"]
args.extend(["--security-opt", "seccomp:%s" %
os.path.join(wpt_root, "tools", "docker", "seccomp.json")])
if kwargs["privileged"]:
args.append("--privileged")
if kwargs["checkout"]:
args.extend(["--env", "REF==%s" % kwargs["checkout"]])
else:
args.extend(["--mount",
"type=bind,source=%s,target=/home/test/web-platform-tests" % wpt_root])
args.extend(["-it", "wpt:local"])
proc = subprocess.Popen(args)
proc.wait()
return proc.returncode
|
UK992/servo
|
tests/wpt/web-platform-tests/tools/docker/frontend.py
|
Python
|
mpl-2.0
| 1,571
|
# -*- coding: utf-8 -*-
# ***************************************************************************
# * Copyright (c) 2017 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import FreeCAD
import Path
import PathScripts.PathDressup as PathDressup
import PathScripts.PathGeom as PathGeom
import PathScripts.PathLog as PathLog
import PathScripts.PathUtil as PathUtil
import PathScripts.PathUtils as PathUtils
import copy
import math
from PathScripts.PathDressupTagPreferences import HoldingTagPreferences
from PathScripts.PathUtils import waiting_effects
from PySide import QtCore
# lazily loaded modules
from lazy_loader.lazy_loader import LazyLoader
Part = LazyLoader('Part', globals(), 'Part')
PathLog.setLevel(PathLog.Level.INFO, PathLog.thisModule())
#PathLog.trackModule()
failures = []
# Qt translation handling
def translate(context, text, disambig=None):
return QtCore.QCoreApplication.translate(context, text, disambig)
def debugEdge(edge, prefix, force=False):
if force or PathLog.getLevel(PathLog.thisModule()) == PathLog.Level.DEBUG:
pf = edge.valueAt(edge.FirstParameter)
pl = edge.valueAt(edge.LastParameter)
if type(edge.Curve) == Part.Line or type(edge.Curve) == Part.LineSegment:
print("%s %s((%.2f, %.2f, %.2f) - (%.2f, %.2f, %.2f))" % (prefix, type(edge.Curve), pf.x, pf.y, pf.z, pl.x, pl.y, pl.z))
else:
pm = edge.valueAt((edge.FirstParameter+edge.LastParameter)/2)
print("%s %s((%.2f, %.2f, %.2f) - (%.2f, %.2f, %.2f) - (%.2f, %.2f, %.2f))" % (prefix, type(edge.Curve), pf.x, pf.y, pf.z, pm.x, pm.y, pm.z, pl.x, pl.y, pl.z))
def debugMarker(vector, label, color=None, radius=0.5):
if PathLog.getLevel(PathLog.thisModule()) == PathLog.Level.DEBUG:
obj = FreeCAD.ActiveDocument.addObject("Part::Sphere", label)
obj.Label = label
obj.Radius = radius
obj.Placement = FreeCAD.Placement(vector, FreeCAD.Rotation(FreeCAD.Vector(0, 0, 1), 0))
if color:
obj.ViewObject.ShapeColor = color
def debugCylinder(vector, r, height, label, color=None):
if PathLog.getLevel(PathLog.thisModule()) == PathLog.Level.DEBUG:
obj = FreeCAD.ActiveDocument.addObject("Part::Cylinder", label)
obj.Label = label
obj.Radius = r
obj.Height = height
obj.Placement = FreeCAD.Placement(vector, FreeCAD.Rotation(FreeCAD.Vector(0, 0, 1), 0))
obj.ViewObject.Transparency = 90
if color:
obj.ViewObject.ShapeColor = color
def debugCone(vector, r1, r2, height, label, color=None):
if PathLog.getLevel(PathLog.thisModule()) == PathLog.Level.DEBUG:
obj = FreeCAD.ActiveDocument.addObject("Part::Cone", label)
obj.Label = label
obj.Radius1 = r1
obj.Radius2 = r2
obj.Height = height
obj.Placement = FreeCAD.Placement(vector, FreeCAD.Rotation(FreeCAD.Vector(0, 0, 1), 0))
obj.ViewObject.Transparency = 90
if color:
obj.ViewObject.ShapeColor = color
class Tag:
def __init__(self, nr, x, y, width, height, angle, radius, enabled=True):
PathLog.track("%.2f, %.2f, %.2f, %.2f, %.2f, %.2f, %d" % (x, y, width, height, angle, radius, enabled))
self.nr = nr
self.x = x
self.y = y
self.width = math.fabs(width)
self.height = math.fabs(height)
self.actualHeight = self.height
self.angle = math.fabs(angle)
self.radius = radius if FreeCAD.Units.Quantity == type(radius) else FreeCAD.Units.Quantity(radius, FreeCAD.Units.Length)
self.enabled = enabled
self.isSquare = False
# initialized later
self.toolRadius = None
self.realRadius = None
self.r1 = None
self.r2 = None
self.solid = None
self.z = None
def fullWidth(self):
return 2 * self.toolRadius + self.width
def originAt(self, z):
return FreeCAD.Vector(self.x, self.y, z)
def bottom(self):
return self.z
def top(self):
return self.z + self.actualHeight
def createSolidsAt(self, z, R):
self.z = z
self.toolRadius = R
r1 = self.fullWidth() / 2
self.r1 = r1
self.r2 = r1
height = self.height * 1.01
radius = 0
if PathGeom.isRoughly(90, self.angle) and height > 0:
# cylinder
self.isSquare = True
self.solid = Part.makeCylinder(r1, height)
radius = min(min(self.radius, r1), self.height)
PathLog.debug("Part.makeCylinder(%f, %f)" % (r1, height))
elif self.angle > 0.0 and height > 0.0:
# cone
rad = math.radians(self.angle)
tangens = math.tan(rad)
dr = height / tangens
if dr < r1:
# with top
r2 = r1 - dr
s = height / math.sin(rad)
radius = min(r2, s) * math.tan((math.pi - rad)/2) * 0.95
else:
# triangular
r2 = 0
height = r1 * tangens * 1.01
self.actualHeight = height
self.r2 = r2
PathLog.debug("Part.makeCone(%f, %f, %f)" % (r1, r2, height))
self.solid = Part.makeCone(r1, r2, height)
else:
# degenerated case - no tag
PathLog.debug("Part.makeSphere(%f / 10000)" % (r1))
self.solid = Part.makeSphere(r1 / 10000)
if not PathGeom.isRoughly(0, R): # testing is easier if the solid is not rotated
angle = -PathGeom.getAngle(self.originAt(0)) * 180 / math.pi
PathLog.debug("solid.rotate(%f)" % angle)
self.solid.rotate(FreeCAD.Vector(0, 0, 0), FreeCAD.Vector(0, 0, 1), angle)
orig = self.originAt(z - 0.01 * self.actualHeight)
PathLog.debug("solid.translate(%s)" % orig)
self.solid.translate(orig)
radius = min(self.radius, radius)
self.realRadius = radius
if not PathGeom.isRoughly(0, radius.Value):
PathLog.debug("makeFillet(%.4f)" % radius)
self.solid = self.solid.makeFillet(radius, [self.solid.Edges[0]])
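    # In short, createSolidsAt() builds a cylinder for 90-degree tags, a
    # (possibly truncated) cone for angled tags, and a tiny sphere for the
    # degenerate case, then rotates/translates the solid into place and
    # fillets the top edge when a radius is set.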
def filterIntersections(self, pts, face):
if type(face.Surface) == Part.Cone or type(face.Surface) == Part.Cylinder or type(face.Surface) == Part.Toroid:
PathLog.track("it's a cone/cylinder, checking z")
return list([pt for pt in pts if pt.z >= self.bottom() and pt.z <= self.top()])
if type(face.Surface) == Part.Plane:
PathLog.track("it's a plane, checking R")
c = face.Edges[0].Curve
if (type(c) == Part.Circle):
return list([pt for pt in pts if (pt - c.Center).Length <= c.Radius or PathGeom.isRoughly((pt - c.Center).Length, c.Radius)])
PathLog.error("==== we got a %s" % face.Surface)
def isPointOnEdge(self, pt, edge):
param = edge.Curve.parameter(pt)
if edge.FirstParameter <= param <= edge.LastParameter:
return True
if edge.LastParameter <= param <= edge.FirstParameter:
return True
if PathGeom.isRoughly(edge.FirstParameter, param) or PathGeom.isRoughly(edge.LastParameter, param):
return True
# print("-------- X %.2f <= %.2f <=%.2f (%.2f, %.2f, %.2f) %.2f:%.2f" % (edge.FirstParameter, param, edge.LastParameter, pt.x, pt.y, pt.z, edge.Curve.parameter(edge.valueAt(edge.FirstParameter)), edge.Curve.parameter(edge.valueAt(edge.LastParameter))))
# p1 = edge.Vertexes[0]
# f1 = edge.Curve.parameter(FreeCAD.Vector(p1.X, p1.Y, p1.Z))
# p2 = edge.Vertexes[1]
# f2 = edge.Curve.parameter(FreeCAD.Vector(p2.X, p2.Y, p2.Z))
return False
def nextIntersectionClosestTo(self, edge, solid, refPt):
# debugEdge(edge, 'intersects_')
vertexes = edge.common(solid).Vertexes
if vertexes:
pt = sorted(vertexes, key=lambda v: (v.Point - refPt).Length)[0].Point
debugEdge(edge, "intersects (%.2f, %.2f, %.2f) -> (%.2f, %.2f, %.2f)" % (refPt.x, refPt.y, refPt.z, pt.x, pt.y, pt.z))
return pt
return None
def intersects(self, edge, param):
def isDefinitelySmaller(z, zRef):
# Eliminate false positives of edges that just brush along the top of the tag
return z < zRef and not PathGeom.isRoughly(z, zRef, 0.01)
if self.enabled:
zFirst = edge.valueAt(edge.FirstParameter).z
zLast = edge.valueAt(edge.LastParameter).z
zMax = self.top()
if isDefinitelySmaller(zFirst, zMax) or isDefinitelySmaller(zLast, zMax):
return self.nextIntersectionClosestTo(edge, self.solid, edge.valueAt(param))
return None
def bbEdges(self):
edges = []
for i in range(12):
p1, p2 = self.solid.BoundBox.getEdge(i)
edges.append(Part.Edge(Part.LineSegment(p1, p2)))
return edges
def bbShow(self):
for e in self.bbEdges():
Part.show(e)
class MapWireToTag:
def __init__(self, edge, tag, i, segm, maxZ, hSpeed, vSpeed):
debugEdge(edge, 'MapWireToTag(%.2f, %.2f, %.2f)' % (i.x, i.y, i.z))
self.tag = tag
self.segm = segm
self.maxZ = maxZ
self.hSpeed = hSpeed
self.vSpeed = vSpeed
if PathGeom.pointsCoincide(edge.valueAt(edge.FirstParameter), i):
tail = edge
self.commands = []
debugEdge(tail, '.........=')
elif PathGeom.pointsCoincide(edge.valueAt(edge.LastParameter), i):
debugEdge(edge, '++++++++ .')
self.commands = PathGeom.cmdsForEdge(edge, segm=segm, hSpeed = self.hSpeed, vSpeed = self.vSpeed)
tail = None
else:
e, tail = PathGeom.splitEdgeAt(edge, i)
debugEdge(e, '++++++++ .')
self.commands = PathGeom.cmdsForEdge(e, segm=segm, hSpeed = self.hSpeed, vSpeed = self.vSpeed)
debugEdge(tail, '.........-')
self.initialEdge = edge
self.tail = tail
self.edges = []
self.entry = i
if tail:
PathLog.debug("MapWireToTag(%s - %s)" % (i, tail.valueAt(tail.FirstParameter)))
else:
PathLog.debug("MapWireToTag(%s - )" % i)
self.complete = False
self.haveProblem = False
# initialized later
self.edgePoints = None
self.edgesCleanup = None
self.edgesOrder = None
self.entryEdges = None
self.exit = None
self.exitEdges = None
self.finalEdge = None
self.offendingEdge = None
self.realEntry = None
self.realExit = None
def addEdge(self, edge):
debugEdge(edge, '..........')
self.edges.append(edge)
def needToFlipEdge(self, edge, p):
if PathGeom.pointsCoincide(edge.valueAt(edge.LastParameter), p):
return True, edge.valueAt(edge.FirstParameter)
return False, edge.valueAt(edge.LastParameter)
def isEntryOrExitStrut(self, e):
p1 = e.valueAt(e.FirstParameter)
p2 = e.valueAt(e.LastParameter)
if PathGeom.pointsCoincide(p1, self.entry) and p2.z >= self.entry.z:
return 1
if PathGeom.pointsCoincide(p2, self.entry) and p1.z >= self.entry.z:
return 1
if PathGeom.pointsCoincide(p1, self.exit) and p2.z >= self.exit.z:
return 2
if PathGeom.pointsCoincide(p2, self.exit) and p1.z >= self.exit.z:
return 2
return 0
def cleanupEdges(self, edges):
# want to remove all edges from the wire itself, and all internal struts
PathLog.track("+cleanupEdges")
PathLog.debug(" edges:")
if not edges:
return edges
for e in edges:
debugEdge(e, ' ')
PathLog.debug(":")
self.edgesCleanup = [copy.copy(edges)]
# remove any edge that has a point inside the tag solid
# and collect all edges that are connected to the entry and/or exit
self.entryEdges = []
self.exitEdges = []
self.edgePoints = []
for e in copy.copy(edges):
p1 = e.valueAt(e.FirstParameter)
p2 = e.valueAt(e.LastParameter)
self.edgePoints.append(p1)
self.edgePoints.append(p2)
if self.tag.solid.isInside(p1, PathGeom.Tolerance, False) or self.tag.solid.isInside(p2, PathGeom.Tolerance, False):
edges.remove(e)
debugEdge(e, '......... X0', False)
else:
if PathGeom.pointsCoincide(p1, self.entry) or PathGeom.pointsCoincide(p2, self.entry):
self.entryEdges.append(e)
if PathGeom.pointsCoincide(p1, self.exit) or PathGeom.pointsCoincide(p2, self.exit):
self.exitEdges.append(e)
self.edgesCleanup.append(copy.copy(edges))
# if there are no edges connected to entry/exit, it means the plunge in/out is vertical
# we need to add in the missing segment and collect the new entry/exit edges.
if not self.entryEdges:
PathLog.debug("fill entryEdges ...")
self.realEntry = sorted(self.edgePoints, key=lambda p: (p - self.entry).Length)[0]
self.entryEdges = list([e for e in edges if PathGeom.edgeConnectsTo(e, self.realEntry)])
edges.append(Part.Edge(Part.LineSegment(self.entry, self.realEntry)))
else:
self.realEntry = None
if not self.exitEdges:
PathLog.debug("fill exitEdges ...")
self.realExit = sorted(self.edgePoints, key=lambda p: (p - self.exit).Length)[0]
self.exitEdges = list([e for e in edges if PathGeom.edgeConnectsTo(e, self.realExit)])
edges.append(Part.Edge(Part.LineSegment(self.realExit, self.exit)))
else:
self.realExit = None
self.edgesCleanup.append(copy.copy(edges))
# if there are 2 edges attached to entry/exit, throw away the one that is "lower"
if len(self.entryEdges) > 1:
debugEdge(self.entryEdges[0], ' entry[0]', False)
debugEdge(self.entryEdges[1], ' entry[1]', False)
if self.entryEdges[0].BoundBox.ZMax < self.entryEdges[1].BoundBox.ZMax:
edges.remove(self.entryEdges[0])
debugEdge(e, '......... X1', False)
else:
edges.remove(self.entryEdges[1])
debugEdge(e, '......... X2', False)
if len(self.exitEdges) > 1:
debugEdge(self.exitEdges[0], ' exit[0]', False)
debugEdge(self.exitEdges[1], ' exit[1]', False)
if self.exitEdges[0].BoundBox.ZMax < self.exitEdges[1].BoundBox.ZMax:
if self.exitEdges[0] in edges:
edges.remove(self.exitEdges[0])
debugEdge(e, '......... X3', False)
else:
if self.exitEdges[1] in edges:
edges.remove(self.exitEdges[1])
debugEdge(e, '......... X4', False)
self.edgesCleanup.append(copy.copy(edges))
return edges
def orderAndFlipEdges(self, inputEdges):
PathLog.track("entry(%.2f, %.2f, %.2f), exit(%.2f, %.2f, %.2f)" % (self.entry.x, self.entry.y, self.entry.z, self.exit.x, self.exit.y, self.exit.z))
self.edgesOrder = []
outputEdges = []
p0 = self.entry
lastP = p0
edges = copy.copy(inputEdges)
while edges:
# print("(%.2f, %.2f, %.2f) %d %d" % (p0.x, p0.y, p0.z))
for e in copy.copy(edges):
p1 = e.valueAt(e.FirstParameter)
p2 = e.valueAt(e.LastParameter)
if PathGeom.pointsCoincide(p1, p0):
outputEdges.append((e, False))
edges.remove(e)
lastP = None
p0 = p2
debugEdge(e, ">>>>> no flip")
break
elif PathGeom.pointsCoincide(p2, p0):
flipped = PathGeom.flipEdge(e)
                    if flipped is not None:
outputEdges.append((flipped, True))
else:
p0 = None
cnt = 0
for p in reversed(e.discretize(Deflection=0.01)):
                            if p0 is not None:
outputEdges.append((Part.Edge(Part.LineSegment(p0, p)), True))
cnt = cnt + 1
p0 = p
PathLog.info("replaced edge with %d straight segments" % cnt)
edges.remove(e)
lastP = None
p0 = p1
debugEdge(e, ">>>>> flip")
break
else:
debugEdge(e, "<<<<< (%.2f, %.2f, %.2f)" % (p0.x, p0.y, p0.z))
if lastP == p0:
self.edgesOrder.append(outputEdges)
self.edgesOrder.append(edges)
PathLog.debug('input edges:')
for e in inputEdges:
debugEdge(e, ' ', False)
PathLog.debug('ordered edges:')
for e, flip in outputEdges:
debugEdge(e, ' %c ' % ('<' if flip else '>'), False)
PathLog.debug('remaining edges:')
for e in edges:
debugEdge(e, ' ', False)
raise ValueError("No connection to %s" % (p0))
elif lastP:
PathLog.debug("xxxxxx (%.2f, %.2f, %.2f) (%.2f, %.2f, %.2f)" % (p0.x, p0.y, p0.z, lastP.x, lastP.y, lastP.z))
else:
PathLog.debug("xxxxxx (%.2f, %.2f, %.2f) -" % (p0.x, p0.y, p0.z))
lastP = p0
PathLog.track("-")
return outputEdges
def isStrut(self, edge):
p1 = PathGeom.xy(edge.valueAt(edge.FirstParameter))
p2 = PathGeom.xy(edge.valueAt(edge.LastParameter))
return PathGeom.pointsCoincide(p1, p2)
def shell(self):
if len(self.edges) > 1:
wire = Part.Wire(self.initialEdge)
else:
edge = self.edges[0]
if PathGeom.pointsCoincide(edge.valueAt(edge.FirstParameter), self.finalEdge.valueAt(self.finalEdge.FirstParameter)):
wire = Part.Wire(self.finalEdge)
elif hasattr(self, 'initialEdge') and PathGeom.pointsCoincide(edge.valueAt(edge.FirstParameter), self.initialEdge.valueAt(self.initialEdge.FirstParameter)):
wire = Part.Wire(self.initialEdge)
else:
wire = Part.Wire(edge)
for edge in self.edges[1:]:
if PathGeom.pointsCoincide(edge.valueAt(edge.FirstParameter), self.finalEdge.valueAt(self.finalEdge.FirstParameter)):
wire.add(self.finalEdge)
else:
wire.add(edge)
shell = wire.extrude(FreeCAD.Vector(0, 0, self.tag.height + 1))
nullFaces = list([f for f in shell.Faces if PathGeom.isRoughly(f.Area, 0)])
if nullFaces:
return shell.removeShape(nullFaces)
return shell
def commandsForEdges(self):
global failures # pylint: disable=global-statement
if self.edges:
try:
shape = self.shell().common(self.tag.solid)
commands = []
rapid = None
for e, flip in self.orderAndFlipEdges(self.cleanupEdges(shape.Edges)):
debugEdge(e, '++++++++ %s' % ('<' if flip else '>'), False)
p1 = e.valueAt(e.FirstParameter)
p2 = e.valueAt(e.LastParameter)
if self.tag.isSquare and (PathGeom.isRoughly(p1.z, self.maxZ) or p1.z > self.maxZ) and (PathGeom.isRoughly(p2.z, self.maxZ) or p2.z > self.maxZ):
rapid = p1 if flip else p2
else:
if rapid:
commands.append(Path.Command('G0', {'X': rapid.x, 'Y': rapid.y, 'Z': rapid.z}))
rapid = None
commands.extend(PathGeom.cmdsForEdge(e, False, False, self.segm, hSpeed = self.hSpeed, vSpeed = self.vSpeed))
if rapid:
commands.append(Path.Command('G0', {'X': rapid.x, 'Y': rapid.y, 'Z': rapid.z}))
# rapid = None # commented out per LGTM suggestion
return commands
except Exception as e: # pylint: disable=broad-except
PathLog.error("Exception during processing tag @(%.2f, %.2f) (%s) - disabling the tag" % (self.tag.x, self.tag.y, e.args[0]))
#if sys.version_info.major < 3:
# traceback.print_exc(e)
#else:
# traceback.print_exc()
self.tag.enabled = False
commands = []
for e in self.edges:
commands.extend(PathGeom.cmdsForEdge(e, hSpeed = self.hSpeed, vSpeed = self.vSpeed))
failures.append(self)
return commands
return []
def add(self, edge):
self.tail = None
self.finalEdge = edge
if self.tag.solid.isInside(edge.valueAt(edge.LastParameter), PathGeom.Tolerance, True):
PathLog.track('solid.isInside')
self.addEdge(edge)
else:
i = self.tag.intersects(edge, edge.LastParameter)
if not i:
self.offendingEdge = edge
debugEdge(edge, 'offending Edge:', False)
o = self.tag.originAt(self.tag.z)
PathLog.debug('originAt: (%.2f, %.2f, %.2f)' % (o.x, o.y, o.z))
i = edge.valueAt(edge.FirstParameter)
if PathGeom.pointsCoincide(i, edge.valueAt(edge.FirstParameter)):
PathLog.track('tail')
self.tail = edge
else:
PathLog.track('split')
e, tail = PathGeom.splitEdgeAt(edge, i)
self.addEdge(e)
self.tail = tail
self.exit = i
self.complete = True
self.commands.extend(self.commandsForEdges())
def mappingComplete(self):
return self.complete
class _RapidEdges:
def __init__(self, rapid):
self.rapid = rapid
def isRapid(self, edge):
if type(edge.Curve) == Part.Line or type(edge.Curve) == Part.LineSegment:
v0 = edge.Vertexes[0]
v1 = edge.Vertexes[1]
for r in self.rapid:
r0 = r.Vertexes[0]
r1 = r.Vertexes[1]
if PathGeom.isRoughly(r0.X, v0.X) and PathGeom.isRoughly(r0.Y, v0.Y) and PathGeom.isRoughly(r0.Z, v0.Z) and PathGeom.isRoughly(r1.X, v1.X) and PathGeom.isRoughly(r1.Y, v1.Y) and PathGeom.isRoughly(r1.Z, v1.Z):
return True
return False
class PathData:
def __init__(self, obj):
PathLog.track(obj.Base.Name)
self.obj = obj
self.wire, rapid = PathGeom.wireForPath(obj.Base.Path)
self.rapid = _RapidEdges(rapid)
if self.wire:
self.edges = self.wire.Edges
else:
self.edges = []
self.baseWire = self.findBottomWire(self.edges)
def findBottomWire(self, edges):
(minZ, maxZ) = self.findZLimits(edges)
self.minZ = minZ
self.maxZ = maxZ
bottom = [e for e in edges if PathGeom.isRoughly(e.Vertexes[0].Point.z, minZ) and PathGeom.isRoughly(e.Vertexes[1].Point.z, minZ)]
self.bottomEdges = bottom
try:
wire = Part.Wire(bottom)
if wire.isClosed():
return wire
except Exception: # pylint: disable=broad-except
#if sys.version_info.major < 3:
# traceback.print_exc(e)
#else:
# traceback.print_exc()
return None
def supportsTagGeneration(self):
return self.baseWire is not None
def findZLimits(self, edges):
        # Ignoring arc/sphere extrema in Z, find the highest and lowest vertex Z values
        minZ = float("inf")
        maxZ = float("-inf")
for e in edges:
if self.rapid.isRapid(e):
continue
for v in e.Vertexes:
if v.Point.z < minZ:
minZ = v.Point.z
if v.Point.z > maxZ:
maxZ = v.Point.z
return (minZ, maxZ)
def shortestAndLongestPathEdge(self):
edges = sorted(self.bottomEdges, key=lambda e: e.Length)
return (edges[0], edges[-1])
def generateTags(self, obj, count, width=None, height=None, angle=None, radius=None, spacing=None):
# pylint: disable=unused-argument
PathLog.track(count, width, height, angle, spacing)
# for e in self.baseWire.Edges:
# debugMarker(e.Vertexes[0].Point, 'base', (0.0, 1.0, 1.0), 0.2)
if spacing:
tagDistance = spacing
else:
tagDistance = self.baseWire.Length / (count if count else 4)
W = width if width else self.defaultTagWidth()
H = height if height else self.defaultTagHeight()
A = angle if angle else self.defaultTagAngle()
R = radius if radius else self.defaultTagRadius()
# start assigning tags on the longest segment
(shortestEdge, longestEdge) = self.shortestAndLongestPathEdge()
startIndex = 0
for i in range(0, len(self.baseWire.Edges)):
edge = self.baseWire.Edges[i]
PathLog.debug(' %d: %.2f' % (i, edge.Length))
if PathGeom.isRoughly(edge.Length, longestEdge.Length):
startIndex = i
break
startEdge = self.baseWire.Edges[startIndex]
startCount = int(startEdge.Length / tagDistance)
if (longestEdge.Length - shortestEdge.Length) > shortestEdge.Length:
startCount = int(startEdge.Length / tagDistance) + 1
lastTagLength = (startEdge.Length + (startCount - 1) * tagDistance) / 2
currentLength = startEdge.Length
minLength = min(2. * W, longestEdge.Length)
PathLog.debug("length=%.2f shortestEdge=%.2f(%.2f) longestEdge=%.2f(%.2f) minLength=%.2f" % (self.baseWire.Length, shortestEdge.Length, shortestEdge.Length/self.baseWire.Length, longestEdge.Length, longestEdge.Length / self.baseWire.Length, minLength))
PathLog.debug(" start: index=%-2d count=%d (length=%.2f, distance=%.2f)" % (startIndex, startCount, startEdge.Length, tagDistance))
PathLog.debug(" -> lastTagLength=%.2f)" % lastTagLength)
PathLog.debug(" -> currentLength=%.2f)" % currentLength)
edgeDict = {startIndex: startCount}
for i in range(startIndex + 1, len(self.baseWire.Edges)):
edge = self.baseWire.Edges[i]
(currentLength, lastTagLength) = self.processEdge(i, edge, currentLength, lastTagLength, tagDistance, minLength, edgeDict)
for i in range(0, startIndex):
edge = self.baseWire.Edges[i]
(currentLength, lastTagLength) = self.processEdge(i, edge, currentLength, lastTagLength, tagDistance, minLength, edgeDict)
tags = []
for (i, count) in PathUtil.keyValueIter(edgeDict):
edge = self.baseWire.Edges[i]
PathLog.debug(" %d: %d" % (i, count))
# debugMarker(edge.Vertexes[0].Point, 'base', (1.0, 0.0, 0.0), 0.2)
# debugMarker(edge.Vertexes[1].Point, 'base', (0.0, 1.0, 0.0), 0.2)
if 0 != count:
distance = (edge.LastParameter - edge.FirstParameter) / count
for j in range(0, count):
tag = edge.Curve.value((j+0.5) * distance)
tags.append(Tag(j, tag.x, tag.y, W, H, A, R, True))
return tags
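    # Worked example for the spacing logic above (illustrative numbers, not
    # from the original source): with baseWire.Length == 100 and count == 4,
    # tagDistance == 25; a starting edge of length 60 then gets
    # int(60 / 25) == 2 tags, each placed at (j + 0.5) * distance along the
    # edge so the tags sit centered in their slots rather than at the ends.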
def copyTags(self, obj, fromObj, width, height, angle, radius, production=True):
print("copyTags(%s, %s, %.2f, %.2f, %.2f, %.2f" % (obj.Label, fromObj.Label, width, height, angle, radius))
W = width if width else self.defaultTagWidth()
H = height if height else self.defaultTagHeight()
A = angle if angle else self.defaultTagAngle()
R = radius if radius else self.defaultTagRadius()
tags = []
j = 0
for i, pos in enumerate(fromObj.Positions):
print("tag[%d]" % i)
            if i not in fromObj.Disabled:
dist = self.baseWire.distToShape(Part.Vertex(FreeCAD.Vector(pos.x, pos.y, self.minZ)))
if production or dist[0] < W:
                # russ4262: `production` used to be a literal `True` here, which forced
                # this branch to always be taken; it is now a proper argument so the
                # condition is meaningful (and keeps LGTM happy).
print("tag[%d/%d]: (%.2f, %.2f, %.2f)" % (i, j, pos.x, pos.y, self.minZ))
at = dist[1][0][0]
tags.append(Tag(j, at.x, at.y, W, H, A, R, True))
j += 1
else:
PathLog.warning("Tag[%d] (%.2f, %.2f, %.2f) is too far away to copy: %.2f (%.2f)" % (i, pos.x, pos.y, self.minZ, dist[0], W))
else:
PathLog.info("tag[%d]: not enabled, skipping" % i)
print("copied %d tags" % len(tags))
return tags
def processEdge(self, index, edge, currentLength, lastTagLength, tagDistance, minLength, edgeDict):
tagCount = 0
currentLength += edge.Length
if edge.Length >= minLength:
while lastTagLength + tagDistance < currentLength:
tagCount += 1
lastTagLength += tagDistance
if tagCount > 0:
PathLog.debug(" index=%d -> count=%d" % (index, tagCount))
edgeDict[index] = tagCount
else:
PathLog.debug(" skipping=%-2d (%.2f)" % (index, edge.Length))
return (currentLength, lastTagLength)
def defaultTagHeight(self):
if hasattr(self.obj, 'Base') and hasattr(self.obj.Base, 'StartDepth') and hasattr(self.obj.Base, 'FinalDepth'):
pathHeight = (self.obj.Base.StartDepth - self.obj.Base.FinalDepth).Value
else:
pathHeight = self.maxZ - self.minZ
height = HoldingTagPreferences.defaultHeight(pathHeight / 2)
if height > pathHeight:
return pathHeight
return height
def defaultTagWidth(self):
width = self.shortestAndLongestPathEdge()[1].Length / 10
return HoldingTagPreferences.defaultWidth(width)
def defaultTagAngle(self):
return HoldingTagPreferences.defaultAngle()
def defaultTagRadius(self):
return HoldingTagPreferences.defaultRadius()
def sortedTags(self, tags):
ordered = []
for edge in self.bottomEdges:
ts = [t for t in tags if PathGeom.isRoughly(0, Part.Vertex(t.originAt(self.minZ)).distToShape(edge)[0], 0.1)]
for t in sorted(ts, key=lambda t, edge=edge: (t.originAt(self.minZ) - edge.valueAt(edge.FirstParameter)).Length):
tags.remove(t)
ordered.append(t)
# disable all tags that are not on the base wire.
for tag in tags:
PathLog.info("Tag #%d (%.2f, %.2f, %.2f) not on base wire - disabling\n" % (len(ordered), tag.x, tag.y, self.minZ))
tag.enabled = False
ordered.append(tag)
return ordered
def pointIsOnPath(self, p):
v = Part.Vertex(self.pointAtBottom(p))
PathLog.debug("pt = (%f, %f, %f)" % (v.X, v.Y, v.Z))
for e in self.bottomEdges:
indent = "{} ".format(e.distToShape(v)[0])
debugEdge(e, indent, True)
if PathGeom.isRoughly(0.0, v.distToShape(e)[0], 0.1):
return True
return False
def pointAtBottom(self, p):
return FreeCAD.Vector(p.x, p.y, self.minZ)
class ObjectTagDressup:
def __init__(self, obj, base):
obj.addProperty("App::PropertyLink", "Base", "Base", QtCore.QT_TRANSLATE_NOOP("Path_DressupTag", "The base path to modify"))
obj.addProperty("App::PropertyLength", "Width", "Tag", QtCore.QT_TRANSLATE_NOOP("Path_DressupTag", "Width of tags."))
obj.addProperty("App::PropertyLength", "Height", "Tag", QtCore.QT_TRANSLATE_NOOP("Path_DressupTag", "Height of tags."))
obj.addProperty("App::PropertyAngle", "Angle", "Tag", QtCore.QT_TRANSLATE_NOOP("Path_DressupTag", "Angle of tag plunge and ascent."))
obj.addProperty("App::PropertyLength", "Radius", "Tag", QtCore.QT_TRANSLATE_NOOP("Path_DressupTag", "Radius of the fillet for the tag."))
obj.addProperty("App::PropertyVectorList", "Positions", "Tag", QtCore.QT_TRANSLATE_NOOP("Path_DressupTag", "Locations of inserted holding tags"))
obj.addProperty("App::PropertyIntegerList", "Disabled", "Tag", QtCore.QT_TRANSLATE_NOOP("Path_DressupTag", "IDs of disabled holding tags"))
obj.addProperty("App::PropertyInteger", "SegmentationFactor", "Tag", QtCore.QT_TRANSLATE_NOOP("Path_DressupTag", "Factor determining the # of segments used to approximate rounded tags."))
# for pylint ...
self.obj = obj
self.solids = []
self.tags = []
self.pathData = None
self.toolRadius = None
self.mappers = []
obj.Proxy = self
obj.Base = base
def __getstate__(self):
return None
def __setstate__(self, state):
self.obj = state
self.solids = []
self.tags = []
self.pathData = None
self.toolRadius = None
self.mappers = []
return None
def onDocumentRestored(self, obj):
self.obj = obj
def supportsTagGeneration(self, obj):
if not self.pathData:
self.setup(obj)
return self.pathData.supportsTagGeneration()
def generateTags(self, obj, count):
if self.supportsTagGeneration(obj):
if self.pathData:
self.tags = self.pathData.generateTags(obj, count, obj.Width.Value, obj.Height.Value, obj.Angle, obj.Radius.Value, None)
obj.Positions = [tag.originAt(self.pathData.minZ) for tag in self.tags]
obj.Disabled = []
return False
else:
self.setup(obj, count)
self.execute(obj)
return True
else:
self.tags = []
obj.Positions = []
obj.Disabled = []
return False
def copyTags(self, obj, fromObj):
obj.Width = fromObj.Width
obj.Height = fromObj.Height
obj.Angle = fromObj.Angle
obj.Radius = fromObj.Radius
obj.SegmentationFactor = fromObj.SegmentationFactor
self.tags = self.pathData.copyTags(obj, fromObj, obj.Width.Value, obj.Height.Value, obj.Angle, obj.Radius.Value)
obj.Positions = [tag.originAt(self.pathData.minZ) for tag in self.tags]
obj.Disabled = []
return False
def isValidTagStartIntersection(self, edge, i):
if PathGeom.pointsCoincide(i, edge.valueAt(edge.LastParameter)):
return False
p1 = edge.valueAt(edge.FirstParameter)
p2 = edge.valueAt(edge.LastParameter)
if PathGeom.pointsCoincide(PathGeom.xy(p1), PathGeom.xy(p2)):
# if this vertical goes up, it can't be the start of a tag intersection
if p1.z < p2.z:
return False
return True
def createPath(self, obj, pathData, tags):
PathLog.track()
commands = []
lastEdge = 0
lastTag = 0
# sameTag = None
t = 0
# inters = None
edge = None
segm = 50
if hasattr(obj, 'SegmentationFactor'):
segm = obj.SegmentationFactor
if segm <= 0:
segm = 50
obj.SegmentationFactor = 50
self.mappers = []
mapper = None
tc = PathDressup.toolController(obj.Base)
horizFeed = tc.HorizFeed.Value
vertFeed = tc.VertFeed.Value
horizRapid = tc.HorizRapid.Value
vertRapid = tc.VertRapid.Value
while edge or lastEdge < len(pathData.edges):
PathLog.debug("------- lastEdge = %d/%d.%d/%d" % (lastEdge, lastTag, t, len(tags)))
if not edge:
edge = pathData.edges[lastEdge]
debugEdge(edge, "======= new edge: %d/%d" % (lastEdge, len(pathData.edges)))
lastEdge += 1
# sameTag = None
if mapper:
mapper.add(edge)
if mapper.mappingComplete():
commands.extend(mapper.commands)
edge = mapper.tail
mapper = None
else:
edge = None
if edge:
tIndex = (t + lastTag) % len(tags)
t += 1
i = tags[tIndex].intersects(edge, edge.FirstParameter)
if i and self.isValidTagStartIntersection(edge, i):
mapper = MapWireToTag(edge, tags[tIndex], i, segm, pathData.maxZ, hSpeed = horizFeed, vSpeed = vertFeed)
self.mappers.append(mapper)
edge = mapper.tail
if not mapper and t >= len(tags):
# gone through all tags, consume edge and move on
if edge:
debugEdge(edge, '++++++++')
if pathData.rapid.isRapid(edge):
v = edge.Vertexes[1]
if not commands and PathGeom.isRoughly(0, v.X) and PathGeom.isRoughly(0, v.Y) and not PathGeom.isRoughly(0, v.Z):
# The very first move is just to move to ClearanceHeight
commands.append(Path.Command('G0', {'Z': v.Z, 'F': horizRapid}))
else:
commands.append(Path.Command('G0', {'X': v.X, 'Y': v.Y, 'Z': v.Z, 'F': vertRapid}))
else:
commands.extend(PathGeom.cmdsForEdge(edge, segm=segm, hSpeed = horizFeed, vSpeed = vertFeed))
edge = None
t = 0
return Path.Path(commands)
def problems(self):
return list([m for m in self.mappers if m.haveProblem])
def createTagsPositionDisabled(self, obj, positionsIn, disabledIn):
rawTags = []
for i, pos in enumerate(positionsIn):
            tag = Tag(i, pos.x, pos.y, obj.Width.Value, obj.Height.Value, obj.Angle, obj.Radius, i not in disabledIn)
tag.createSolidsAt(self.pathData.minZ, self.toolRadius)
rawTags.append(tag)
# disable all tags that intersect with their previous tag
prev = None
tags = []
positions = []
disabled = []
for i, tag in enumerate(self.pathData.sortedTags(rawTags)):
if tag.enabled:
if prev:
if prev.solid.common(tag.solid).Faces:
PathLog.info("Tag #%d intersects with previous tag - disabling\n" % i)
PathLog.debug("this tag = %d [%s]" % (i, tag.solid.BoundBox))
tag.enabled = False
elif self.pathData.edges:
e = self.pathData.edges[0]
p0 = e.valueAt(e.FirstParameter)
p1 = e.valueAt(e.LastParameter)
if tag.solid.isInside(p0, PathGeom.Tolerance, True) or tag.solid.isInside(p1, PathGeom.Tolerance, True):
PathLog.info("Tag #%d intersects with starting point - disabling\n" % i)
tag.enabled = False
if tag.enabled:
prev = tag
PathLog.debug("previousTag = %d [%s]" % (i, prev))
else:
disabled.append(i)
tag.nr = i # assign final nr
tags.append(tag)
positions.append(tag.originAt(self.pathData.minZ))
return (tags, positions, disabled)
def execute(self, obj):
# import cProfile
# pr = cProfile.Profile()
# pr.enable()
self.doExecute(obj)
# pr.disable()
# pr.print_stats()
def doExecute(self, obj):
if not obj.Base:
return
if not obj.Base.isDerivedFrom("Path::Feature"):
return
if not obj.Base.Path:
return
if not obj.Base.Path.Commands:
return
pathData = self.setup(obj)
if not pathData:
PathLog.debug("execute - no pathData")
return
self.tags = []
if hasattr(obj, "Positions"):
self.tags, positions, disabled = self.createTagsPositionDisabled(obj, obj.Positions, obj.Disabled)
if obj.Disabled != disabled:
PathLog.debug("Updating properties.... %s vs. %s" % (obj.Disabled, disabled))
obj.Positions = positions
obj.Disabled = disabled
if not self.tags:
PathLog.debug("execute - no tags")
obj.Path = obj.Base.Path
return
try:
self.processTags(obj)
except Exception as e: # pylint: disable=broad-except
PathLog.error("processing tags failed clearing all tags ... '%s'" % (e.args[0]))
#if sys.version_info.major < 3:
# traceback.print_exc(e)
#else:
# traceback.print_exc()
obj.Path = obj.Base.Path
# update disabled in case there are some additional ones
disabled = copy.copy(self.obj.Disabled)
solids = []
for tag in self.tags:
solids.append(tag.solid)
if not tag.enabled and tag.nr not in disabled:
disabled.append(tag.nr)
self.solids = solids
if obj.Disabled != disabled:
obj.Disabled = disabled
@waiting_effects
def processTags(self, obj):
global failures # pylint: disable=global-statement
failures = []
tagID = 0
if PathLog.getLevel(PathLog.thisModule()) == PathLog.Level.DEBUG:
for tag in self.tags:
tagID += 1
if tag.enabled:
PathLog.debug("x=%s, y=%s, z=%s" % (tag.x, tag.y, self.pathData.minZ))
# debugMarker(FreeCAD.Vector(tag.x, tag.y, self.pathData.minZ), "tag-%02d" % tagID , (1.0, 0.0, 1.0), 0.5)
# if not PathGeom.isRoughly(90, tag.angle):
# debugCone(tag.originAt(self.pathData.minZ), tag.r1, tag.r2, tag.actualHeight, "tag-%02d" % tagID)
# else:
# debugCylinder(tag.originAt(self.pathData.minZ), tag.fullWidth()/2, tag.actualHeight, "tag-%02d" % tagID)
obj.Path = self.createPath(obj, self.pathData, self.tags)
def setup(self, obj, generate=False):
PathLog.debug("setup")
self.obj = obj
try:
pathData = PathData(obj)
except ValueError:
PathLog.error(translate("Path_DressupTag", "Cannot insert holding tags for this path - please select a Profile path")+"\n")
#if sys.version_info.major < 3:
# traceback.print_exc(e)
#else:
# traceback.print_exc()
return None
self.toolRadius = float(PathDressup.toolController(obj.Base).Tool.Diameter) / 2
self.pathData = pathData
if generate:
obj.Height = self.pathData.defaultTagHeight()
obj.Width = self.pathData.defaultTagWidth()
obj.Angle = self.pathData.defaultTagAngle()
obj.Radius = self.pathData.defaultTagRadius()
count = HoldingTagPreferences.defaultCount()
self.generateTags(obj, count)
return self.pathData
def setXyEnabled(self, triples):
PathLog.track()
if not self.pathData:
self.setup(self.obj)
positions = []
disabled = []
for i, (x, y, enabled) in enumerate(triples):
# print("%d: (%.2f, %.2f) %d" % (i, x, y, enabled))
positions.append(FreeCAD.Vector(x, y, 0))
if not enabled:
disabled.append(i)
self.tags, self.obj.Positions, self.obj.Disabled = self.createTagsPositionDisabled(self.obj, positions, disabled)
self.processTags(self.obj)
def pointIsOnPath(self, obj, point):
if not self.pathData:
self.setup(obj)
return self.pathData.pointIsOnPath(point)
def pointAtBottom(self, obj, point):
if not self.pathData:
self.setup(obj)
return self.pathData.pointAtBottom(point)
def Create(baseObject, name='DressupTag'):
'''
Create(basePath, name='DressupTag') ... create tag dressup object for the given base path.
'''
if not baseObject.isDerivedFrom('Path::Feature'):
PathLog.error(translate('Path_DressupTag', 'The selected object is not a path')+'\n')
return None
if baseObject.isDerivedFrom('Path::FeatureCompoundPython'):
PathLog.error(translate('Path_DressupTag', 'Please select a Profile object'))
return None
obj = FreeCAD.ActiveDocument.addObject("Path::FeaturePython", name)
dbo = ObjectTagDressup(obj, baseObject)
job = PathUtils.findParentJob(baseObject)
job.Proxy.addOperation(obj, baseObject)
dbo.setup(obj, True)
return obj
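# Illustrative use from the FreeCAD Python console (a sketch, assuming an
# existing Profile operation named 'Profile' in the active document):
#
#     import PathScripts.PathDressupHoldingTags as PathDressupHoldingTags
#     dressup = PathDressupHoldingTags.Create(App.ActiveDocument.Profile)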
PathLog.notice("Loading Path_DressupTag... done\n")
|
sanguinariojoe/FreeCAD
|
src/Mod/Path/PathScripts/PathDressupHoldingTags.py
|
Python
|
lgpl-2.1
| 47,030
|
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from getpaid.backends import PaymentProcessorBase
class PaymentProcessor(PaymentProcessorBase):
BACKEND = 'getpaid.backends.dummy'
BACKEND_NAME = _('Dummy backend')
BACKEND_ACCEPTED_CURRENCY = (u'PLN', u'EUR', u'USD')
def get_gateway_url(self, request):
return reverse('getpaid-dummy-authorization', kwargs={'pk': self.payment.pk}), "GET", {}
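# Illustrative sketch (not part of the upstream file): a backend like this is
# typically enabled through the GETPAID_BACKENDS setting, after which the
# (url, method, params) triple returned by get_gateway_url() tells getpaid to
# redirect the user with a plain GET request and no extra form data:
#
#     GETPAID_BACKENDS = ('getpaid.backends.dummy',)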
|
anih/django-getpaid
|
getpaid/backends/dummy/__init__.py
|
Python
|
mit
| 458
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..framework import Program, default_main_program, Parameter, Variable
from ..layer_helper import LayerHelper
def hash_name_to_server(params_grads, pserver_endpoints):
"""
    :param params_grads: list of (param, grad) pairs.
:return: a map of pserver endpoint ->
params -> [param list]
grads -> [grad list]
"""
def _hash_param(param_name, total):
return hash(param_name) % total
param_grad_map = dict()
for param, grad in params_grads:
if param.trainable is True and grad is not None:
server_id = _hash_param(param.name, len(pserver_endpoints))
server_for_param = pserver_endpoints[server_id]
            if server_for_param not in param_grad_map:
param_grad_map[server_for_param] = {"params": [], "grads": []}
param_grad_map[server_for_param]["params"].append(param)
param_grad_map[server_for_param]["grads"].append(grad)
return param_grad_map
def round_robin(params_grads, pserver_endpoints):
assert (len(params_grads) > len(pserver_endpoints))
param_grad_map = dict()
pserver_idx = 0
for param, grad in params_grads:
if param.trainable is True:
server_for_param = pserver_endpoints[pserver_idx]
            if server_for_param not in param_grad_map:
param_grad_map[server_for_param] = {"params": [], "grads": []}
param_grad_map[server_for_param]["params"].append(param)
param_grad_map[server_for_param]["grads"].append(grad)
pserver_idx += 1
if pserver_idx >= len(pserver_endpoints):
pserver_idx = 0
return param_grad_map
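# Illustrative shape of the mapping both split methods return (endpoint and
# variable names are hypothetical):
#
#     {
#         "127.0.0.1:6174": {"params": [w0, w2], "grads": [g0, g2]},
#         "127.0.0.1:6175": {"params": [w1], "grads": [g1]},
#     }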
class SimpleDistributeTranspiler:
def transpile(self,
optimize_ops,
params_grads,
program=None,
pservers="127.0.0.1:6174",
trainers=1,
split_method=round_robin):
"""
        Transpile the program into distributed data-parallelism programs.
        The main_program will be transformed to use a remote parameter server
        to do parameter optimization, and the optimization graph will be put
        into a parameter server program.
        Use different methods to split trainable variables across different
        parameter servers.
Example to run:
exe = fluid.Executor(place)
t = fluid.DistributeTranspiler()
t.transpile(optimize_ops, params_grads, pservers="127.0.0.1:6174", trainers=1)
pserver_endpoint = os.getenv("PSERVER")
if pserver_endpoint:
pserver_prog = t.get_pserver_program(pserver_endpoint, optimize_ops)
exe.run(fluid.default_startup_program())
exe.run(pserver_prog)
else:
feeder = fluid.DataFeeder(feed_list=[images, label], place=place)
exe.run(fluid.default_startup_program())
for pass_id in range(PASS_NUM):
...
:param optimize_ops: op list of optimization, should be the
return value of Optimizer.minimize
:type optimize_ops: list
:param program: program to optimize, default default_main_program
:param pservers: parameter server endpoints like "m1:6174,m2:6174"
:type pservers: string
:return: return a list of programs
"""
if program is None:
program = default_main_program()
self.program = program
self.trainers = trainers
self.optimize_ops = optimize_ops
self._optimize_distributed(
optimize_ops,
program,
params_grads,
pservers=pservers,
trainers=trainers,
split_method=split_method)
def _clone_param(self, block, v):
assert isinstance(v, Parameter)
new_p = Parameter(
block=block,
shape=v.shape,
dtype=v.dtype,
type=v.type,
lod_level=v.lod_level,
stop_gradient=v.stop_gradient,
trainable=v.trainable,
optimize_attr=v.optimize_attr,
regularizer=v.regularizer,
name=v.name)
block.vars[new_p.name] = new_p
def _clone_var(self, block, var):
assert isinstance(var, Variable)
return block.create_var(
name=var.name,
shape=var.shape,
dtype=var.dtype,
type=var.type,
lod_level=var.lod_level,
persistable=var.persistable)
def _optimize_distributed(self, optimize_ops, program, params_and_grads,
**kwargs):
if kwargs.has_key("split_method"):
split_method = kwargs["split_method"]
else:
split_method = round_robin
assert (callable(split_method))
pserver_endpoints = kwargs["pservers"].split(",")
self.param_grad_map = split_method(params_and_grads, pserver_endpoints)
send_op_ordered_inputs = []
send_op_ordered_outputs = []
epmap = []
        for ep, v in self.param_grad_map.items():
send_op_ordered_inputs.extend(v["grads"])
send_op_ordered_outputs.extend(v["params"])
for i in v["grads"]:
epmap.append(ep)
send_op = program.global_block().append_op(
type="send",
inputs={"X": send_op_ordered_inputs
            },  # inputs is a list of tensors to be sent
outputs={"Out": send_op_ordered_outputs},
attrs={"endpoints": pserver_endpoints,
"epmap": epmap})
def get_trainer_program(self):
# remove optimize ops and add a send op to main_program
self.program.global_block().delete_ops(self.optimize_ops)
return self.program
def _create_var_for_trainers(self, block, var, trainers):
var_list = []
        for i in range(trainers):
var_each = block.create_var(
name="%s.trainer_%d" % (var.name, i),
                persistable=var.persistable,
dtype=var.dtype,
shape=var.shape)
var_list.append(var_each)
return var_list
def get_pserver_program(self, endpoint, optimize_ops):
pserver_program = Program()
for v in self.param_grad_map[endpoint]["params"]:
self._clone_param(pserver_program.global_block(), v)
optimize_sub_program = Program()
grad_var_names = [
var.name for var in self.param_grad_map[endpoint]["grads"]
]
for opt_op in optimize_ops:
            for _, var in opt_op.inputs.items():
# NOTE: append operators to merge gradients from multiple
# trainers. If trainers == 1, this is not needed.
if self.trainers > 1 and var.name in grad_var_names:
vars2merge = self._create_var_for_trainers(
optimize_sub_program.global_block(), var, self.trainers)
merged_var = optimize_sub_program.global_block().create_var(
name=var.name,
persistable=var.persistable,
dtype=var.dtype,
shape=var.shape)
optimize_sub_program.global_block().append_op(
type="sum",
inputs={"X": vars2merge},
outputs={"Out": merged_var})
optimize_sub_program.global_block().append_op(
type="scale",
inputs={"X": merged_var},
outputs={"Out": merged_var},
attrs={"scale": 1.0 / float(self.trainers)})
else:
optimize_sub_program.global_block().create_var(
name=var.name,
persistable=var.persistable,
dtype=var.dtype,
shape=var.shape)
if opt_op.inputs.has_key("Grad"):
if opt_op.inputs["Grad"].name in grad_var_names:
optimize_sub_program.global_block().append_op(
type=opt_op.type,
inputs=opt_op.inputs,
outputs=opt_op.outputs,
attrs=opt_op.attrs)
else:
optimize_sub_program.global_block().append_op(
type=opt_op.type,
inputs=opt_op.inputs,
outputs=opt_op.outputs,
attrs=opt_op.attrs)
pserver_program.global_block().append_op(
type="recv",
inputs={"RX":
self.param_grad_map[endpoint]["grads"]}, # grads to recv
outputs={},
attrs={
"OptimizeBlock": optimize_sub_program.global_block(),
"endpoint": endpoint,
"ParamList":
[p.name for p in self.param_grad_map[endpoint]["params"]],
"GradList":
[p.name for p in self.param_grad_map[endpoint]["grads"]],
"Trainers": self.trainers
})
pserver_program.sync_with_cpp()
return pserver_program
|
putcn/Paddle
|
python/paddle/fluid/transpiler/distribute_transpiler_simple.py
|
Python
|
apache-2.0
| 10,044
|
"""Contains functions for dealing with the .pdb file format."""
from datetime import datetime
import re
from itertools import groupby, chain
import valerius
from math import ceil
from .data import CODES
from .structures import Residue, Ligand
from .mmcif import add_secondary_structure_to_polymers
def pdb_string_to_pdb_dict(filestring):
"""Takes a .pdb filestring and turns into a ``dict`` which represents its
record structure. Only lines which aren't empty are used.
The resultant dictionary has line types as the keys, which point to the
lines as its value. So ``{"TITLE": ["TITLE line 1", "TITLE line 2"]}`` etc.
The exceptions are the REMARK records, where there is a sub-dictionary with
REMARK numbers as keys, and the structure records themselves which are just
arranged into lists - one for each model.
:param str filestring: the .pdb filestring to process.
:rtype: ``dict``"""
pdb_dict = {}
lines = list(filter(lambda l: bool(l.strip()), filestring.split("\n")))
lines = [[line[:6].rstrip(), line.rstrip()] for line in lines]
model_recs = ("ATOM", "HETATM", "ANISOU", "MODEL", "TER", "ENDMDL")
model = []
in_model = False
for head, line in lines:
if head == "REMARK":
if "REMARK" not in pdb_dict: pdb_dict["REMARK"] = {}
number = line.lstrip().split()[1]
update_dict(pdb_dict["REMARK"], number, line)
elif head in model_recs:
if "MODEL" not in pdb_dict: pdb_dict["MODEL"] = [[]]
if head == "ENDMDL":
pdb_dict["MODEL"].append([])
elif head != "MODEL":
pdb_dict["MODEL"][-1].append(line)
else:
update_dict(pdb_dict, head, line)
if "MODEL" in pdb_dict and not pdb_dict["MODEL"][-1]: pdb_dict["MODEL"].pop()
return pdb_dict
def update_dict(d, key, value):
"""Takes a dictionary where the values are lists, and adds a value to one of
the lists at the specific key. If the list doesn't exist, it creates it
first.
The dictionary is changed in place.
:param dict d: the dictionary to update.
:param str key: the location of the list.
:param str value: the value to add to the list."""
try:
d[key].append(value)
except: d[key] = [value]
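# Doctest-style sketch of the helper above (illustrative input):
#
#     >>> d = {}
#     >>> update_dict(d, "HEADER", "HEADER    LYASE")
#     >>> update_dict(d, "HEADER", "HEADER    LYASE 2")
#     >>> d
#     {'HEADER': ['HEADER    LYASE', 'HEADER    LYASE 2']}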
def pdb_dict_to_data_dict(pdb_dict):
"""Converts an .pdb dictionary into an atomium data dictionary, with the
same standard layout that the other file formats get converted into.
:param dict pdb_dict: the .pdb dictionary.
:rtype: ``dict``"""
data_dict = {
"description": {
"code": None, "title": None, "deposition_date": None,
"classification": None, "keywords": [], "authors": []
}, "experiment": {
"technique": None, "source_organism": None, "expression_system": None,
"missing_residues": []
}, "quality": {"resolution": None, "rvalue": None, "rfree": None},
"geometry": {"assemblies": [], "crystallography": {}}, "models": []
}
update_description_dict(pdb_dict, data_dict)
update_experiment_dict(pdb_dict, data_dict)
update_quality_dict(pdb_dict, data_dict)
update_geometry_dict(pdb_dict, data_dict)
update_models_list(pdb_dict, data_dict)
return data_dict
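# Typical pipeline (a sketch; 'filestring' is assumed to hold .pdb text):
#
#     data = pdb_dict_to_data_dict(pdb_string_to_pdb_dict(filestring))
#     data["description"]["code"]    # e.g. '1ABC'
#     data["models"][0]["polymer"]   # chain ID -> chain dict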
def update_description_dict(pdb_dict, data_dict):
"""Creates the description component of a standard atomium data dictionary
from a .pdb dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
extract_header(pdb_dict, data_dict["description"])
extract_title(pdb_dict, data_dict["description"])
extract_keywords(pdb_dict, data_dict["description"])
extract_authors(pdb_dict, data_dict["description"])
def update_experiment_dict(pdb_dict, data_dict):
"""Creates the experiment component of a standard atomium data dictionary
from a .pdb dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
extract_technique(pdb_dict, data_dict["experiment"])
extract_source(pdb_dict, data_dict["experiment"])
extract_missing_residues(pdb_dict, data_dict["experiment"])
def update_quality_dict(pdb_dict, data_dict):
"""Creates the quality component of a standard atomium data dictionary
from a .pdb dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
extract_resolution_remark(pdb_dict, data_dict["quality"])
extract_rvalue_remark(pdb_dict, data_dict["quality"])
def update_geometry_dict(pdb_dict, data_dict):
"""Creates the geometry component of a standard atomium data dictionary
from a .pdb dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
extract_assembly_remark(pdb_dict, data_dict["geometry"])
extract_crystallography(pdb_dict, data_dict["geometry"])
def update_models_list(pdb_dict, data_dict):
"""Creates model dictionaries in a data dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
sequences = make_sequences(pdb_dict)
secondary_structure = make_secondary_structure(pdb_dict)
full_names = get_full_names(pdb_dict)
for model_lines in pdb_dict["MODEL"]:
aniso = make_aniso(model_lines)
last_ter = get_last_ter_line(model_lines)
model = {"polymer": {}, "non-polymer": {}, "water": {}}
count = 0
for index, line in enumerate(model_lines):
if line[:6] in ["ATOM ", "HETATM"]:
chain_id = line[21] if index < last_ter else id_from_line(line)
res_id = id_from_line(line)
if index < last_ter:
add_atom_to_polymer(line, model, chain_id, res_id, aniso, full_names)
else:
add_atom_to_non_polymer(line, model, res_id, aniso, full_names)
for chain_id, chain in model["polymer"].items():
chain["sequence"] = sequences.get(chain_id, "")
add_secondary_structure_to_polymers(model, secondary_structure)
data_dict["models"].append(model)
def extract_header(pdb_dict, description_dict):
"""Takes a ``dict`` and adds header information to it by parsing the HEADER
line.
:param dict pdb_dict: the ``dict`` to read.
:param dict description_dict: the ``dict`` to update."""
if pdb_dict.get("HEADER"):
line = pdb_dict["HEADER"][0]
if line[50:59].strip():
description_dict["deposition_date"] = datetime.strptime(
line[50:59], "%d-%b-%y"
).date()
if line[62:66].strip(): description_dict["code"] = line[62:66]
if line[10:50].strip():
description_dict["classification"] = line[10:50].strip()
def extract_title(pdb_dict, description_dict):
"""Takes a ``dict`` and adds header information to it by parsing the TITLE
lines.
:param dict pdb_dict: the ``dict`` to read.
:param dict description_dict: the ``dict`` to update."""
if pdb_dict.get("TITLE"):
description_dict["title"] = merge_lines(pdb_dict["TITLE"], 10)
def extract_keywords(pdb_dict, description_dict):
"""Takes a ``dict`` and adds header information to it by parsing the KEYWDS
line.
:param dict pdb_dict: the ``dict`` to read.
:param dict description_dict: the ``dict`` to update."""
if pdb_dict.get("KEYWDS"):
text = merge_lines(pdb_dict["KEYWDS"], 10)
description_dict["keywords"] = [w.strip() for w in text.split(",")]
def extract_authors(pdb_dict, description_dict):
"""Takes a ``dict`` and adds header information to it by parsing the AUTHOR
line.
:param dict pdb_dict: the ``dict`` to read.
:param dict description_dict: the ``dict`` to update."""
if pdb_dict.get("AUTHOR"):
text = merge_lines(pdb_dict["AUTHOR"], 10)
description_dict["authors"] = [w.strip() for w in text.split(",")]
def extract_technique(pdb_dict, experiment_dict):
"""Takes a ``dict`` and adds technique information to it by parsing EXPDTA
lines.
:param dict pdb_dict: the ``dict`` to read.
:param dict experiment_dict: the ``dict`` to update."""
if pdb_dict.get("EXPDTA"):
if pdb_dict["EXPDTA"][0].strip():
experiment_dict["technique"] = pdb_dict["EXPDTA"][0][6:].strip()
def extract_source(pdb_dict, experiment_dict):
"""Takes a ``dict`` and adds source information to it by parsing SOURCE
lines.
:param dict pdb_dict: the ``dict`` to read.
:param dict experiment_dict: the ``dict`` to update."""
if pdb_dict.get("SOURCE"):
data = merge_lines(pdb_dict["SOURCE"], 10)
patterns = {
"source_organism": r"ORGANISM_SCIENTIFIC\: (.+?);",
"expression_system": r"EXPRESSION_SYSTEM\: (.+?);"
}
for attribute, pattern in patterns.items():
matches = re.findall(pattern, data)
if matches:
experiment_dict[attribute] = matches[0]
def extract_missing_residues(pdb_dict, experiment_dict):
"""Takes a ``dict`` and adds missing residue information to it by parsing
REMARK 465 lines.
:param dict pdb_dict: the ``dict`` to read.
:param dict experiment_dict: the ``dict`` to update."""
for line in pdb_dict.get("REMARK", {}).get("465", []):
chunks = line.strip().split()
if len(chunks) == 5:
experiment_dict["missing_residues"].append({
"name": chunks[2], "id": f"{chunks[3]}.{chunks[4]}"
})
def extract_resolution_remark(pdb_dict, quality_dict):
"""Takes a ``dict`` and adds resolution information to it by parsing REMARK
2 lines.
:param dict pdb_dict: the ``dict`` to read.
:param dict quality_dict: the ``dict`` to update."""
if pdb_dict.get("REMARK") and pdb_dict["REMARK"].get("2"):
for remark in pdb_dict["REMARK"]["2"]:
try:
quality_dict["resolution"] = float(remark[10:].strip().split()[1])
break
except: pass
def extract_rvalue_remark(pdb_dict, quality_dict):
"""Takes a ``dict`` and adds resolution information to it by parsing REMARK
3 lines.
:param dict pdb_dict: the ``dict`` to read.
:param dict quality_dict: the ``dict`` to update."""
if pdb_dict.get("REMARK") and pdb_dict["REMARK"].get("3"):
patterns = {
"rvalue": r"R VALUE.+WORKING.+?: (.+)",
"rfree": r"FREE R VALUE[ ]{2,}: (.+)",
}
for attribute, pattern in patterns.items():
for remark in pdb_dict["REMARK"]["3"]:
matches = re.findall(pattern, remark.strip())
if matches:
try:
quality_dict[attribute] = float(matches[0].strip())
except: pass
break
def extract_assembly_remark(pdb_dict, geometry_dict):
"""Takes a ``dict`` and adds assembly information to it by parsing REMARK
350 lines.
:param dict pdb_dict: the ``dict`` to read.
:param dict geometry_dict: the ``dict`` to update."""
if pdb_dict.get("REMARK") and pdb_dict["REMARK"].get("350"):
groups = [list(g) for k, g in groupby(
pdb_dict["REMARK"]["350"], lambda x: "ECULE:" in x
)][1:]
assemblies = [list(chain(*a)) for a in zip(groups[::2], groups[1::2])]
for a in assemblies:
geometry_dict["assemblies"].append(
assembly_lines_to_assembly_dict(a)
)
def assembly_lines_to_assembly_dict(lines):
"""Takes the lines representing a single biological assembly and turns
them into an assembly dictionary.
:param list lines: The REMARK lines to read.
:rtype: ``dict``"""
assembly = {
"transformations": [], "software": None, "buried_surface_area": None,
"surface_area": None, "delta_energy": None, "id": 0
}
patterns = [[r"(.+)SOFTWARE USED: (.+)", "software", lambda x: x],
[r"(.+)BIOMOLECULE: (.+)", "id", int],
[r"(.+)SURFACE AREA: (.+) [A-Z]", "buried_surface_area", float],
[r"(.+)AREA OF THE COMPLEX: (.+) [A-Z]", "surface_area", float],
[r"(.+)FREE ENERGY: (.+) [A-Z]", "delta_energy", float]]
t = None
for line in lines:
for p in patterns:
matches = re.findall(p[0], line)
if matches: assembly[p[1]] = p[2](matches[0][1].strip())
if "APPLY THE FOLLOWING" in line:
if t: assembly["transformations"].append(t)
t = {"chains": [], "matrix": [], "vector": []}
if "CHAINS:" in line:
t["chains"] += [c.strip() for c in
line.split(":")[-1].strip().split(",") if c.strip()]
if "BIOMT" in line:
values = [float(x) for x in line.split()[4:]]
if len(t["matrix"]) == 3:
assembly["transformations"].append(t)
t = {"chains": t["chains"], "matrix": [], "vector": []}
t["matrix"].append(values[:3])
t["vector"].append(values[-1])
if t: assembly["transformations"].append(t)
return assembly
def extract_crystallography(pdb_dict, geometry_dict):
"""Takes a ``dict`` and adds assembly information to it by parsing the
CRYST1 record.
:param dict pdb_dict: the ``dict`` to read.
:param dict geometry_dict: the ``dict`` to update."""
if pdb_dict.get("CRYST1"):
line = pdb_dict["CRYST1"][0]
values = line.split()
geometry_dict["crystallography"]["space_group"] = line[55:66].strip()
geometry_dict["crystallography"]["unit_cell"] = [
float(val) for val in values[1:7]
        ] if len(values) >= 7 else []
def make_sequences(pdb_dict):
"""Creates a mapping of chain IDs to sequences, by parsing SEQRES records.
:param dict pdb_dict: the .pdb dictionary to read.
:rtype: ``dict``"""
seq = {}
if pdb_dict.get("SEQRES"):
for line in pdb_dict["SEQRES"]:
chain, residues = line[11], line[19:].strip().split()
if chain not in seq:
seq[chain] = []
seq[chain] += residues
return {k: "".join([CODES.get(r, "X") for r in v]) for k, v in seq.items()}
def make_secondary_structure(pdb_dict):
"""Creates a dictionary of helices and strands, with each having a list of
start and end residues.
:param pdb_dict: the .pdb dict to read.
:rtype: ``dict``"""
helices, strands = [], []
for helix in pdb_dict.get("HELIX", []):
helices.append([
f"{helix[19]}.{helix[21:25].strip()}{helix[25].strip()}",
f"{helix[31]}.{helix[33:37].strip()}{helix[37].strip() if len(helix) > 37 else ''}",
])
for strand in pdb_dict.get("SHEET", []):
strands.append([
f"{strand[21]}.{strand[22:26].strip()}{strand[26].strip()}",
f"{strand[32]}.{strand[33:37].strip()}{strand[37].strip() if len(strand) > 37 else ''}",
])
return {"helices": helices, "strands": strands}
def get_full_names(pdb_dict):
"""Creates a mapping of het names to full English names.
:param pdb_dict: the .pdb dict to read.
:rtype: ``dict``"""
full_names = {}
for line in pdb_dict.get("HETNAM", []):
try:
full_names[line[11:14].strip()] += line[15:].strip()
except: full_names[line[11:14].strip()] = line[15:].strip()
return full_names
def make_aniso(model_lines):
"""Creates a mapping of chain IDs to anisotropy, by parsing ANISOU records.
:param dict pdb_dict: the .pdb dictionary to read.
:rtype: ``dict``"""
return {int(line[6:11].strip()): [
int(line[n * 7 + 28:n * 7 + 35]) / 10000 for n in range(6)
] for line in model_lines if line[:6] == "ANISOU"}
def get_last_ter_line(model_lines):
"""Gets the index of the last TER record in a list of records. 0 will be
returned if there are none.
:param list model_lines: the lines to search.
:rtype: ``int``"""
last_ter = 0
for index, line in enumerate(model_lines[::-1]):
if line[:3] == "TER":
last_ter = len(model_lines) - index - 1
break
return last_ter
def id_from_line(line):
"""Creates a residue ID from an atom line.
:param str line: the ATOM or HETATM line record.
:rtype: ``str``"""
return "{}.{}{}".format(line[21], line[22:26].strip(), line[26].strip())
def add_atom_to_polymer(line, model, chain_id, res_id, aniso_dict, full_names):
"""Takes an .pdb ATOM or HETATM record, converts it, and adds it to a
polymer dictionary.
    :param str line: the line to read.
:param dict model: the model to update.
:param str chain_id: the chain ID to add to.
:param str res_id: the molecule ID to add to.
    :param dict aniso_dict: lookup dictionary for anisotropy information.
    :param dict full_names: lookup of het codes to full English names."""
try:
model["polymer"][chain_id]["residues"][res_id]["atoms"][
int(line[6:11])
] = atom_line_to_dict(line, aniso_dict)
except:
name = line[17:20].strip()
try:
model["polymer"][chain_id]["residues"][res_id] = {
"name": name, "full_name": full_names.get(name),
"atoms": {int(line[6:11]): atom_line_to_dict(line, aniso_dict)},
"number": len(model["polymer"][chain_id]["residues"]) + 1
}
except:
model["polymer"][chain_id] = {
"internal_id": chain_id, "helices": [], "strands": [],
"residues": {res_id: {
"name": line[17:20].strip(),
"atoms": {int(line[6:11]): atom_line_to_dict(line, aniso_dict)},
"number": 1, "full_name": None,
}}
}
def add_atom_to_non_polymer(line, model, res_id, aniso_dict, full_names):
"""Takes an .pdb ATOM or HETATM record, converts it, and adds it to a
non-polymer dictionary.
    :param str line: the line to read.
:param dict model: the model to update.
:param str res_id: the molecule ID to add to.
    :param dict aniso_dict: lookup dictionary for anisotropy information.
    :param dict full_names: lookup of het codes to full English names."""
key = "water" if line[17:20] in ["HOH", "DOD"] else "non-polymer"
try:
model[key][res_id]["atoms"][
int(line[6:11])
] = atom_line_to_dict(line, aniso_dict)
except:
name = line[17:20].strip()
model[key][res_id] = {
"name": name, "full_name": full_names.get(name),
"internal_id": line[21], "polymer": line[21],
"atoms": {int(line[6:11]): atom_line_to_dict(line, aniso_dict)}
}
def atom_line_to_dict(line, aniso_dict):
"""Converts an ATOM or HETATM record to an atom dictionary.
:param str line: the record to convert.
:param dict aniso_dict: the anisotropy dictionary to use.
:rtype: ``dict``"""
a = {
"occupancy": 1, "bvalue": None, "charge": 0,
"anisotropy": aniso_dict.get(int(line[6:11].strip()), [0, 0, 0, 0, 0, 0])
}
a["is_hetatm"] = line[:6] == "HETATM"
a["name"] = line[12:16].strip() or None
a["alt_loc"] = line[16].strip() or None
a["x"] = float(line[30:38].strip())
a["y"] = float(line[38:46].strip())
a["z"] = float(line[46:54].strip())
if line[54:60].strip(): a["occupancy"] = float(line[54:60].strip())
if line[60:66].strip(): a["bvalue"] = float(line[60:66].strip())
a["element"] = line[76:78].strip() or None
if line[78:80].strip():
try:
a["charge"] = int(line[78:80].strip())
except: a["charge"] = int(line[78:80][::-1].strip())
return a
def merge_lines(lines, start, join=" "):
"""Gets a single continuous string from a sequence of lines.
:param list lines: The lines to merge.
:param int start: The start point in each record.
:param str join: The string to join on.
:rtype: ``str``"""
string = join.join([line[start:].strip() for line in lines])
return string
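# Doctest-style sketch:
#
#     >>> merge_lines(["TITLE     HELLO", "TITLE    2 WORLD"], 10)
#     'HELLO WORLD'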
def structure_to_pdb_string(structure):
"""Converts a :py:class:`.AtomStructure` to a .pdb filestring.
:param AtomStructure structure: the structure to convert.
:rtype: ``str``"""
lines = []
pack_sequences(structure, lines)
atoms = sorted(structure.atoms(), key=lambda a: a.id)
for i, atom in enumerate(atoms):
atom_to_atom_line(atom, lines)
if isinstance(atom.het, Residue) and (
atom is atoms[-1] or atoms[i + 1].chain is not atom.chain or
isinstance(atoms[i + 1].het, Ligand)):
last = lines[-1]
lines.append(f"TER {last[6:11]} {last[17:20]} {last[21]}{last[22:26]}{last[26]}")
return "\n".join(lines)
def pack_sequences(structure, lines):
"""Adds SEQRES lines from polymer sequence data.
:param AtomStructure structure: the structure to convert.
:param list lines: the string lines to update."""
try:
for chain in sorted(structure.chains(), key=lambda c: c.id):
residues = valerius.from_string(chain.sequence).codes
length = len(residues)
line_count = ceil(length / 13)
for line_num in range(line_count):
lines += ["SEQRES {:>3} {} {:>4} {}".format(
line_num + 1, chain.id, length,
" ".join(residues[line_num * 13: (line_num + 1) * 13])
)]
except AttributeError: pass
def atom_to_atom_line(a, lines):
"""Converts an :py:class:`.Atom` to an ATOM or HETATM record. ANISOU lines
will also be added where appropriate.
:param Atom a: The Atom to pack.
:param list lines: the string lines to update."""
line = "{:6}{:5} {:4} {:3} {:1}{:4}{:1} "
line += "{:>8}{:>8}{:>8} 1.00{:6} {:>2}{:2}"
id_, residue_name, chain_id, residue_id, insert_code = "", "", "", "", ""
if a.het:
id_, residue_name = a.het.id, a.het._name
chain_id = a.chain.id if a.chain is not None else ""
residue_id = int("".join([c for c in id_ if c.isdigit() or c == "-"]))
insert_code = id_[-1] if id_ and id_[-1].isalpha() else ""
atom_name = a._name or ""
atom_name = " " + atom_name if len(atom_name) < 4 else atom_name
occupancy = " 1.00"
line = line.format(
"HETATM" if isinstance(a.het, Ligand) or a._is_hetatm else "ATOM",
a.id, atom_name, residue_name, chain_id, residue_id, insert_code,
"{:.3f}".format(a.location[0]) if a.location[0] is not None else "",
"{:.3f}".format(a.location[1]) if a.location[1] is not None else "",
"{:.3f}".format(a.location[2]) if a.location[2] is not None else "",
a.bvalue if a.bvalue is not None else "", a.element or "",
str(int(a.charge))[::-1] if a.charge else "",
)
lines.append(line)
if a.anisotropy != [0, 0, 0, 0, 0, 0]:
lines.append(atom_to_anisou_line(a, atom_name,
residue_name, chain_id, residue_id, insert_code))
def atom_to_anisou_line(a, name, res_name, chain_id, res_id, insert):
"""Converts an :py:class:`.Atom` to an ANISOU record.
:param Atom a: The Atom to pack.
:param str name: The atom name to use.
:param str res_name: The residue name to use.
:param str chain_id: The chain ID to use.
:param str res_id: The residue ID to use.
:param str insert: The residue insert code to use.
:rtype: ``str``"""
line = "ANISOU{:5} {:4} {:3} {:1}{:4}{:1} "
line += "{:>7}{:>7}{:>7}{:>7}{:>7}{:>7} {:>2}{:2}"
    anisotropy = [round(x * 10000) for x in a.anisotropy]
line = line.format(
a.id, name, res_name, chain_id, res_id, insert,
anisotropy[0], anisotropy[1], anisotropy[2],
anisotropy[3], anisotropy[4], anisotropy[5],
a.element if a.element else "",
str(int(a.charge))[::-1] if a.charge else "",
)
return line
|
samirelanduk/molecupy
|
atomium/pdb.py
|
Python
|
mit
| 23,845
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Data Flow Operations."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import re
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import common_shapes
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_data_flow_ops
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_data_flow_ops import *
# pylint: enable=wildcard-import
def _as_type_list(dtypes):
"""Convert dtypes to a list of types."""
assert dtypes is not None
if not (isinstance(dtypes, list) or isinstance(dtypes, tuple)):
# We have a single type.
return [dtypes]
else:
# We have a list or tuple of types.
return list(dtypes)
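# Doctest-style sketch (dtype objects shown symbolically):
#
#     >>> _as_type_list(tf.float32)
#     [tf.float32]
#     >>> _as_type_list([tf.float32, tf.int32])
#     [tf.float32, tf.int32]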
def _as_shape_list(shapes, dtypes, unknown_dim_allowed=False,
unknown_rank_allowed=False):
"""Convert shapes to a list of tuples of int (or None)."""
if unknown_dim_allowed:
if (not isinstance(shapes, collections.Sequence)
or not shapes
or any(shape is None or isinstance(shape, int) for shape in shapes)):
raise ValueError(
"When providing partial shapes, a list of shapes must be provided.")
if shapes is None: return None
if isinstance(shapes, tensor_shape.TensorShape):
shapes = [shapes]
if not isinstance(shapes, (tuple, list)):
raise TypeError(
"shapes must be a TensorShape or a list or tuple of TensorShapes.")
if all(shape is None or isinstance(shape, int) for shape in shapes):
# We have a single shape.
shapes = [shapes]
shapes = [tensor_shape.as_shape(shape) for shape in shapes]
if not unknown_dim_allowed:
if any([not shape.is_fully_defined() for shape in shapes]):
raise ValueError("All shapes must be fully defined: %s" % shapes)
if not unknown_rank_allowed:
if any([shape.dims is None for shape in shapes]):
raise ValueError("All shapes must have a defined rank: %s" % shapes)
return shapes
# pylint: disable=protected-access
class QueueBase(object):
"""Base class for queue implementations.
A queue is a TensorFlow data structure that stores tensors across
multiple steps, and exposes operations that enqueue and dequeue
tensors.
Each queue element is a tuple of one or more tensors, where each
tuple component has a static dtype, and may have a static shape. The
queue implementations support versions of enqueue and dequeue that
handle single elements, versions that support enqueuing and
dequeuing a batch of elements at once.
See [`tf.FIFOQueue`](#FIFOQueue) and
[`tf.RandomShuffleQueue`](#RandomShuffleQueue) for concrete
implementations of this class, and instructions on how to create
them.
@@enqueue
@@enqueue_many
@@dequeue
@@dequeue_many
@@size
@@close
"""
def __init__(self, dtypes, shapes, queue_ref):
"""Constructs a queue object from a queue reference.
Args:
dtypes: A list of types. The length of dtypes must equal the number
of tensors in each element.
shapes: Constraints on the shapes of tensors in an element:
A list of shape tuples or None. This list is the same length
as dtypes. If the shape of any tensors in the element are constrained,
all must be; shapes can be None if the shapes should not be constrained.
queue_ref: The queue reference, i.e. the output of the queue op.
"""
self._dtypes = dtypes
if shapes is not None:
self._shapes = [tensor_shape.TensorShape(s) for s in shapes]
else:
self._shapes = [tensor_shape.unknown_shape() for _ in self._dtypes]
self._queue_ref = queue_ref
self._name = self._queue_ref.op.name.split("/")[-1]
@staticmethod
def from_list(index, queues):
"""Create a queue using the queue reference from `queues[index]`.
Args:
index: An integer scalar tensor that determines the input that gets
selected.
queues: A list of `QueueBase` objects.
Returns:
A `QueueBase` object.
Raises:
TypeError: When `queues` is not a list of `QueueBase` objects,
or when the data types of `queues` are not all the same.
"""
if ((not queues) or
(not isinstance(queues, list)) or
(not all(isinstance(x, QueueBase) for x in queues))):
raise TypeError("A list of queues expected")
dtypes = queues[0].dtypes
if not all([dtypes == q.dtypes for q in queues[1:]]):
raise TypeError("Queues do not have matching component dtypes.")
queue_refs = [x.queue_ref for x in queues]
selected_queue = control_flow_ops.ref_select(index, queue_refs)
# TODO(josh11b): Unify the shapes of the queues too?
return QueueBase(dtypes=dtypes, shapes=None, queue_ref=selected_queue)
@property
def queue_ref(self):
"""The underlying queue reference."""
return self._queue_ref
@property
def name(self):
"""The name of the underlying queue."""
return self._queue_ref.op.name
@property
def dtypes(self):
"""The list of dtypes for each component of a queue element."""
return self._dtypes
def _check_enqueue_dtypes(self, vals):
"""Returns `vals` as a list of `Tensor`s, having checked their dtypes.
Args:
vals: A tensor or a list of tensors, corresponding to an
enqueue(_many) tuple.
Returns:
A list of `Tensor` objects.
"""
if not isinstance(vals, (list, tuple)):
vals = [vals]
tensors = []
for i, (val, dtype) in enumerate(zip(vals, self._dtypes)):
tensors.append(ops.convert_to_tensor(val, dtype=dtype,
name="component_%d" % i))
return tensors
def enqueue(self, vals, name=None):
"""Enqueues one element to this queue.
If the queue is full when this operation executes, it will block
until the element has been enqueued.
Args:
vals: The tuple of `Tensor` objects to be enqueued.
name: A name for the operation (optional).
Returns:
The operation that enqueues a new tuple of tensors to the queue.
"""
if not isinstance(vals, (list, tuple)):
vals = [vals]
with ops.op_scope(vals, name, "%s_enqueue" % self._name) as scope:
vals = self._check_enqueue_dtypes(vals)
# NOTE(mrry): Not using a shape function because we need access to
# the `QueueBase` object.
for val, shape in zip(vals, self._shapes):
val.get_shape().assert_is_compatible_with(shape)
return gen_data_flow_ops._queue_enqueue(self._queue_ref, vals, name=scope)
def enqueue_many(self, vals, name=None):
"""Enqueues zero or elements to this queue.
This operation slices each component tensor along the 0th dimension to
make multiple queue elements. All of the tensors in `vals` must have the
same size in the 0th dimension.
If the queue is full when this operation executes, it will block
until all of the elements have been enqueued.
Args:
vals: The tensor or tuple of tensors from which the queue elements
are taken.
name: A name for the operation (optional).
Returns:
The operation that enqueues a batch of tuples of tensors to the queue.
"""
if not isinstance(vals, (list, tuple)):
vals = [vals]
with ops.op_scope(vals, name, "%s_EnqueueMany" % self._name) as scope:
vals = self._check_enqueue_dtypes(vals)
# NOTE(mrry): Not using a shape function because we need access to
# the `QueueBase` object.
batch_dim = vals[0].get_shape().with_rank_at_least(1)[0]
for val, shape in zip(vals, self._shapes):
batch_dim = batch_dim.merge_with(
val.get_shape().with_rank_at_least(1)[0])
val.get_shape()[1:].assert_is_compatible_with(shape)
return gen_data_flow_ops._queue_enqueue_many(
self._queue_ref, vals, name=scope)
def dequeue(self, name=None):
"""Dequeues one element from this queue.
If the queue is empty when this operation executes, it will block
until there is an element to dequeue.
Args:
name: A name for the operation (optional).
Returns:
The tuple of tensors that was dequeued.
"""
if name is None:
name = "%s_Dequeue" % self._name
ret = gen_data_flow_ops._queue_dequeue(
self._queue_ref, self._dtypes, name=name)
# NOTE(mrry): Not using a shape function because we need access to
# the `QueueBase` object.
op = ret[0].op
for output, shape in zip(op.values(), self._shapes):
output.set_shape(shape)
return ret if len(ret) != 1 else ret[0]
def dequeue_many(self, n, name=None):
"""Dequeues and concatenates `n` elements from this queue.
This operation concatenates queue-element component tensors along
the 0th dimension to make a single component tensor. All of the
components in the dequeued tuple will have size `n` in the 0th dimension.
If the queue contains fewer than `n` elements when this operation
executes, it will block until `n` elements have been dequeued.
Args:
n: A scalar `Tensor` containing the number of elements to dequeue.
name: A name for the operation (optional).
Returns:
The tuple of concatenated tensors that was dequeued.
"""
if name is None:
name = "%s_DequeueMany" % self._name
ret = gen_data_flow_ops._queue_dequeue_many(
self._queue_ref, n, self._dtypes, name=name)
# NOTE(mrry): Not using a shape function because we need access to
# the Queue object.
op = ret[0].op
batch_dim = tensor_shape.Dimension(tensor_util.constant_value(op.inputs[1]))
for output, shape in zip(op.values(), self._shapes):
output.set_shape(tensor_shape.TensorShape([batch_dim]).concatenate(shape))
return ret if len(ret) != 1 else ret[0]
def close(self, cancel_pending_enqueues=False, name=None):
"""Closes this queue.
This operation signals that no more elements will be enqueued in
the given queue. Subsequent `enqueue` and `enqueue_many`
operations will fail. Subsequent `dequeue` and `dequeue_many`
operations will continue to succeed if sufficient elements remain
in the queue. Subsequent `dequeue` and `dequeue_many` operations
that would block will fail immediately.
If `cancel_pending_enqueues` is `True`, all pending requests will also
be cancelled.
Args:
cancel_pending_enqueues: (Optional.) A boolean, defaulting to
`False` (described above).
name: A name for the operation (optional).
Returns:
The operation that closes the queue.
"""
if name is None:
name = "%s_Close" % self._name
return gen_data_flow_ops._queue_close(
self._queue_ref, cancel_pending_enqueues=cancel_pending_enqueues,
name=name)
def size(self, name=None):
"""Compute the number of elements in this queue.
Args:
name: A name for the operation (optional).
Returns:
A scalar tensor containing the number of elements in this queue.
"""
if name is None:
name = "%s_Size" % self._name
return gen_data_flow_ops._queue_size(self._queue_ref, name=name)
class RandomShuffleQueue(QueueBase):
"""A queue implementation that dequeues elements in a random order.
See [`tf.QueueBase`](#QueueBase) for a description of the methods on
this class.
@@__init__
"""
def __init__(self, capacity, min_after_dequeue, dtypes, shapes=None,
seed=None, shared_name=None, name="random_shuffle_queue"):
"""Create a queue that dequeues elements in a random order.
A `RandomShuffleQueue` has bounded capacity; supports multiple
concurrent producers and consumers; and provides exactly-once
delivery.
A `RandomShuffleQueue` holds a list of up to `capacity`
elements. Each element is a fixed-length tuple of tensors whose
dtypes are described by `dtypes`, and whose shapes are optionally
described by the `shapes` argument.
If the `shapes` argument is specified, each component of a queue
element must have the respective fixed shape. If it is
unspecified, different queue elements may have different shapes,
but the use of `dequeue_many` is disallowed.
The `min_after_dequeue` argument allows the caller to specify a
minimum number of elements that will remain in the queue after a
`dequeue` or `dequeue_many` operation completes, to ensure a
minimum level of mixing of elements. This invariant is maintained
by blocking those operations until sufficient elements have been
enqueued. The `min_after_dequeue` argument is ignored after the
queue has been closed.
Args:
capacity: An integer. The upper bound on the number of elements
that may be stored in this queue.
min_after_dequeue: An integer (described above).
dtypes: A list of `DType` objects. The length of `dtypes` must equal
the number of tensors in each queue element.
shapes: (Optional.) A list of fully-defined `TensorShape` objects,
with the same length as `dtypes` or `None`.
seed: A Python integer. Used to create a random seed. See
[`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed)
for behavior.
shared_name: (Optional.) If non-empty, this queue will be shared under
the given name across multiple sessions.
name: Optional name for the queue operation.
"""
dtypes = _as_type_list(dtypes)
shapes = _as_shape_list(shapes, dtypes)
seed1, seed2 = random_seed.get_seed(seed)
queue_ref = gen_data_flow_ops._random_shuffle_queue(
component_types=dtypes, shapes=shapes, capacity=capacity,
min_after_dequeue=min_after_dequeue, seed=seed1, seed2=seed2,
shared_name=shared_name, name=name)
super(RandomShuffleQueue, self).__init__(dtypes, shapes, queue_ref)
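# Illustrative sketch (not part of the original source; assumes the public
# `tf.RandomShuffleQueue` alias). `min_after_dequeue` keeps that many
# elements queued after every dequeue, so dequeued values are drawn from a
# well-mixed pool:
#
#   q = tf.RandomShuffleQueue(capacity=100, min_after_dequeue=10,
#                             dtypes=[tf.int32], seed=42)
#   element = q.dequeue()
#   # blocks until at least 11 elements have been enqueued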
class FIFOQueue(QueueBase):
"""A queue implementation that dequeues elements in first-in-first out order.
See [`tf.QueueBase`](#QueueBase) for a description of the methods on
this class.
@@__init__
"""
def __init__(self, capacity, dtypes, shapes=None, shared_name=None,
name="fifo_queue"):
"""Creates a queue that dequeues elements in a first-in first-out order.
A `FIFOQueue` has bounded capacity; supports multiple concurrent
producers and consumers; and provides exactly-once delivery.
A `FIFOQueue` holds a list of up to `capacity` elements. Each
element is a fixed-length tuple of tensors whose dtypes are
described by `dtypes`, and whose shapes are optionally described
by the `shapes` argument.
If the `shapes` argument is specified, each component of a queue
element must have the respective fixed shape. If it is
unspecified, different queue elements may have different shapes,
but the use of `dequeue_many` is disallowed.
Args:
capacity: An integer. The upper bound on the number of elements
that may be stored in this queue.
dtypes: A list of `DType` objects. The length of `dtypes` must equal
the number of tensors in each queue element.
shapes: (Optional.) A list of fully-defined `TensorShape` objects,
with the same length as `dtypes` or `None`.
shared_name: (Optional.) If non-empty, this queue will be shared under
the given name across multiple sessions.
name: Optional name for the queue operation.
"""
dtypes = _as_type_list(dtypes)
shapes = _as_shape_list(shapes, dtypes)
queue_ref = gen_data_flow_ops._fifo_queue(
component_types=dtypes, shapes=shapes, capacity=capacity,
shared_name=shared_name, name=name)
super(FIFOQueue, self).__init__(dtypes, shapes, queue_ref)
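# Illustrative sketch (not part of the original source; assumes the public
# `tf.FIFOQueue` alias). Elements come out in exactly the order they went in:
#
#   q = tf.FIFOQueue(capacity=3, dtypes=[tf.float32])
#   enqueue_op = q.enqueue([10.0])
#   value = q.dequeue()
#   # sess.run(enqueue_op); sess.run(value) returns 10.0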
class PaddingFIFOQueue(QueueBase):
""""A FIFOQueue that supports batching variable-sized tensors by padding.
A `PaddingFIFOQueue` may contain components with dynamic shape, while also
supporting `dequeue_many`. See the constructor for more details.
See [`tf.QueueBase`](#QueueBase) for a description of the methods on
this class.
@@__init__
"""
def __init__(self, capacity, dtypes, shapes, shared_name=None,
name="padding_fifo_queue"):
"""Creates a queue that dequeues elements in a first-in first-out order.
A `PaddingFIFOQueue` has bounded capacity; supports multiple concurrent
producers and consumers; and provides exactly-once delivery.
A `PaddingFIFOQueue` holds a list of up to `capacity` elements. Each
element is a fixed-length tuple of tensors whose dtypes are
described by `dtypes`, and whose shapes are described by the `shapes`
argument.
The `shapes` argument must be specified; each component of a queue
element must have the respective shape. Shapes of fixed
rank but variable size are allowed by setting any shape dimension to None.
In this case, the inputs' shape may vary along the given dimension, and
`dequeue_many` will pad the given dimension with zeros up to the maximum
shape of all elements in the given batch.
Args:
capacity: An integer. The upper bound on the number of elements
that may be stored in this queue.
dtypes: A list of `DType` objects. The length of `dtypes` must equal
the number of tensors in each queue element.
shapes: A list of `TensorShape` objects, with the same length as
`dtypes`. Any dimension in the `TensorShape` containing value
`None` is dynamic and allows values to be enqueued with
variable size in that dimension.
shared_name: (Optional.) If non-empty, this queue will be shared under
the given name across multiple sessions.
name: Optional name for the queue operation.
Raises:
ValueError: If shapes is not a list of shapes, or the lengths of dtypes
and shapes do not match.
"""
dtypes = _as_type_list(dtypes)
shapes = _as_shape_list(shapes, dtypes, unknown_dim_allowed=True)
if len(dtypes) != len(shapes):
raise ValueError("Shapes must be provided for all components, "
"but received %d dtypes and %d shapes."
% (len(dtypes), len(shapes)))
queue_ref = gen_data_flow_ops._padding_fifo_queue(
component_types=dtypes, shapes=shapes, capacity=capacity,
shared_name=shared_name, name=name)
super(PaddingFIFOQueue, self).__init__(dtypes, shapes, queue_ref)
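# Illustrative sketch (hypothetical values, not from the original source;
# assumes the public `tf.PaddingFIFOQueue` alias). With one component of
# shape [None], `dequeue_many` zero-pads the dynamic dimension up to the
# largest element in the batch:
#
#   q = tf.PaddingFIFOQueue(capacity=10, dtypes=[tf.int32], shapes=[[None]])
#   # after enqueueing [1, 2] and then [3, 4, 5], dequeue_many(2) returns
#   #   [[1, 2, 0],
#   #    [3, 4, 5]]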
# TODO(josh11b): class BatchQueue(QueueBase):
def initialize_all_tables(name="init_all_tables"):
"""Returns an Op that initializes all tables of the default graph.
Args:
name: Optional name for the initialization op.
Returns:
    An Op that initializes all tables. Note that if there are
    no tables, the returned Op is a NoOp.
"""
initializers = ops.get_collection(ops.GraphKeys.TABLE_INITIALIZERS)
if initializers:
return control_flow_ops.group(*initializers, name=name)
return control_flow_ops.no_op(name=name)
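# Illustrative sketch (assumed session setup, not from the original source):
# run the returned op once, before executing any table lookups.
#
#   init_tables_op = initialize_all_tables()
#   # sess.run(init_tables_op)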
ops.NoGradient("LookupTableFind")
ops.NoGradient("LookupTableSize")
ops.NoGradient("HashTable")
ops.NoGradient("InitializeTable")
ops.RegisterShape("QueueSize")(common_shapes.scalar_shape)
ops.RegisterShape("Queue")(common_shapes.scalar_shape)
ops.RegisterShape("FIFOQueue")(common_shapes.scalar_shape)
ops.RegisterShape("PaddingFIFOQueue")(common_shapes.scalar_shape)
ops.RegisterShape("RandomShuffleQueue")(common_shapes.scalar_shape)
def _ScalarToVoidShape(op):
"""Shape function for ops that take a scalar and produce no outputs."""
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
return []
# NOTE(mrry): The following ops use higher-level information in the
# Queue class to provide shape information.
ops.RegisterShape("QueueDequeue")(common_shapes.unknown_shape)
ops.RegisterShape("QueueDequeueMany")(common_shapes.unknown_shape)
ops.RegisterShape("QueueEnqueue")(common_shapes.unknown_shape)
ops.RegisterShape("QueueEnqueueMany")(common_shapes.unknown_shape)
ops.RegisterShape("QueueClose")(_ScalarToVoidShape)
ops.RegisterShape("Stack")(common_shapes.scalar_shape)
ops.RegisterShape("StackPush")(common_shapes.unknown_shape)
ops.RegisterShape("StackPop")(common_shapes.unknown_shape)
ops.RegisterShape("StackClose")(_ScalarToVoidShape)
@ops.RegisterShape("DynamicPartition")
def _DynamicPartitionShape(op):
"""Shape function for data_flow_ops.dynamic_partition."""
data_shape = op.inputs[0].get_shape()
partitions_shape = op.inputs[1].get_shape()
# If we don't know the rank of partitions, we don't know anything
mid = partitions_shape.ndims
if mid is None:
result_shape = tensor_shape.unknown_shape()
else:
# data_shape must start with partitions_shape
partitions_shape.assert_is_compatible_with(data_shape[:mid])
# The partition shape is dynamic in the 0th dimension, and matches
# data_shape in the remaining dimensions.
result_shape = tensor_shape.TensorShape([None]).concatenate(
data_shape[mid:])
return [result_shape] * op.get_attr("num_partitions")
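# Illustrative sketch of the semantics this shape function encodes
# (hypothetical values; assumes the public `tf.dynamic_partition` wrapper):
#
#   data = [10, 20, 30, 40, 50]
#   partitions = [0, 0, 1, 1, 0]
#   parts = tf.dynamic_partition(data, partitions, num_partitions=2)
#   # parts[0] == [10, 20, 50]; parts[1] == [30, 40]
#
# Each output keeps `data`'s trailing dimensions, but its 0th dimension is
# data-dependent, hence the TensorShape([None]) prefix above.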
@ops.RegisterShape("DynamicStitch")
def _DynamicStitchShape(op):
"""Shape function for data_flow_ops.dynamic_stitch."""
num_partitions = op.get_attr("N")
indices_shapes = [t.get_shape() for t in op.inputs[0:num_partitions]]
data_shapes = [t.get_shape() for t in op.inputs[num_partitions:]]
output_shape = tensor_shape.unknown_shape()
extra_shape = tensor_shape.TensorShape(None)
for indices_shape, data_shape in zip(indices_shapes, data_shapes):
indices_ndims = indices_shape.ndims
if indices_ndims is not None:
# Assert that data_shape starts with indices_shape
indices_shape.merge_with(data_shape[:indices_ndims])
# The rest belongs to output
extra_shape = extra_shape.merge_with(data_shape[indices_ndims:])
return [tensor_shape.TensorShape([None]).concatenate(extra_shape)]
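# Illustrative sketch (hypothetical values; assumes the public
# `tf.dynamic_stitch` wrapper). The op computes
# merged[indices[m][i]] = data[m][i]:
#
#   indices = [[0, 2], [1, 3]]
#   data = [[10, 30], [20, 40]]
#   merged = tf.dynamic_stitch(indices, data)
#   # merged == [10, 20, 30, 40]
#
# Only the per-element trailing shape is statically known, which is why the
# shape function returns TensorShape([None]).concatenate(extra_shape).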
@ops.RegisterShape("LookupTableFind")
def _LookupTableFindShape(op):
"""Shape function for data_flow_ops._lookup_table_find."""
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
shape_in = op.inputs[1].get_shape()
return [shape_in]
@ops.RegisterShape("LookupTableSize")
def _LookupTableSizeShape(op):
"""Shape function for data_flow_ops._lookup_table_find."""
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
return [tensor_shape.scalar()]
@ops.RegisterShape("HashTable")
def _HashTableShape(_):
"""Shape function for data_flow_ops._hash_table."""
return [tensor_shape.scalar()]
@ops.RegisterShape("InitializeTable")
def _InitializeLookupTableShape(op):
"""Shape function for data_flow_ops._initialize_table."""
op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
keys_shape = op.inputs[1].get_shape().with_rank(1)
op.inputs[2].get_shape().merge_with(keys_shape)
return []
|
martinbede/second-sight
|
tensorflow/python/ops/data_flow_ops.py
|
Python
|
apache-2.0
| 23716
|