text stringlengths 4 1.02M | meta dict |
|---|---|
import uuid
from datetime import datetime
from flask.ext.login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from . import db, login_manager
def create_only_slug(form):
    """Return the slug typed into *form*, or an auto-generated one.

    When the slug field is empty, build "YYYY-MM-DD-xxxx" from today's
    date and the first four characters of a random UUID.
    """
    supplied = form.slug.data
    if supplied != "":
        return supplied
    date_part = str(datetime.now())[0:10]
    return date_part + "-" + str(uuid.uuid4())[0:4]
# User data model
class User(UserMixin, db.Document):
    """Account document for blog users.

    Example:
        User(username="admin", password="admin", email="admin@admin.com",
             url="admin.admin", screenName="admin", group="administrator").save()
    """
    username = db.StringField(max_length=25, required=True, unique=True)
    # Transient plaintext password: hashed into password_hash and cleared
    # by clean() when the document is saved.
    password = db.StringField()
    password_hash = db.StringField(max_length=128, required=True)
    email = db.EmailField(required=True, unique=True, default="")
    url = db.StringField(max_length=30, default="")
    screenName = db.StringField(max_length=25, default="")
    group = db.StringField(default='subscriber', choices=["administrator", "editor", "subscriber"])

    meta = {
        'indexes': [
            'username',
            'email'
        ]
    }

    def clean(self):
        # Called by MongoEngine before every save.  Only re-hash when a new
        # plaintext password is present: the original hashed self.password
        # unconditionally, which failed on a second save() because the field
        # had already been reset to None.
        if self.password:
            self.password_hash = generate_password_hash(self.password)
            self.password = None

    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def set_and_save(self, form):
        """Copy user fields from a WTForms *form* and persist the document."""
        self.username = form.username.data
        self.email = form.email.data
        self.password = form.password.data
        self.url = form.url.data
        self.screenName = form.screenName.data
        self.group = form.group.data
        self.save()
# Flask-Login callback: reload a User from the id stored in the session.
@login_manager.user_loader
def user_load(user_id):
    # Returns None when no user matches, which Flask-Login treats as
    # "not logged in".
    return User.objects(id=user_id).first()
# Comment data model
class Comment(db.EmbeddedDocument):
    """Embedded comment stored inside a Content document.

    Example:
        comment1 = Comment(author_name="lleohao", content="good post")
    """
    author_name = db.StringField(required=True)
    author_email = db.StringField()
    author_url = db.StringField()
    # The default is the callable datetime.now, evaluated at creation time.
    created = db.DateTimeField(default=datetime.now, required=True)
    content = db.StringField(required=True)
# Category data model
class Category(db.Document):
    """Post category.

    Examples:
        Category(name="默认分类", slug="normal", description="这是系统默认的分类")
        Category(name="Python", slug="python", description="").save()
    """
    name = db.StringField(required=True, unique=True)
    slug = db.StringField(required=True, unique=True)
    description = db.StringField()

    meta = {
        'indexes': [
            'name',
            # NOTE(review): '$name' requests a text index and '#name' a
            # hashed index on the same field -- three indexes on one field
            # looks redundant; confirm all are intended.
            '$name',
            '#name'
        ]
    }

    def set_val(self, form):
        """Copy category fields from a WTForms *form* (does not save)."""
        self.name = form.name.data
        self.slug = form.slug.data
        self.description = form.description.data
# Content data model
class Content(db.DynamicDocument):
    """Post or page document.

    Example:
        post = Content(title="test post", slug="test", status=True, type="post")
    """
    created = db.DateTimeField(default=datetime.now, required=True)
    title = db.StringField(max_length=255, required=True)
    slug = db.StringField(max_length=255, required=True, unique=True)
    category = db.ReferenceField(Category)
    tags = db.ListField(db.StringField())
    md_text = db.StringField()
    html_text = db.StringField()
    description = db.StringField()
    author = db.StringField()
    status = db.BooleanField(default=False)
    type = db.StringField(choices=["post", "page"])
    comments = db.ListField(db.EmbeddedDocumentField(Comment))

    meta = {
        'indexes': [
            'status',
            'category',
            'type'
        ],
        'ordering': [
            '-created'
        ]
    }

    def set_val(self, form, author, html_text, type):
        """Populate the document from *form*; posts also get tags/category."""
        self.created = datetime.now()
        self.title = form.title.data
        self.slug = create_only_slug(form)
        self.md_text = form.content.data
        self.html_text = html_text
        self.author = author
        if type == 'post':
            # BUG FIX: the original used `is not ""` (an identity test on a
            # string literal), which is not a reliable emptiness check;
            # compare by value instead.
            if form.tags.data != "":
                self.tags = form.tags.data.split(",")
            else:
                self.tags = []
            self.category = Category.objects(slug=form.category.data).first()
        else:
            # Pages carry no tags or category.
            self.tags = None
            self.category = None

    def clean(self):
        # Build the meta description from site options plus a content excerpt.
        # NOTE(review): assumes an Options document already exists; op would
        # be None otherwise -- confirm setup order.
        op = Options.objects().first()
        self.description = op.title + " - " + op.description + " - " + self.md_text[0:150]
# Site settings data model
class Options(db.Document):
    """Singleton-style document holding site-wide settings."""
    url = db.StringField(required=True)
    title = db.StringField(required=True)
    keyword = db.StringField()
    description = db.StringField()
    duoshuo_name = db.StringField(default="")
    # Used as auto-increment ids.
    # Not needed for now.
    comment_index = db.IntField(default=0, required=True)
    content_index = db.IntField(default=0)

    def set_and_save(self, form):
        """Copy settings from a WTForms *form* and persist the document."""
        self.url = form.url.data
        self.title = form.title.data
        self.keyword = form.keyword.data
        self.description = form.description.data
        self.duoshuo_name = form.duoshuo_name.data
        self.save()
| {
"content_hash": "0b684e091f97ad73c2c2cc0081ea7c2c",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 137,
"avg_line_length": 29.97590361445783,
"alnum_prop": 0.6155546623794212,
"repo_name": "lleobox/PYtypecho",
"id": "2dcb653b9f41a12e41f0996ca6b4a48e6e5413a9",
"size": "5106",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/modules.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "61089"
},
{
"name": "HTML",
"bytes": "48496"
},
{
"name": "JavaScript",
"bytes": "978068"
},
{
"name": "Python",
"bytes": "31028"
}
],
"symlink_target": ""
} |
from oslo_serialization import jsonutils
from nova import test
from nova.tests import fixtures
from nova.tests.unit.api.openstack import fakes
class UrlmapTest(test.NoDBTestCase):
    """Verify that urlmap dispatches requests to the right API version
    based on URL path, Content-Type and Accept headers."""

    def setUp(self):
        super(UrlmapTest, self).setUp()
        self.useFixture(fixtures.GlanceFixture(self))

    def _assert_version_response(self, req, expected_version,
                                 v2_compatible=False):
        # Helper: run *req* through the compute app and assert a JSON 200
        # reply advertising *expected_version*.  Extracted to remove the
        # copy-pasted assertion stanza from every test.
        res = req.get_response(fakes.wsgi_app_v21(v2_compatible=v2_compatible))
        self.assertEqual(200, res.status_int)
        self.assertEqual("application/json", res.content_type)
        body = jsonutils.loads(res.body)
        self.assertEqual(expected_version, body['version']['id'])

    def test_path_version_v2(self):
        # Test URL path specifying v2 returns v2 content.
        req = fakes.HTTPRequest.blank('/v2/')
        req.accept = "application/json"
        self._assert_version_response(req, 'v2.0', v2_compatible=True)

    def test_content_type_version_v2(self):
        # Test Content-Type specifying v2 returns v2 content.
        req = fakes.HTTPRequest.blank('/')
        req.content_type = "application/json;version=2"
        req.accept = "application/json"
        self._assert_version_response(req, 'v2.0', v2_compatible=True)

    def test_accept_version_v2(self):
        # Test Accept header specifying v2 returns v2 content.
        req = fakes.HTTPRequest.blank('/')
        req.accept = "application/json;version=2"
        self._assert_version_response(req, 'v2.0', v2_compatible=True)

    def test_accept_content_type(self):
        # Test Accept header specifying JSON returns JSON content.
        url = ('/v2/%s/images/cedef40a-ed67-4d10-800e-17455edce175' %
               fakes.FAKE_PROJECT_ID)
        req = fakes.HTTPRequest.blank(url)
        req.accept = "application/xml;q=0.8, application/json"
        res = req.get_response(fakes.wsgi_app_v21())
        self.assertEqual(200, res.status_int)
        self.assertEqual("application/json", res.content_type)
        body = jsonutils.loads(res.body)
        self.assertEqual('cedef40a-ed67-4d10-800e-17455edce175',
                         body['image']['id'])

    def test_path_version_v21(self):
        # Test URL path specifying v2.1 returns v2.1 content.
        req = fakes.HTTPRequest.blank('/v2.1/')
        req.accept = "application/json"
        self._assert_version_response(req, 'v2.1')

    def test_content_type_version_v21(self):
        # Test Content-Type specifying v2.1 returns v2.1 content.
        req = fakes.HTTPRequest.blank('/')
        req.content_type = "application/json;version=2.1"
        req.accept = "application/json"
        self._assert_version_response(req, 'v2.1')

    def test_accept_version_v21(self):
        # Test Accept header specifying v2.1 returns v2.1 content.
        req = fakes.HTTPRequest.blank('/')
        req.accept = "application/json;version=2.1"
        self._assert_version_response(req, 'v2.1')

    def test_accept_content_type_v21(self):
        # Test Accept header specifying JSON returns JSON content.
        req = fakes.HTTPRequest.blank('/')
        req.content_type = "application/json;version=2.1"
        req.accept = "application/xml;q=0.8, application/json"
        self._assert_version_response(req, 'v2.1')

    def test_script_name_path_info(self):
        """Ensure URLMap preserves SCRIPT_NAME and PATH_INFO correctly."""
        data = (
            ('', '', ''),
            ('/', '', '/'),
            ('/v2', '/v2', ''),
            ('/v2/', '/v2', '/'),
            ('/v2.1', '/v2.1', ''),
            ('/v2.1/', '/v2.1', '/'),
            ('/v2/foo', '/v2', '/foo'),
            ('/v2.1/foo', '/v2.1', '/foo'),
            ('/bar/baz', '', '/bar/baz')
        )
        app = fakes.wsgi_app_v21()
        for url, exp_script_name, exp_path_info in data:
            req = fakes.HTTPRequest.blank(url)
            req.get_response(app)
            # The app uses /v2 as the base URL :(
            exp_script_name = '/v2' + exp_script_name
            self.assertEqual(exp_script_name, req.environ['SCRIPT_NAME'])
            self.assertEqual(exp_path_info, req.environ['PATH_INFO'])
| {
"content_hash": "ec03f6d7ef4c5b5ea2c1dd07037215cd",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 74,
"avg_line_length": 43.90756302521008,
"alnum_prop": 0.6072727272727273,
"repo_name": "klmitch/nova",
"id": "4ab837713a1422791f65602d8b9f76b2e65ffd4e",
"size": "5861",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nova/tests/unit/api/openstack/compute/test_urlmap.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "851"
},
{
"name": "HTML",
"bytes": "1386"
},
{
"name": "PHP",
"bytes": "44222"
},
{
"name": "Python",
"bytes": "22328409"
},
{
"name": "Shell",
"bytes": "29138"
},
{
"name": "Smarty",
"bytes": "405441"
}
],
"symlink_target": ""
} |
import xbmc, xbmcgui
import Config
#get actioncodes from https://github.com/xbmc/xbmc/blob/master/xbmc/guilib/Key.h
def FormatMenuString( title, option ):
    """Return *title*, appending " [option]" when *option* is non-empty."""
    if len( option ) == 0:
        return title
    return title + " [" + option + "]"
def DoAddDialog():
    """Show the "Add Emulator" dialog loop and persist a new emulator config.

    Repeatedly displays a selection menu until the user adds a valid
    emulator (all mandatory fields filled) or cancels.
    """
    platform = ""
    roms_dir = ""
    roms_filter = ""
    exe_file = ""
    exe_args = "%ROM%"
    dialog = xbmcgui.Dialog()
    indialog = 1
    while indialog > 0:
        # Menu indices: 0=Platform, 1=Roms dir, 2=Roms filter, 3=Exe file,
        # 4=Exe arguments, 5=Add, 6=Cancel.
        selection = dialog.select( "Add Emulator",
        [ FormatMenuString( "Platform", platform ), FormatMenuString( "Roms dir", roms_dir ), FormatMenuString( "Roms filter", roms_filter ), FormatMenuString( "Exe file", exe_file ),
        FormatMenuString( "Exe arguments", exe_args ), "Add", "Cancel" ])
        if selection == 0:
            keyboard = xbmc.Keyboard("", "Enter Platform")
            keyboard.doModal()
            if (keyboard.isConfirmed()):
                platform = keyboard.getText()
        elif selection == 1:
            # browse(0, ...) = showandgetdirectory
            roms_dir = dialog.browse(0,"Locate emulator roms","files","", False, False, "")
        elif selection == 2:
            keyboard = xbmc.Keyboard("*.zip|*.7z|*.bin", "Rom filter" )
            keyboard.doModal()
            if (keyboard.isConfirmed()):
                roms_filter = keyboard.getText()
        elif selection == 3:
            #0 = showandgetdirectory
            #1 = showandgetfile
            #2 = showandgetimage
            exe_file = dialog.browse(1,"Locate emulator executable","files","", False, False, "")
        elif selection == 4:
            keyboard = xbmc.Keyboard( "%ROM%", "Enter exe arguments")
            keyboard.doModal()
            if (keyboard.isConfirmed()):
                exe_args = keyboard.getText()
        elif selection == 5:
            # "Add": validate mandatory fields before saving.
            validPlatform = len( platform ) > 0
            validExe = len( exe_file ) > 0
            validRomsDir = len( roms_dir ) > 0
            isValid = validPlatform and validExe and validRomsDir
            if isValid:
                Config.config.AddEmulator( platform, roms_dir, roms_filter, exe_file, exe_args )
                indialog = 0
            else:
                # Build a readable list of the missing fields for the error box.
                invalidFields = ""
                if not validPlatform:
                    invalidFields = invalidFields + "Platform, "
                if not validExe:
                    invalidFields = invalidFields + "Exe Path, "
                if not validRomsDir:
                    invalidFields = invalidFields + "Roms Dir, "
                if len( invalidFields ) > 0:
                    # Strip the trailing ", ".
                    invalidFields = invalidFields[: len( invalidFields )-2 ]
                dialog.ok( "Error", "The following fields have invalid values", invalidFields )
        elif selection == 6 or selection < 0:
            # "Cancel" chosen or dialog dismissed.
            indialog = 0
    del dialog
| {
"content_hash": "e6ccc6f3c7b7f435ebeb5ad2374095c1",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 178,
"avg_line_length": 31.73972602739726,
"alnum_prop": 0.659041864479931,
"repo_name": "peteward44/xbmc-romulator",
"id": "626164f6a60e987943092d777d439347c4c328ee",
"size": "2317",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugin.program.romulator/resources/lib/AddEmulatorDialog.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1999"
},
{
"name": "Python",
"bytes": "91510"
},
{
"name": "Shell",
"bytes": "1453"
},
{
"name": "Visual Basic",
"bytes": "286"
}
],
"symlink_target": ""
} |
"""
eq
Usage:
eq hello
eq log [options]
eq -h | --help
eq --version
Options:
-h --help Shows this screen.
--version Shows version.
--limit=<dec> Number of results [default: 10].
--start-time=<Date/Time> Query time period defaults to 30 days to present.
--end-time=<Date/Time> Query time period defaults to 30 days to present.
--min-mag=<dec> Minimum magnitude to report.
--max-mag=<dec> Maximum magnitude to report.
--min-depth=<km> Minimum depth to report [default: -100].
--max-depth=<km> Maximum depth to report [default: 1000].
--eventid=<id> Event ID of event to return. All other options are ignored.
--orderby=<clause> [time | time-asc | magnitude | magnitude-asc].
--latitude=<dec> Latitude of origin of circle for query
--longitude=<dec> Longitude of origin of circle for query
--maxradius-km=<dec> Radius in km of circle for query - mutually exclusive with --maxradius-deg
--maxradius-deg=<dec> Radius in degrees of circle for query - mutually exclusive with --maxradius-km
--minlatitude=<dec> Minimum latitude of rectangle for query
--maxlatitude=<dec> Maximum latitude of rectangle for query
--minlongitude=<dec> Minimum longitude of rectangle for query
--maxlongitude=<dec> Maximum longitude of rectangle for query
Examples:
eq hello
eq log
Help:
For help using this tool, please open an issue at
https://github.com/jacobnosal/eq-logger-cli/issues
or contact me on twitter @jacobnosal.
All times use ISO8601 Date/Time format. Unless a timezone is specified, UTC is assumed.
Examples:
2017-07-23, Implicit UTC timezone, and time at start of the day (00:00:00)
2017-07-23T03:45:11, Implicit UTC timezone.
2017-07-23T03:45:11+00:00, Explicit timezone.
These parameter sets are mutually exclusive. Mixing these parameters will query an
intersection of the geometries which can be empty. An error will not be returned.
"""
from inspect import getmembers, isclass
from docopt import docopt
from . import __version__ as VERSION
def main():
    """Main CLI entry point.

    Parses the command line with docopt and dispatches to the matching
    command class defined in the eq_logger.commands package.
    """
    import eq_logger.commands

    options = docopt(__doc__, version=VERSION)

    # Here we'll try to dynamically match the command the user is trying to
    # run with a pre-defined command class we've already created.
    for (key, value) in options.items():
        if hasattr(eq_logger.commands, key) and value:
            module = getattr(eq_logger.commands, key)
            # BUG FIX: the original rebound ``eq_logger.commands`` to the
            # getmembers() result, clobbering the module reference that the
            # hasattr() check above relies on for subsequent iterations.
            members = getmembers(module, isclass)
            command_cls = [cls for name, cls in members if name != 'Base'][0]
            command = command_cls(options)
            command.run()
| {
"content_hash": "5faf64e80fb0a4b5c5197bf267905281",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 108,
"avg_line_length": 37.77922077922078,
"alnum_prop": 0.6431763492609144,
"repo_name": "jacobnosal/eq-logger-cli",
"id": "4fd0c4ae81f519145ec39acbd0228874baf5fdea",
"size": "2909",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "eq_logger/cli.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14290"
}
],
"symlink_target": ""
} |
from __future__ import print_function, division, with_statement
__author__ = 'Vlad Popovici'
__version__ = 0.15
def img_to_raw():
    """Convert an image to raw format.

    Placeholder -- not yet implemented; currently a no-op.
    """
    pass
| {
"content_hash": "ee86176d144a50e2c0dc080540a6c528",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 63,
"avg_line_length": 17.75,
"alnum_prop": 0.6549295774647887,
"repo_name": "higex/qpath",
"id": "52594d44afc018b4eec19272c19ef469fe83387d",
"size": "142",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "io/import.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "566671"
}
],
"symlink_target": ""
} |
import os, sys # low level handling, such as command line stuff
import string # string methods available
import re # regular expressions
import getopt # comand line argument handling
import hashlib
from low import * # custom functions, written by myself
from misa import MisaSSR
import newick
from collections import defaultdict
import pickle
# =============================================================================
def show_help( ):
    """ displays the program parameter list and usage information """
    # stdout() comes from the star-import of the author's "low" helper module.
    stdout( "usage: " + sys.argv[0] + " -f <path> " )
    stdout( " " )
    stdout( " option description" )
    stdout( " -h help (this text here)" )
    stdout( " -f orthologous region map" )
    stdout( " -g all.gff" )
    stdout( " -m gene2transcript2protein.map" )
    stdout( " " )
    # Exit non-zero: help is shown only on missing/invalid arguments.
    sys.exit(1)
# =============================================================================
def handle_arguments():
    """ verifies the presence of all necessary arguments and returns the data dir """
    # NOTE: Python 2 code (dict.has_key); stderr() and file_exists() come
    # from the star-import of the "low" helper module.
    if len ( sys.argv ) == 1:
        stderr( "no arguments provided." )
        show_help()

    try: # check for the right arguments
        keys, values = getopt.getopt( sys.argv[1:], "hf:g:m:" )
    except getopt.GetoptError:
        stderr( "invalid arguments provided." )
        show_help()

    args = {}
    for key, value in keys:
        if key == '-f': args['file'] = value
        if key == '-g': args['gff'] = value
        if key == '-m': args['map'] = value

    # Each required argument must be present and point to an existing file.
    if not args.has_key('file'):
        stderr( "orth.map file argument missing." )
        show_help()
    elif not file_exists( args.get('file') ):
        stderr( "orth.map file does not exist." )
        show_help()

    if not args.has_key('map'):
        stderr( "gene2transcript2protein map file argument missing." )
        show_help()
    elif not file_exists( args.get('map') ):
        stderr( "gene2transcript2protein map file does not exist." )
        show_help()

    if not args.has_key('gff'):
        stderr( "gff file argument missing." )
        show_help()
    elif not file_exists( args.get('gff') ):
        stderr( "gff file does not exist." )
        show_help()

    return args
# =============================================================================
def coordinates_to_gene(file):
    """Map "species|chromosome|start" keys to [gene_id, strand] from a GFF file.

    Only rows whose 4th column is "gene" are used; the gene id is taken
    from the ID= tag of the attribute column.  NOTE: Python 2 code
    (string.join).
    """
    hash = {}
    fo = open(file)
    for line in fo:
        cols = line.rstrip().split("\t")
        if not cols[3] == "gene": continue
        key = string.join([cols[0], cols[1], cols[4]], "|")
        value = [re.search("ID=([^;]+);", cols[9]).group(1), cols[7]]
        hash[key] = value
    fo.close()
    return hash
def gene_to_transcript(file):
    """Parse a tab-separated map file and return {gene_id: transcript_id}.

    Only the first two columns of each line are used.
    """
    mapping = {}
    with open(file) as handle:
        for record in handle:
            gid, tid = record.rstrip().split("\t")[0:2]
            mapping[gid] = tid
    return mapping
def get_gene_features(file):
    """Collect exon and intron coordinates per transcript from a GFF file.

    Returns two defaultdicts keyed by transcript id (the Parent= tag of
    the attribute column), each mapping to a list of [start, stop, strand]
    triples (all strings, in file order).
    """
    exons = defaultdict(list)
    introns = defaultdict(list)
    with open(file) as handle:
        for record in handle:
            fields = record.rstrip().split("\t")
            feature = fields[3]
            if feature not in ("exon", "intron"):
                continue
            transcript_id = re.search("Parent=([^;]+)", fields[9]).group(1)
            triple = [fields[4], fields[5], fields[7]]
            target = exons if feature == "exon" else introns
            target[transcript_id].append(triple)
    return exons, introns
# =============================================================================
# === MAIN ====================================================================
# =============================================================================
def main( args ):
    """Filter orthologous region pairs and expand genes into exon/intron rows.

    Reads the orth.map file line by line, drops region pairs whose lengths
    are zero or too dissimilar, and for "gene" rows replaces the gene pair
    with one output row per aligned exon and intron pair (only when both
    genes have identical exon/intron counts).  NOTE: Python 2 code
    (print >> stream, print statement, dict.has_key, string.join).
    """
    gene2transcript = gene_to_transcript(args['map'])
    print >> sys.stderr, "gene2transcript loaded."
    coord2gene = coordinates_to_gene(args['gff'])
    print >> sys.stderr, "coord2gene loaded."
    exons, introns = get_gene_features(args['gff'])
    print >> sys.stderr, "gene features loaded."

    fo = open(args['file'])
    for line in fo:
        if line.startswith("#"): continue
        if len(line.split("\t")) < 9: continue
        line = line.rstrip()
        cols = line.split("\t")
        species1, species2 = cols[0:2]
        type = cols[2]
        chr1, chr2 = cols[3], cols[6]
        start1, start2 = cols[4], cols[7]
        stop1, stop2 = cols[5], cols[8]
        # remove regions with length=0 or where one region is significantly longer (150%)
        l1 = int(cols[5]) - int(cols[4])
        l2 = int(cols[8]) - int(cols[7])
        if l1 == 0 or l2 == 0: continue
        if float(max([l1,l2])) / float(min([l1,l2])) > 1.5 or (max([l1,l2]) - min([l1,l2])) > 5000: continue

        if type == "gene":
            # Resolve both genes and their transcripts; skip the pair when
            # either gene has no transcript mapping.
            key = string.join([species1, chr1, start1], "|")
            gid, strand1 = coord2gene[key]
            if not gene2transcript.has_key(gid): continue
            tid1 = gene2transcript[gid]
            exons1 = exons[tid1]
            introns1 = introns[tid1]
            key = string.join([species2, chr2, start2], "|")
            gid, strand2 = coord2gene[key]
            if not gene2transcript.has_key(gid): continue
            tid2 = gene2transcript[gid]
            exons2 = exons[tid2]
            introns2 = introns[tid2]
            # Only keep structurally identical gene pairs.
            if len(exons1) != len(exons2): continue
            if len(introns1) != len(introns2): continue
            cols.insert(6, strand1)
            cols.insert(10, strand2)
            # replace a gene by all its exons and introns
            for i in range(len(exons1)):
                ex1, ex2 = exons1[i], exons2[i]
                cols[2] = "exon"
                cols[4:7] = ex1
                cols[8:11] = ex2
                print string.join(cols, "\t")
            for i in range(len(introns1)):
                in1, in2 = introns1[i], introns2[i]
                cols[2] = "intron"
                cols[4:7] = in1
                cols[8:11] = in2
                print string.join(cols, "\t")
            continue

        # Non-gene rows: look up strands at position stop+1 and emit the
        # row with the strand columns inserted.
        key1 = string.join([species1, chr1, str(int(stop1) +1)], "|")
        key2 = string.join([species2, chr2, str(int(stop2) +1)], "|")
        gid, strand1 = coord2gene[key1]
        gid, strand2 = coord2gene[key2]
        cols.insert(6, strand1)
        cols.insert(10, strand2)
        print string.join(cols, "\t")
    fo.close()
# print "exons equal:", ee, "exons unequal:", ue, "introns equal:", ei, "introns unqual:", ui, "both equal:", be
# =============================================================================
# Run only when executed as a script, so importing this module for reuse
# does not trigger argument parsing (which calls sys.exit on failure).
if __name__ == '__main__':
    args = handle_arguments()
    main( args )
| {
"content_hash": "4347196f23560a67ffce2b389c4b2876",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 113,
"avg_line_length": 32.136842105263156,
"alnum_prop": 0.5452014412053717,
"repo_name": "lotharwissler/bioinformatics",
"id": "dcc500c2eb9c6e0c4053dbaba356863be968ce59",
"size": "6125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/misa/qc-orthologous-regions.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "618072"
},
{
"name": "Ruby",
"bytes": "40829"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
def add_id_to_detail_page_url_scraped_obj_attributes(apps, schema_editor):
    """Flag every attribute of type 'U' (detail page URL) as an ID field."""
    ScrapedObjAttr = apps.get_model("dynamic_scraper", "ScrapedObjAttr")
    for attribute in ScrapedObjAttr.objects.all():
        if attribute.attr_type != 'U':
            continue
        attribute.id_field = True
        attribute.save()
class Migration(migrations.Migration):
    """Add ScrapedObjAttr.id_field and backfill it for URL-type attributes."""

    dependencies = [
        ('dynamic_scraper', '0003_auto_20150610_0906'),
    ]

    operations = [
        migrations.AddField(
            model_name='scrapedobjattr',
            name='id_field',
            field=models.BooleanField(default=False),
        ),
        # Data migration: mark existing DETAIL_PAGE_URL attributes as id fields.
        migrations.RunPython(add_id_to_detail_page_url_scraped_obj_attributes)
    ]
| {
"content_hash": "981e12238c365ce37d2a7b53fb187cff",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 78,
"avg_line_length": 29.153846153846153,
"alnum_prop": 0.633245382585752,
"repo_name": "Brother-Simon/django-dynamic-scraper",
"id": "6a72a0d6c93d0547b33dfe740cf272103e8986fa",
"size": "782",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "dynamic_scraper/migrations/0004_scrapedobjattr_id_field.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "7279"
},
{
"name": "JavaScript",
"bytes": "1160"
},
{
"name": "Python",
"bytes": "222263"
},
{
"name": "Shell",
"bytes": "4570"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import os
import sys
try:
import io
StringIO = io.StringIO
except ImportError:
import StringIO
StringIO = StringIO.StringIO
import time
import socket
from tornado.web import Application
from tornado.web import RequestHandler
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from multiprocessing import Process
PY3 = sys.version_info[0] == 3
# Text/bytes type aliases so the rest of the module is version-agnostic.
if PY3:
    text_type = str
    byte_type = bytes
else:
    text_type = unicode
    byte_type = str


def utf8(s):
    """Coerce *s* to the byte type, UTF-8 encoding text input first."""
    if isinstance(s, text_type):
        return byte_type(s.encode('utf-8'))
    return byte_type(s)
# Keep a reference to the real socket class before HTTPretty (under test)
# monkey-patches socket.socket.
true_socket = socket.socket
PY3 = sys.version_info[0] == 3
if not PY3:
    # On Python 2, emulate the Python 3 bytes(str, encoding) call signature.
    bytes = lambda s, *args: str(s)
class BubblesHandler(RequestHandler):
    """Tornado handler returning a fixed ASCII "bubbles" payload."""
    def get(self):
        self.write(". o O 0 O o . o O 0 O o . o O 0 O o . o O 0 O o . o O 0 O o .")
class ComeHandler(RequestHandler):
    """Tornado handler returning a fixed greeting payload."""
    def get(self):
        self.write("<- HELLO WORLD ->")
class TornadoServer(object):
    """Runs a small Tornado HTTP app in a child process for functional tests."""
    is_running = False

    def __init__(self, port):
        self.port = int(port)
        self.process = None

    @classmethod
    def get_handlers(cls):
        """Return the Tornado Application wired with the test routes."""
        return Application([
            (r"/go-for-bubbles/?", BubblesHandler),
            (r"/come-again/?", ComeHandler),
        ])

    def start(self):
        """Start the server in a separate process and wait briefly for boot."""
        def go(app, port, data={}):
            # Disable HTTPretty inside the child so the real server can
            # bind and serve actual sockets.
            from httpretty import HTTPretty
            HTTPretty.disable()
            http = HTTPServer(app)
            http.listen(int(port))
            IOLoop.instance().start()

        app = self.get_handlers()
        data = {}
        args = (app, self.port, data)
        self.process = Process(target=go, args=args)
        self.process.start()
        # Give the child process time to start listening.
        time.sleep(0.4)

    def stop(self):
        """Kill the server process (SIGKILL first, terminate as fallback)."""
        try:
            os.kill(self.process.pid, 9)
        except OSError:
            self.process.terminate()
        finally:
            self.is_running = False
class TCPServer(object):
    """Runs a raw echo TCP server in a child process for functional tests."""
    def __init__(self, port):
        self.port = int(port)

    def start(self):
        """Start the echo server in a separate process and wait for boot."""
        def go(port):
            # Disable HTTPretty inside the child so real sockets work.
            from httpretty import HTTPretty
            HTTPretty.disable()
            import socket
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.bind(('localhost', port))
            s.listen(True)
            conn, addr = s.accept()
            # Echo each received chunk back with a "RECEIVED: " prefix.
            while True:
                data = conn.recv(1024)
                conn.send(b"RECEIVED: " + bytes(data))
                print("*" * 100)
                print(data)
                print("*" * 100)
            conn.close()

        args = [self.port]
        self.process = Process(target=go, args=args)
        self.process.start()
        # Give the child process time to start listening.
        time.sleep(0.4)

    def stop(self):
        """Kill the server process (SIGKILL first, terminate as fallback)."""
        try:
            os.kill(self.process.pid, 9)
        except OSError:
            self.process.terminate()
        finally:
            self.is_running = False
class TCPClient(object):
    """Minimal blocking TCP client that uses the un-patched socket class."""
    def __init__(self, port):
        self.port = int(port)
        self.sock = true_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect(('localhost', self.port))

    def send(self, what):
        """Send *what* and return the server's echo (prefix + payload)."""
        # Relies on the module-level bytes() shim on Python 2.
        data = bytes(what, 'utf-8')
        self.sock.sendall(data)
        # Read back the echoed payload plus the server's prefix bytes.
        return self.sock.recv(len(data) + 11)

    def close(self):
        try:
            self.sock.close()
        except socket.error:
            pass # already closed

    def __del__(self):
        # Best-effort cleanup when the client is garbage-collected.
        self.close()
| {
"content_hash": "eea83e82a191c49d55703ea859b2c013",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 83,
"avg_line_length": 22.959731543624162,
"alnum_prop": 0.5559777842736042,
"repo_name": "CantemoInternal/HTTPretty",
"id": "03634a55173270e4a9cdc79cbce023611f936ed8",
"size": "4648",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/functional/testserver.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_wireless_controller_wtp
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
# Auto-applied fixture: patch the module's Connection class so no real
# device connection is attempted during any test in this file.
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_wireless_controller_wtp.Connection')
    return connection_class_mock
# NOTE(review): this passes the fixture *function* itself, not a mock
# instance, to FortiOSHandler -- confirm this is intended (the handler's
# connection is never exercised because set/delete are patched per test).
fos_instance = FortiOSHandler(connection_mock)
def test_wireless_controller_wtp_creation(mocker):
    """Successful creation: module args are mapped to API names and POSTed."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    # Module input uses underscore-separated keys.
    input_data = {
        'username': 'admin',
        'state': 'present',
        'wireless_controller_wtp': {
            'admin': 'discovered',
            'allowaccess': 'telnet',
            'bonjour_profile': 'test_value_5',
            'coordinate_enable': 'enable',
            'coordinate_latitude': 'test_value_7',
            'coordinate_longitude': 'test_value_8',
            'coordinate_x': 'test_value_9',
            'coordinate_y': 'test_value_10',
            'image_download': 'enable',
            'index': '12',
            'ip_fragment_preventing': 'tcp-mss-adjust',
            'led_state': 'enable',
            'location': 'test_value_15',
            'login_passwd': 'test_value_16',
            'login_passwd_change': 'yes',
            'mesh_bridge_enable': 'default',
            'name': 'default_name_19',
            'override_allowaccess': 'enable',
            'override_ip_fragment': 'enable',
            'override_lan': 'enable',
            'override_led_state': 'enable',
            'override_login_passwd_change': 'enable',
            'override_split_tunnel': 'enable',
            'override_wan_port_mode': 'enable',
            'split_tunneling_acl_local_ap_subnet': 'enable',
            'split_tunneling_acl_path': 'tunnel',
            'tun_mtu_downlink': '29',
            'tun_mtu_uplink': '30',
            'wan_port_mode': 'wan-lan',
            'wtp_id': 'test_value_32',
            'wtp_mode': 'normal',
            'wtp_profile': 'test_value_34'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_wireless_controller_wtp.fortios_wireless_controller(input_data, fos_instance)

    # The module must translate keys to the API's hyphenated form.
    expected_data = {
        'admin': 'discovered',
        'allowaccess': 'telnet',
        'bonjour-profile': 'test_value_5',
        'coordinate-enable': 'enable',
        'coordinate-latitude': 'test_value_7',
        'coordinate-longitude': 'test_value_8',
        'coordinate-x': 'test_value_9',
        'coordinate-y': 'test_value_10',
        'image-download': 'enable',
        'index': '12',
        'ip-fragment-preventing': 'tcp-mss-adjust',
        'led-state': 'enable',
        'location': 'test_value_15',
        'login-passwd': 'test_value_16',
        'login-passwd-change': 'yes',
        'mesh-bridge-enable': 'default',
        'name': 'default_name_19',
        'override-allowaccess': 'enable',
        'override-ip-fragment': 'enable',
        'override-lan': 'enable',
        'override-led-state': 'enable',
        'override-login-passwd-change': 'enable',
        'override-split-tunnel': 'enable',
        'override-wan-port-mode': 'enable',
        'split-tunneling-acl-local-ap-subnet': 'enable',
        'split-tunneling-acl-path': 'tunnel',
        'tun-mtu-downlink': '29',
        'tun-mtu-uplink': '30',
        'wan-port-mode': 'wan-lan',
        'wtp-id': 'test_value_32',
        'wtp-mode': 'normal',
        'wtp-profile': 'test_value_34'
    }

    set_method_mock.assert_called_with('wireless-controller', 'wtp', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_wireless_controller_wtp_creation_fails(mocker):
    """API error on creation: module reports is_error and no change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    # Simulate an HTTP 500 from the device.
    set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    # Module input uses underscore-separated keys.
    input_data = {
        'username': 'admin',
        'state': 'present',
        'wireless_controller_wtp': {
            'admin': 'discovered',
            'allowaccess': 'telnet',
            'bonjour_profile': 'test_value_5',
            'coordinate_enable': 'enable',
            'coordinate_latitude': 'test_value_7',
            'coordinate_longitude': 'test_value_8',
            'coordinate_x': 'test_value_9',
            'coordinate_y': 'test_value_10',
            'image_download': 'enable',
            'index': '12',
            'ip_fragment_preventing': 'tcp-mss-adjust',
            'led_state': 'enable',
            'location': 'test_value_15',
            'login_passwd': 'test_value_16',
            'login_passwd_change': 'yes',
            'mesh_bridge_enable': 'default',
            'name': 'default_name_19',
            'override_allowaccess': 'enable',
            'override_ip_fragment': 'enable',
            'override_lan': 'enable',
            'override_led_state': 'enable',
            'override_login_passwd_change': 'enable',
            'override_split_tunnel': 'enable',
            'override_wan_port_mode': 'enable',
            'split_tunneling_acl_local_ap_subnet': 'enable',
            'split_tunneling_acl_path': 'tunnel',
            'tun_mtu_downlink': '29',
            'tun_mtu_uplink': '30',
            'wan_port_mode': 'wan-lan',
            'wtp_id': 'test_value_32',
            'wtp_mode': 'normal',
            'wtp_profile': 'test_value_34'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_wireless_controller_wtp.fortios_wireless_controller(input_data, fos_instance)

    # The module must still translate keys to the API's hyphenated form.
    expected_data = {
        'admin': 'discovered',
        'allowaccess': 'telnet',
        'bonjour-profile': 'test_value_5',
        'coordinate-enable': 'enable',
        'coordinate-latitude': 'test_value_7',
        'coordinate-longitude': 'test_value_8',
        'coordinate-x': 'test_value_9',
        'coordinate-y': 'test_value_10',
        'image-download': 'enable',
        'index': '12',
        'ip-fragment-preventing': 'tcp-mss-adjust',
        'led-state': 'enable',
        'location': 'test_value_15',
        'login-passwd': 'test_value_16',
        'login-passwd-change': 'yes',
        'mesh-bridge-enable': 'default',
        'name': 'default_name_19',
        'override-allowaccess': 'enable',
        'override-ip-fragment': 'enable',
        'override-lan': 'enable',
        'override-led-state': 'enable',
        'override-login-passwd-change': 'enable',
        'override-split-tunnel': 'enable',
        'override-wan-port-mode': 'enable',
        'split-tunneling-acl-local-ap-subnet': 'enable',
        'split-tunneling-acl-path': 'tunnel',
        'tun-mtu-downlink': '29',
        'tun-mtu-uplink': '30',
        'wan-port-mode': 'wan-lan',
        'wtp-id': 'test_value_32',
        'wtp-mode': 'normal',
        'wtp-profile': 'test_value_34'
    }

    set_method_mock.assert_called_with('wireless-controller', 'wtp', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_wireless_controller_wtp_removal(mocker):
    """A successful 'absent' run must issue a DELETE and report a change."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    # Attributes of the WTP object to remove (snake_case, as the module expects).
    wtp_settings = {
        'admin': 'discovered',
        'allowaccess': 'telnet',
        'bonjour_profile': 'test_value_5',
        'coordinate_enable': 'enable',
        'coordinate_latitude': 'test_value_7',
        'coordinate_longitude': 'test_value_8',
        'coordinate_x': 'test_value_9',
        'coordinate_y': 'test_value_10',
        'image_download': 'enable',
        'index': '12',
        'ip_fragment_preventing': 'tcp-mss-adjust',
        'led_state': 'enable',
        'location': 'test_value_15',
        'login_passwd': 'test_value_16',
        'login_passwd_change': 'yes',
        'mesh_bridge_enable': 'default',
        'name': 'default_name_19',
        'override_allowaccess': 'enable',
        'override_ip_fragment': 'enable',
        'override_lan': 'enable',
        'override_led_state': 'enable',
        'override_login_passwd_change': 'enable',
        'override_split_tunnel': 'enable',
        'override_wan_port_mode': 'enable',
        'split_tunneling_acl_local_ap_subnet': 'enable',
        'split_tunneling_acl_path': 'tunnel',
        'tun_mtu_downlink': '29',
        'tun_mtu_uplink': '30',
        'wan_port_mode': 'wan-lan',
        'wtp_id': 'test_value_32',
        'wtp_mode': 'normal',
        'wtp_profile': 'test_value_34'
    }
    input_data = {
        'username': 'admin',
        'state': 'absent',
        'wireless_controller_wtp': wtp_settings,
        'vdom': 'root'}

    is_error, changed, response = fortios_wireless_controller_wtp.fortios_wireless_controller(input_data, fos_instance)

    delete_mock.assert_called_with('wireless-controller', 'wtp', mkey=ANY, vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_wireless_controller_wtp_deletion_fails(mocker):
    """An error from DELETE must surface as is_error with no change reported."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete',
        return_value={'status': 'error', 'http_method': 'POST', 'http_status': 500})

    # Attributes of the WTP object to remove (snake_case, as the module expects).
    wtp_settings = {
        'admin': 'discovered',
        'allowaccess': 'telnet',
        'bonjour_profile': 'test_value_5',
        'coordinate_enable': 'enable',
        'coordinate_latitude': 'test_value_7',
        'coordinate_longitude': 'test_value_8',
        'coordinate_x': 'test_value_9',
        'coordinate_y': 'test_value_10',
        'image_download': 'enable',
        'index': '12',
        'ip_fragment_preventing': 'tcp-mss-adjust',
        'led_state': 'enable',
        'location': 'test_value_15',
        'login_passwd': 'test_value_16',
        'login_passwd_change': 'yes',
        'mesh_bridge_enable': 'default',
        'name': 'default_name_19',
        'override_allowaccess': 'enable',
        'override_ip_fragment': 'enable',
        'override_lan': 'enable',
        'override_led_state': 'enable',
        'override_login_passwd_change': 'enable',
        'override_split_tunnel': 'enable',
        'override_wan_port_mode': 'enable',
        'split_tunneling_acl_local_ap_subnet': 'enable',
        'split_tunneling_acl_path': 'tunnel',
        'tun_mtu_downlink': '29',
        'tun_mtu_uplink': '30',
        'wan_port_mode': 'wan-lan',
        'wtp_id': 'test_value_32',
        'wtp_mode': 'normal',
        'wtp_profile': 'test_value_34'
    }
    input_data = {
        'username': 'admin',
        'state': 'absent',
        'wireless_controller_wtp': wtp_settings,
        'vdom': 'root'}

    is_error, changed, response = fortios_wireless_controller_wtp.fortios_wireless_controller(input_data, fos_instance)

    delete_mock.assert_called_with('wireless-controller', 'wtp', mkey=ANY, vdom='root')
    schema_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_wireless_controller_wtp_idempotent(mocker):
    """A 404 from `set` is treated as "already in desired state": no error, no change."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'error', 'http_method': 'DELETE', 'http_status': 404})

    # Attributes of the desired WTP object (snake_case, as the module expects).
    wtp_settings = {
        'admin': 'discovered',
        'allowaccess': 'telnet',
        'bonjour_profile': 'test_value_5',
        'coordinate_enable': 'enable',
        'coordinate_latitude': 'test_value_7',
        'coordinate_longitude': 'test_value_8',
        'coordinate_x': 'test_value_9',
        'coordinate_y': 'test_value_10',
        'image_download': 'enable',
        'index': '12',
        'ip_fragment_preventing': 'tcp-mss-adjust',
        'led_state': 'enable',
        'location': 'test_value_15',
        'login_passwd': 'test_value_16',
        'login_passwd_change': 'yes',
        'mesh_bridge_enable': 'default',
        'name': 'default_name_19',
        'override_allowaccess': 'enable',
        'override_ip_fragment': 'enable',
        'override_lan': 'enable',
        'override_led_state': 'enable',
        'override_login_passwd_change': 'enable',
        'override_split_tunnel': 'enable',
        'override_wan_port_mode': 'enable',
        'split_tunneling_acl_local_ap_subnet': 'enable',
        'split_tunneling_acl_path': 'tunnel',
        'tun_mtu_downlink': '29',
        'tun_mtu_uplink': '30',
        'wan_port_mode': 'wan-lan',
        'wtp_id': 'test_value_32',
        'wtp_mode': 'normal',
        'wtp_profile': 'test_value_34'
    }
    input_data = {
        'username': 'admin',
        'state': 'present',
        'wireless_controller_wtp': wtp_settings,
        'vdom': 'root'}

    is_error, changed, response = fortios_wireless_controller_wtp.fortios_wireless_controller(input_data, fos_instance)

    # The module forwards the same attributes with snake_case keys translated
    # to the dash-separated names the FortiOS API uses.
    expected_data = {key.replace('_', '-'): value
                     for key, value in wtp_settings.items()}
    set_mock.assert_called_with('wireless-controller', 'wtp', data=expected_data, vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_wireless_controller_wtp_filter_foreign_attributes(mocker):
    """Attributes not in the module schema must be dropped before calling `set`."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    # Valid attributes plus one foreign key that the module must filter out.
    wtp_settings = {
        'random_attribute_not_valid': 'tag',
        'admin': 'discovered',
        'allowaccess': 'telnet',
        'bonjour_profile': 'test_value_5',
        'coordinate_enable': 'enable',
        'coordinate_latitude': 'test_value_7',
        'coordinate_longitude': 'test_value_8',
        'coordinate_x': 'test_value_9',
        'coordinate_y': 'test_value_10',
        'image_download': 'enable',
        'index': '12',
        'ip_fragment_preventing': 'tcp-mss-adjust',
        'led_state': 'enable',
        'location': 'test_value_15',
        'login_passwd': 'test_value_16',
        'login_passwd_change': 'yes',
        'mesh_bridge_enable': 'default',
        'name': 'default_name_19',
        'override_allowaccess': 'enable',
        'override_ip_fragment': 'enable',
        'override_lan': 'enable',
        'override_led_state': 'enable',
        'override_login_passwd_change': 'enable',
        'override_split_tunnel': 'enable',
        'override_wan_port_mode': 'enable',
        'split_tunneling_acl_local_ap_subnet': 'enable',
        'split_tunneling_acl_path': 'tunnel',
        'tun_mtu_downlink': '29',
        'tun_mtu_uplink': '30',
        'wan_port_mode': 'wan-lan',
        'wtp_id': 'test_value_32',
        'wtp_mode': 'normal',
        'wtp_profile': 'test_value_34'
    }
    input_data = {
        'username': 'admin',
        'state': 'present',
        'wireless_controller_wtp': wtp_settings,
        'vdom': 'root'}

    is_error, changed, response = fortios_wireless_controller_wtp.fortios_wireless_controller(input_data, fos_instance)

    # Expected payload: every valid key translated to its dash-separated API
    # name; the foreign attribute must not appear.
    expected_data = {key.replace('_', '-'): value
                     for key, value in wtp_settings.items()
                     if key != 'random_attribute_not_valid'}
    set_mock.assert_called_with('wireless-controller', 'wtp', data=expected_data, vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
| {
"content_hash": "dee8b9791d76cdb94c56a6bf3be0796b",
"timestamp": "",
"source": "github",
"line_count": 493,
"max_line_length": 142,
"avg_line_length": 41.16227180527383,
"alnum_prop": 0.5542305228403883,
"repo_name": "thaim/ansible",
"id": "aea02faba54f66a2899a4208a2f5623920a2ca34",
"size": "20989",
"binary": false,
"copies": "19",
"ref": "refs/heads/fix-broken-link",
"path": "test/units/modules/network/fortios/test_fortios_wireless_controller_wtp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "246"
}
],
"symlink_target": ""
} |
import unittest
import os
import paddle
import paddle.fluid as fluid
from paddle.fluid import core
import paddle.static as static
class Test_XPU_Places(unittest.TestCase):
def assert_places_equal(self, places0, places1):
self.assertEqual(len(places0), len(places1))
for place0, place1 in zip(places0, places1):
self.assertEqual(type(place0), type(place1))
self.assertEqual(place0.get_device_id(), place1.get_device_id())
def test_check_preset_envs(self):
if core.is_compiled_with_xpu():
os.environ["FLAGS_selected_xpus"] = "0"
place_list = static.xpu_places()
self.assert_places_equal([fluid.XPUPlace(0)], place_list)
def test_check_no_preset_envs(self):
if core.is_compiled_with_xpu():
place_list = static.xpu_places(0)
self.assert_places_equal([fluid.XPUPlace(0)], place_list)
# Run the XPU place tests under static-graph mode when executed directly.
if __name__ == '__main__':
    paddle.enable_static()
    unittest.main()
| {
"content_hash": "f1380aaed6f3855bb860749979b59ad8",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 76,
"avg_line_length": 32.9,
"alnum_prop": 0.646403242147923,
"repo_name": "luotao1/Paddle",
"id": "75679047301df4757a67097d9cf2f940d9bd1d1c",
"size": "1600",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/xpu/test_xpu_place.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36771446"
},
{
"name": "CMake",
"bytes": "903079"
},
{
"name": "Cuda",
"bytes": "5200715"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36248258"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553175"
}
],
"symlink_target": ""
} |
import re
import logging
from django import forms
from django.forms import ModelForm
from django.forms.models import BaseInlineFormSet
from django.forms.models import inlineformset_factory
from django.utils.translation import ugettext_lazy as _
from django.utils.functional import curry
from django.core.files.images import get_image_dimensions
from django.contrib.auth.models import Group
from django.core.exceptions import NON_FIELD_ERRORS, MultipleObjectsReturned
from journalmanager import models
from journalmanager import choices
from scielo_extensions import formfields as fields
from django.conf import settings
# Module-level logger used for form-validation diagnostics.
logger = logging.getLogger(__name__)

# Marker value always stored as the ``number`` of a special issue.
SPECIAL_ISSUE_FORM_FIELD_NUMBER = 'spe'
class UserCollectionContext(ModelForm):
    """Base ModelForm whose ``collections`` choices are restricted to the
    collections associated with the current user.

    Subclasses may receive the optional keyword argument ``collections_qset``;
    it is consumed here and never forwarded to the superclass ``__init__``.
    """
    collections = forms.ModelMultipleChoiceField(models.Collection.objects.none(),
        widget=forms.SelectMultiple(attrs={'title': _('Select one or more collections')}),
        required=True)

    def __init__(self, *args, **kwargs):
        user_collections = kwargs.pop('collections_qset', None)
        super(UserCollectionContext, self).__init__(*args, **kwargs)
        if user_collections is not None:
            managed_pks = (item.collection.pk for item in user_collections)
            self.fields['collections'].queryset = \
                models.Collection.objects.filter(pk__in=managed_pks)
class AheadForm(ModelForm):
    """Edits only a journal's ahead-of-print document counters."""
    class Meta():
        model = models.Journal
        fields = ('previous_ahead_documents', 'current_ahead_documents')
        widgets = {
            'previous_ahead_documents': forms.TextInput(attrs={'class': 'input-small'}),
            'current_ahead_documents': forms.TextInput(attrs={'class': 'input-small'}),
        }
class JournalForm(ModelForm):
    """Create/edit form for ``models.Journal``.

    Multi-valued relations are declared explicitly to customize widgets and
    requiredness; the ``collection`` queryset is narrowed in ``__init__`` to
    the collections passed via ``collections_qset``.
    """
    print_issn = fields.ISSNField(max_length=9, required=False)
    eletronic_issn = fields.ISSNField(max_length=9, required=False)
    languages = forms.ModelMultipleChoiceField(models.Language.objects.all(),
        widget=forms.SelectMultiple(attrs={'title': _('Select one or more languages')}),
        required=True)
    abstract_keyword_languages = forms.ModelMultipleChoiceField(models.Language.objects.all(),
        widget=forms.SelectMultiple(attrs={'title': _('Select one or more languages')}),
        required=True)
    sponsor = forms.ModelMultipleChoiceField(models.Sponsor.objects.all(),
        widget=forms.SelectMultiple(attrs={'title': _('Select one or more sponsors')}),
        required=True)
    subject_categories = forms.ModelMultipleChoiceField(models.SubjectCategory.objects.all(),
        widget=forms.SelectMultiple(attrs={'title': _('Select one or more categories')}),
        required=False)
    study_areas = forms.ModelMultipleChoiceField(models.StudyArea.objects.all(),
        widget=forms.SelectMultiple(attrs={'title': _('Select one or more study area')}),
        required=True)
    # Matches 4-digit years beginning with 1 or 2 (e.g. 1998, 2015).
    regex = re.compile(r'^(1|2)\d{3}$')
    collection = forms.ModelChoiceField(models.Collection.objects.none(),
        required=True)

    def __init__(self, *args, **kwargs):
        """``collections_qset`` (optional kwarg) narrows the ``collection``
        choices; it must not reach the superclass ``__init__``."""
        collections_qset = kwargs.pop('collections_qset', None)
        super(JournalForm, self).__init__(*args, **kwargs)
        if collections_qset is not None:
            self.fields['collection'].queryset = models.Collection.objects.filter(
                pk__in=(collection.collection.pk for collection in collections_qset))

    def save_all(self, creator):
        """Save the journal and its m2m relations, stamping ``creator`` on
        new instances, and return the saved journal."""
        journal = self.save(commit=False)
        if self.instance.pk is None:
            journal.creator = creator
        # pub_status_changed_by is excluded from the form; backfill it with
        # the current user whenever it is unset.
        # NOTE(review): nesting reconstructed from a whitespace-stripped
        # source — confirm this check is not meant to apply only to new rows.
        if not journal.pub_status_changed_by_id:
            journal.pub_status_changed_by = creator
        journal.save()
        self.save_m2m()
        return journal

    def clean(self):
        """Require at least one of the two ISSN fields."""
        cleaned_data = self.cleaned_data
        print_issn = cleaned_data.get("print_issn")
        eletronic_issn = cleaned_data.get("eletronic_issn")
        if not (print_issn or eletronic_issn):
            msg = u'Eletronic ISSN or Print ISSN must be filled.'
            self._errors['scielo_issn'] = self.error_class([msg])
        return cleaned_data

    def clean_acronym(self):
        # Acronyms are stored lower-case.
        return self.cleaned_data["acronym"].lower()

    def clean_init_year(self):
        if self.cleaned_data["init_year"]:
            result = self.regex.match(self.cleaned_data["init_year"])
            if result is None:
                raise forms.ValidationError(u'Invalid Date')
        return self.cleaned_data["init_year"]

    def clean_final_year(self):
        if self.cleaned_data["final_year"]:
            result = self.regex.match(self.cleaned_data["final_year"])
            if result is None:
                raise forms.ValidationError(u'Invalid Date')
        return self.cleaned_data["final_year"]

    def _clean_image(self, field_name, width_key, height_key):
        """Shared validator for the ``cover`` and ``logo`` image fields.

        Checks content type, size and exact dimensions (looked up in
        ``settings.IMAGE_DIMENSIONS`` under ``width_key``/``height_key``)
        and returns the cleaned value unchanged.
        """
        image = self.cleaned_data[field_name]
        if image:
            # NOTE(review): the original guarded these checks with
            # ``if not <image>.name`` — presumably to validate only fresh
            # uploads; confirm against the upload handling in the views.
            if not image.name:
                if not image.content_type in settings.IMAGE_CONTENT_TYPE:
                    raise forms.ValidationError(u'File type is not supported')
                if image.size > settings.IMAGE_SIZE:
                    raise forms.ValidationError(u'File size not allowed')
                w, h = get_image_dimensions(image)
                if w != settings.IMAGE_DIMENSIONS[width_key]:
                    raise forms.ValidationError("The image is %ipx pixel wide. It's supposed to be %spx" % (w, settings.IMAGE_DIMENSIONS[width_key]))
                if h != settings.IMAGE_DIMENSIONS[height_key]:
                    raise forms.ValidationError("The image is %ipx pixel high. It's supposed to be %spx" % (h, settings.IMAGE_DIMENSIONS[height_key]))
        return image

    def clean_cover(self):
        # Delegates to the shared image validator (was duplicated with clean_logo).
        return self._clean_image('cover', 'width_cover', 'height_cover')

    def clean_logo(self):
        # Delegates to the shared image validator (was duplicated with clean_cover).
        return self._clean_image('logo', 'width_logo', 'height_logo')

    class Meta:
        model = models.Journal
        exclude = ('pub_status', 'pub_status_changed_by')
        # Overriding the default field types or widgets
        widgets = {
            'title': forms.TextInput(attrs={'class': 'span9'}),
            'title_iso': forms.TextInput(attrs={'class': 'span9'}),
            'short_title': forms.TextInput(attrs={'class': 'span9'}),
            'previous_title': forms.Select(attrs={'class': 'span9'}),
            'acronym': forms.TextInput(attrs={'class': 'span2'}),
            'scielo_issn': forms.Select(attrs={'class': 'span3'}),
            'subject_descriptors': forms.Textarea(attrs={'class': 'span9'}),
            'init_year': forms.TextInput(attrs={'class': 'datepicker', 'id': 'datepicker0'}),
            'init_vol': forms.TextInput(attrs={'class': 'span2'}),
            'init_num': forms.TextInput(attrs={'class': 'span2'}),
            'final_year': forms.TextInput(attrs={'class': 'datepicker', 'id': 'datepicker1'}),
            'final_vol': forms.TextInput(attrs={'class': 'span2'}),
            'final_num': forms.TextInput(attrs={'class': 'span2'}),
            'url_main_collection': forms.TextInput(attrs={'class': 'span9'}),
            'url_online_submission': forms.TextInput(attrs={'class': 'span9'}),
            'url_journal': forms.TextInput(attrs={'class': 'span9'}),
            'notes': forms.Textarea(attrs={'class': 'span9'}),
            'editorial_standard': forms.Select(attrs={'class': 'span3'}),
            'copyrighter': forms.TextInput(attrs={'class': 'span8'}),
            'index_coverage': forms.Textarea(attrs={'class': 'span9'}),
            'other_previous_title': forms.TextInput(attrs={'class': 'span9'}),
            'editor_address': forms.TextInput(attrs={'class': 'span9'}),
            'publisher_name': forms.TextInput(attrs={'class': 'span9'}),
        }
class CollectionForm(ModelForm):
    """Edit form for ``models.Collection``.

    The original defined an ``__init__`` that only called ``super`` — a
    no-op; it has been removed as the inherited constructor is identical.
    """
    class Meta:
        model = models.Collection
        # NOTE(review): excluding a field named 'collection' on the
        # Collection model itself looks odd — confirm against the model.
        exclude = ('collection', )
class SponsorForm(UserCollectionContext):
    """Edit form for ``models.Sponsor``; ``collections`` choices come from
    the user context (see ``UserCollectionContext``).

    The original defined an ``__init__`` that only called ``super`` — a
    no-op; it has been removed as the inherited constructor is identical.
    """
    class Meta:
        model = models.Sponsor
        exclude = ('acronym', 'country', 'state', 'city', 'address_number', 'address_complement',
                   'zip_code', 'phone', 'fax', 'cel')
        widgets = {
            'name': forms.TextInput(attrs={'class': 'span6'}),
            'complement': forms.Textarea(attrs={'class': 'span6'}),
            'address': forms.Textarea(attrs={'class': 'span6'}),
            'email': forms.TextInput(attrs={'class': 'span6'}),
        }
class LoginForm(forms.Form):
    """Simple authentication form; the username input also accepts an e-mail."""
    username = forms.CharField(widget=forms.TextInput(attrs={'class': 'input-xlarge focused span4', 'placeholder': _('Username or email')}))
    password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'input-xlarge focused span4', 'placeholder': _('Password')}))
class UserForm(ModelForm):
    """Edit form for ``models.User``; account/credential fields are excluded
    and managed elsewhere."""
    groups = forms.ModelMultipleChoiceField(Group.objects.all(),
        widget=forms.SelectMultiple(attrs={'title': _('Select one or more groups')}),
        required=False)

    class Meta:
        model = models.User
        exclude = ('is_staff', 'is_superuser', 'last_login', 'date_joined',
                   'user_permissions', 'email', 'password', 'is_active')

    def save(self, commit=True):
        """Save the user and its m2m relations when ``commit`` is true,
        otherwise return the unsaved instance."""
        # NOTE: the password is not handled by this form (field excluded).
        user = super(UserForm, self).save(commit=False)
        if commit:
            user.save()
            self.save_m2m()
        return user
class EventJournalForm(forms.Form):
    """Captures a journal publication-status change and its justification."""
    pub_status = forms.ChoiceField(widget=forms.Select, choices=choices.JOURNAL_PUBLICATION_STATUS)
    pub_status_reason = forms.CharField(widget=forms.Textarea)
class IssueBaseForm(forms.ModelForm):
    """Base class for all Issue kinds of forms.

    Keyword arguments (both optional, both consumed here):
        querysets -- dict mapping a field name to the queryset it should offer.
        params -- dict of arbitrary params, relevant to subclasses only.
    """
    section = forms.ModelMultipleChoiceField(
        models.Section.objects.none(),
        widget=forms.SelectMultiple(attrs={'title': _('Select one or more sections')}),
        required=False)

    class Meta:
        model = models.Issue
        fields = ('section', 'volume', 'publication_start_month',
                  'publication_end_month', 'publication_year', 'is_marked_up',
                  'use_license', 'total_documents', 'ctrl_vocabulary',
                  'editorial_standard', 'cover')

    def __init__(self, *args, **kwargs):
        kwargs.pop('params', None)  # only meaningful to subclasses; discard
        custom_querysets = kwargs.pop('querysets', None)
        super(IssueBaseForm, self).__init__(*args, **kwargs)
        for field_name in (custom_querysets or {}):
            self.fields[field_name].queryset = custom_querysets[field_name]
class RegularIssueForm(IssueBaseForm):
    """Form for regular (non-supplement, non-special) issues.

    ``params`` must contain the owning ``journal``; uniqueness of
    (year, volume/number) per journal is enforced in ``clean``.
    """
    class Meta(IssueBaseForm.Meta):
        fields = ('publication_year', 'volume', 'number', 'publication_start_month',
                  'publication_end_month', 'is_marked_up', 'use_license', 'total_documents',
                  'ctrl_vocabulary', 'editorial_standard', 'section', 'cover',)

    def __init__(self, *args, **kwargs):
        params = kwargs.pop('params', {})
        if 'journal' not in params:
            raise TypeError('RegularIssueForm() takes journal in params keyword argument. e.g: params={"journal":<journal>')
        self.journal = params['journal']
        super(RegularIssueForm, self).__init__(*args, **kwargs)

    def clean(self):
        volume = self.cleaned_data.get('volume')
        number = self.cleaned_data.get('number')
        publication_year = self.cleaned_data.get('publication_year')

        # At least one of volume/number identifies a regular issue.
        if not volume and not number:
            raise forms.ValidationError(
                _('You must complete at least one of two fields volume or number.'))

        try:
            existing = models.Issue.objects.get(number=number,
                                                volume=volume,
                                                publication_year=publication_year,
                                                journal=self.journal.pk)
        except models.Issue.DoesNotExist:
            # Perfect! A brand new issue!
            pass
        except MultipleObjectsReturned as e:
            # Data already inconsistent: more than one match in the database.
            logger.error('''
Multiple issues returned for the same number, volume and year for one journal.
Traceback: %s'''.strip() % e.message)
            raise forms.ValidationError({NON_FIELD_ERRORS: _('Issue with this Year and (Volume or Number) already exists for this Journal.')})
        else:
            # A match exists; only acceptable when it is the instance being edited.
            if self.instance is None or (self.instance.pk != existing.pk):
                raise forms.ValidationError({NON_FIELD_ERRORS:
                    _('Issue with this Year and (Volume or Number) already exists for this Journal.')})
        return self.cleaned_data
class SupplementIssueForm(IssueBaseForm):
    """Form for supplement issues (supplement of a volume or of a number).

    ``params`` must contain the owning ``journal``; uniqueness of
    (year, volume/number, suppl_text) per journal is enforced in ``clean``.
    """
    suppl_type = forms.ChoiceField(choices=choices.ISSUE_SUPPL_TYPE, widget=forms.RadioSelect, initial='volume')

    class Meta(IssueBaseForm.Meta):
        fields = ('publication_year', 'suppl_type', 'volume', 'number', 'suppl_text',
                  'publication_start_month', 'publication_end_month', 'is_marked_up',
                  'use_license', 'total_documents', 'ctrl_vocabulary', 'editorial_standard',
                  'section', 'cover',)

    def __init__(self, *args, **kwargs):
        params = kwargs.pop('params', {})
        if 'journal' not in params:
            raise TypeError('SupplementIssueForm() takes journal in params keyword argument. e.g: params={"journal":<journal>')
        else:
            self.journal = params['journal']
        super(SupplementIssueForm, self).__init__(*args, **kwargs)

    def clean(self):
        volume = self.cleaned_data.get('volume', '')
        number = self.cleaned_data.get('number', '')
        suppl_type = self.cleaned_data.get('suppl_type')
        publication_year = self.cleaned_data.get('publication_year')
        suppl_text = self.cleaned_data.get('suppl_text')

        if suppl_type == 'volume' and (volume == '' or number != ''):
            # BUG FIX: the user-facing messages said "filed" instead of "field".
            raise forms.ValidationError(_('You must complete the volume field. Number field must be empty.'))
        elif suppl_type == 'number' and (number == ''):
            # NOTE(review): the message promises the volume field is empty,
            # but volume is not actually checked here — confirm intent.
            raise forms.ValidationError(_('You must complete the number field. Volume field must be empty.'))
        else:
            try:
                issue = models.Issue.objects.get(volume=volume,
                                                 number=number,
                                                 publication_year=publication_year,
                                                 suppl_text=suppl_text,
                                                 journal=self.journal)
            except models.Issue.DoesNotExist:
                # Perfect! A brand new issue!
                pass
            except MultipleObjectsReturned as e:
                # Data already inconsistent: more than one match in the database.
                logger.error('''
Multiple issues returned for the same number, volume and year for one journal.
Traceback: %s'''.strip() % e.message)
                raise forms.ValidationError({NON_FIELD_ERRORS: _('Issue with this Year and (Volume or Number) already exists for this Journal.')})
            else:
                # A match exists; only acceptable when it is the instance being edited.
                if self.instance is None or (self.instance.pk != issue.pk):
                    raise forms.ValidationError({NON_FIELD_ERRORS:
                        _('Issue with this Year and (Volume or Number) already exists for this Journal.')})
        return self.cleaned_data
class SpecialIssueForm(RegularIssueForm):
    """Form for special issues: the issue number is pinned to the fixed
    'spe' marker and rendered read-only."""
    number = forms.CharField(required=True, initial=SPECIAL_ISSUE_FORM_FIELD_NUMBER,
                             widget=forms.TextInput(attrs={'readonly': 'readonly'}))

    class Meta(RegularIssueForm.Meta):
        exclude = ('number',)

    def __init__(self, *args, **kwargs):
        # RegularIssueForm expects 'params' to still be present in kwargs,
        # so it is only inspected (not popped) here.
        params = kwargs.get('params', {})
        if 'journal' not in params:
            raise TypeError('SpecialIssueForm() takes journal in params keyword argument. e.g: params={"journal":<journal>')
        self.journal = params['journal']
        super(SpecialIssueForm, self).__init__(*args, **kwargs)

    def clean_number(self):
        # The submitted value is ignored; a special issue's number is fixed.
        return SPECIAL_ISSUE_FORM_FIELD_NUMBER
###########################################
# Section
###########################################
class SectionTitleForm(ModelForm):
    """One translated title (text + language) of a journal section."""
    title = forms.CharField(widget=forms.TextInput(attrs={'class': 'checked_trans'}))
    class Meta:
        model = models.SectionTitle
        fields = ('title', 'language',)
class SectionForm(ModelForm):
    """Create/edit form for journal sections."""

    def __init__(self, *args, **kwargs):
        super(SectionForm, self).__init__(*args, **kwargs)
        # FIX: removed an unused local (``instance = getattr(self, 'instance', None)``).
        # ``legacy_code`` is never editable through this form.
        self.fields['legacy_code'].widget.attrs['readonly'] = True

    def clean_code(self):
        # Ignore any submitted value: ``code`` always mirrors the stored legacy code.
        return self.instance.legacy_code

    def save_all(self, journal):
        """Save the section bound to ``journal`` and return it."""
        section = self.save(commit=False)
        section.journal = journal
        section.save()
        return section

    class Meta:
        model = models.Section
        exclude = ('journal', 'code')
def get_all_section_forms(post_dict, journal, section):
    """Get all forms/formsets used by the Section form.

    :Parameters:
      - `post_dict`: The POST querydict, even if it is empty
      - `journal`: The journal instance the section is part of (kept for
        signature compatibility; not used here)
      - `section`: The section instance bound to the form. Must be
        a new instance when creating an empty form
    """
    form_args = []
    form_kwargs = {}
    if section:
        form_kwargs['instance'] = section
    if post_dict:
        form_args.append(post_dict)

    SectionTitleFormSet = inlineformset_factory(models.Section,
        models.SectionTitle, form=SectionTitleForm, extra=1,
        can_delete=True, formset=FirstFieldRequiredFormSet)

    return {
        'section_form': SectionForm(*form_args, **form_kwargs),
        'section_title_formset': SectionTitleFormSet(prefix='titles',
                                                     *form_args, **form_kwargs),
    }
###########################################
# Press Release
###########################################
class RegularPressReleaseForm(ModelForm):
    """Press-release form bound to a specific journal.

    The mandatory ``journal`` keyword argument is consumed here and must
    not reach the superclass ``__init__``; the ``issue`` choices are then
    narrowed to the available issues of that journal.
    """
    def __init__(self, *args, **kwargs):
        self.journal = kwargs.pop('journal', None)
        super(RegularPressReleaseForm, self).__init__(*args, **kwargs)
        if not self.journal:
            raise TypeError('missing journal argument')
        journal_issues = models.Issue.objects.available().filter(
            journal__pk=self.journal.pk)
        self.fields['issue'].queryset = journal_issues

    class Meta:
        model = models.RegularPressRelease
class AheadPressReleaseForm(ModelForm):
    """Press release for ahead-of-print content; ``journal`` is assigned by the caller."""
    class Meta:
        model = models.AheadPressRelease
        exclude = ('journal',)
class PressReleaseTranslationForm(ModelForm):
    """One translation of a press release.

    The mandatory ``journal`` keyword argument is consumed here and must
    not reach the superclass ``__init__``; the ``language`` choices are
    then narrowed to the languages of that journal.
    """
    def __init__(self, *args, **kwargs):
        self.journal = kwargs.pop('journal', None)
        super(PressReleaseTranslationForm, self).__init__(*args, **kwargs)
        if not self.journal:
            raise TypeError('missing journal argument')
        journal_languages = models.Language.objects.filter(
            journal__pk=self.journal.pk)
        self.fields['language'].queryset = journal_languages

    class Meta:
        model = models.PressReleaseTranslation
class PressReleaseArticleForm(ModelForm):
    """Links a press release to an article."""
    class Meta:
        model = models.PressReleaseArticle
class AheadPressReleaseArticleForm(ModelForm):
    """Links an ahead press release to an article; the article PID is mandatory."""
    article_pid = forms.CharField(required=True)

    class Meta:
        model = models.PressReleaseArticle

    def clean_article_pid(self):
        pid = self.cleaned_data['article_pid']
        if not pid:
            raise forms.ValidationError('Field is required')
        return pid
def get_all_pressrelease_forms(post_dict, journal, pressrelease):
    """Get all forms/formsets used by the PressRelease form.

    :Parameters:
      - ``post_dict``: The POST querydict, even if it is empty
      - ``journal``: The journal instance the press-release is part of
      - ``pressrelease``: The instance bound to the form. Must be
        a new instance when creating an empty form
    """
    form_args = []
    form_kwargs = {}
    if pressrelease:
        form_kwargs['instance'] = pressrelease
    if post_dict:
        form_args.append(post_dict)

    TranslationFormSet = inlineformset_factory(
        models.PressRelease,
        models.PressReleaseTranslation,
        form=PressReleaseTranslationForm,
        extra=1,
        can_delete=True,
        formset=FirstFieldRequiredFormSet)
    # Bind the journal into each translation form the formset instantiates.
    TranslationFormSet.form = staticmethod(
        curry(PressReleaseTranslationForm, journal=journal))

    ArticleFormSet = inlineformset_factory(
        models.PressRelease,
        models.PressReleaseArticle,
        form=PressReleaseArticleForm,
        extra=1,
        can_delete=True)

    return {
        'pressrelease_form': RegularPressReleaseForm(journal=journal,
                                                     *form_args,
                                                     **form_kwargs),
        'translation_formset': TranslationFormSet(prefix='translation',
                                                  *form_args,
                                                  **form_kwargs),
        'article_formset': ArticleFormSet(prefix='article',
                                          *form_args,
                                          **form_kwargs),
    }
def get_all_ahead_pressrelease_forms(post_dict, journal, pressrelease):
    """Get all forms/formsets used by the AheadPressRelease form.

    :Parameters:
      - ``post_dict``: The POST querydict, even if it is empty
      - ``journal``: The journal instance the press-release is part of
      - ``pressrelease``: The instance bound to the form. Must be
        a new instance when creating an empty form
    """
    form_args = []
    form_kwargs = {}
    if pressrelease:
        form_kwargs['instance'] = pressrelease
    if post_dict:
        form_args.append(post_dict)

    TranslationFormSet = inlineformset_factory(
        models.PressRelease,
        models.PressReleaseTranslation,
        form=PressReleaseTranslationForm,
        extra=1,
        can_delete=True,
        formset=FirstFieldRequiredFormSet)
    # Bind the journal into each translation form the formset instantiates.
    TranslationFormSet.form = staticmethod(
        curry(PressReleaseTranslationForm, journal=journal))

    ArticleFormSet = inlineformset_factory(
        models.PressRelease,
        models.PressReleaseArticle,
        form=AheadPressReleaseArticleForm,
        extra=1,
        can_delete=True,
        formset=FirstFieldRequiredFormSet)

    return {
        'pressrelease_form': AheadPressReleaseForm(*form_args,
                                                   **form_kwargs),
        'translation_formset': TranslationFormSet(prefix='translation',
                                                  *form_args,
                                                  **form_kwargs),
        'article_formset': ArticleFormSet(prefix='article',
                                          *form_args,
                                          **form_kwargs),
    }
class UserCollectionsForm(ModelForm):
    """Associates a user with a collection.

    The optional ``user`` keyword argument is consumed here (never forwarded
    to the superclass); when given, the ``collection`` choices are restricted
    to the collections managed by that user.
    """
    def __init__(self, *args, **kwargs):
        self._user = kwargs.pop('user', None)
        super(UserCollectionsForm, self).__init__(*args, **kwargs)
        if self._user:
            managed = models.Collection.objects.get_managed_by_user(self._user)
            self.fields['collection'].queryset = managed

    class Meta:
        model = models.UserCollections
        exclude = ('is_default', )
        widgets = {
            'collection': forms.Select(attrs={'class': 'span8'}),
        }
class JournalMissionForm(ModelForm):
    """ModelForm for a journal's mission statement."""
    class Meta:
        model = models.JournalMission
        widgets = {
            'description': forms.Textarea(attrs={'class': 'span6', 'rows': '3'}),
        }
class JournalTitleForm(ModelForm):
    """ModelForm for a journal title entry."""
    class Meta:
        model = models.JournalTitle
        widgets = {
            'title': forms.TextInput(attrs={'class': 'span6'}),
        }
class IssueTitleForm(ModelForm):
    """ModelForm for an issue title entry."""
    class Meta:
        model = models.IssueTitle
        widgets = {
            'title': forms.TextInput(attrs={'class': 'span12'}),
        }
# ## Formsets ##
class FirstFieldRequiredFormSet(BaseInlineFormSet):
    """
    Formset that rejects submissions unless at least one member form is
    filled in and not marked for deletion, in both create and edit views.

    Usage: ABCFormSet = inlineformset_factory(models.Wrappee, models.Wrapped,
        extra=1, formset=FirstFieldRequiredFormSet)
    """
    def clean(self):
        super(FirstFieldRequiredFormSet, self).clean()
        for member in self.forms:
            # Forms that failed validation may lack ``cleaned_data`` entirely.
            data = getattr(member, 'cleaned_data', None)
            if data and not data.get('DELETE', False):
                # One kept, non-empty form is enough: the formset is valid.
                return
        raise forms.ValidationError(_('Please fill in at least one form'))
| {
"content_hash": "999579d1e5e744299f047bce8efc712a",
"timestamp": "",
"source": "github",
"line_count": 732,
"max_line_length": 150,
"avg_line_length": 36.576502732240435,
"alnum_prop": 0.6011055501606035,
"repo_name": "jfunez/scielo-manager",
"id": "38b520ddfa67c34cbcd0602b413f7ea5c37f0494",
"size": "26790",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scielomanager/journalmanager/forms.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "109235"
},
{
"name": "JavaScript",
"bytes": "311673"
},
{
"name": "Python",
"bytes": "3230886"
},
{
"name": "Shell",
"bytes": "5834"
}
],
"symlink_target": ""
} |
"""Tests for lingvo.core.entmax."""
from lingvo import compat as tf
from lingvo.core import entmax
from lingvo.core import test_utils
class EntmaxTest(test_utils.TestCase):
  """Numerical checks for the entmax transformation and its loss."""
  # All expected values are generated based on official implementation.
  def test_entmax_support_generate_right_probability(self):
    """entmax_support with alpha=1.5 matches reference probabilities."""
    inputs = tf.constant([[0.5, 1.0, 2.0]] * 3)
    expected_prob = tf.constant([[0.02328045, 0.16207013, 0.8146494]] * 3)
    entmax_prob = entmax.entmax_support(inputs, 1.5, -1)
    with self.session(use_gpu=False) as sess:
      output = sess.run(entmax_prob)
    self.assertAllClose(expected_prob, output)
  def test_entmax_loss_generate_right_loss(self):
    """entmax_loss matches reference values, including under bfloat16."""
    inputs = tf.constant([[[0.5, 1.0, 2.0]] * 3], dtype='bfloat16')
    labels = tf.constant([[0, 1, 2]])
    # Convert to the matrix with given depth, e.g. the vocabulary size.
    labels = tf.one_hot(labels, depth=3)
    expected_loss = tf.constant([[1.5642307, 1.0642307, 0.06423065]],
                                dtype='bfloat16')
    entmax_loss_val = entmax.entmax_loss(labels, inputs, alpha=1.5)
    with self.session(use_gpu=False) as sess:
      output = sess.run(entmax_loss_val)
    self.assertAllClose(expected_loss, output)
  def test_entmax_loss_generate_right_gradient(self):
    """Gradient of summed entmax_loss w.r.t. inputs matches reference."""
    inputs = tf.constant([[0.5, 1.0, 2.0]] * 3)
    labels = tf.constant([0, 1, 2])
    expected_loss_gradient = tf.constant([[[-0.97671956, 0.16207013, 0.8146494],
                                           [0.02328045, -0.83792984, 0.8146494],
                                           [0.02328045, 0.16207013,
                                            -0.1853506]]])
    # Convert to the matrix with given depth, e.g. the vocabulary size.
    labels = tf.one_hot(labels, depth=3)
    expected_loss = tf.constant(2.692692)
    entmax_loss_val = tf.reduce_sum(entmax.entmax_loss(labels, inputs, 1.5))
    entmax_loss_gradient_val = tf.gradients(entmax_loss_val, inputs)
    with self.session(use_gpu=False) as sess:
      loss_output = sess.run(entmax_loss_val)
      gradient_output = sess.run(entmax_loss_gradient_val)
    self.assertAllClose(expected_loss, loss_output)
    self.assertAllClose(expected_loss_gradient, gradient_output)
# Standard test-module entry point.
if __name__ == '__main__':
  test_utils.main()
| {
"content_hash": "959b1325d032ab4622f70ceb570a0370",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 80,
"avg_line_length": 43.57692307692308,
"alnum_prop": 0.6345984112974404,
"repo_name": "tensorflow/lingvo",
"id": "4633328d92f1ad375f85a0c40c0e02a0979c4982",
"size": "2955",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lingvo/core/entmax_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "5163"
},
{
"name": "C++",
"bytes": "556344"
},
{
"name": "Dockerfile",
"bytes": "8484"
},
{
"name": "Jupyter Notebook",
"bytes": "36721"
},
{
"name": "Python",
"bytes": "9574124"
},
{
"name": "Shell",
"bytes": "50408"
},
{
"name": "Starlark",
"bytes": "182688"
},
{
"name": "TeX",
"bytes": "37275"
}
],
"symlink_target": ""
} |
"""
The MIT License (MIT)
Copyright (c) 2014 NTHUOJ team
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from datetime import date, datetime, timedelta
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager
from django.db import models
from utils.config_info import get_config_items, get_config
# Create your models here.
class UserManager(BaseUserManager):
    """Factory methods required by Django's custom-user machinery."""

    def create_user(self, username, password=None):
        """
        Creates and saves a User with the given username and password.

        The password is stored hashed via ``set_password``; the raw
        value is never written to the database. (The original code also
        passed the raw password to the model constructor, which was
        redundant because ``set_password`` immediately overwrote it.)
        """
        user = self.model(
            username=username
        )
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, username, password):
        """
        Creates and saves a superuser with the given username and password.

        Superusers are marked active and admin.
        """
        user = self.create_user(username=username, password=password)
        user.is_admin = True
        user.is_active = True
        user.save(using=self._db)
        return user
class User(AbstractBaseUser):
    """
    Custom auth user keyed by ``username``.

    ``user_level`` forms a simple privilege ladder used by the
    ``has_*_auth`` helpers: ADMIN > JUDGE > SUB_JUDGE > USER.
    """
    ADMIN = 'ADMIN'
    JUDGE = 'JUDGE'
    SUB_JUDGE = 'SUB_JUDGE'
    USER = 'USER'
    USER_LEVEL_CHOICE = (
        (ADMIN, 'Admin'),
        (JUDGE, 'Judge'),
        (SUB_JUDGE, 'Sub-judge'),
        (USER, 'User'),
    )
    # Read once from the configuration file at import time.
    THEME_CHOICE = tuple(get_config_items('web_theme'))
    DEFAULT_THEME = get_config('theme_settings', 'default')
    username = models.CharField(
        max_length=15, default='', unique=True, primary_key=True)
    email = models.CharField(max_length=100, default='')
    # ``auto_now_add`` and ``default`` are mutually exclusive in Django
    # (auto_now_add always wins), so the redundant ``default=date.today``
    # has been dropped.
    register_date = models.DateField(auto_now_add=True)
    user_level = models.CharField(
        max_length=9, choices=USER_LEVEL_CHOICE, default=USER)
    theme = models.CharField(
        max_length=10, choices=THEME_CHOICE, default=DEFAULT_THEME)
    USERNAME_FIELD = 'username'
    # New accounts start inactive until activated (see UserProfile).
    is_active = models.BooleanField(default=False)
    is_admin = models.BooleanField(default=False)
    objects = UserManager()
    def has_admin_auth(self):
        """True only for ADMIN users."""
        return self.user_level == self.ADMIN
    def has_judge_auth(self):
        """True for ADMIN and JUDGE users."""
        return self.user_level in (self.ADMIN, self.JUDGE)
    def has_subjudge_auth(self):
        """True for ADMIN, JUDGE and SUB_JUDGE users."""
        return self.user_level in (self.ADMIN, self.JUDGE, self.SUB_JUDGE)
    def get_full_name(self):
        """Required by Django's user API; the username is the only name."""
        return self.username
    def get_short_name(self):
        """Required by Django's user API; the username is the only name."""
        return self.username
    def has_perm(self, perm, obj=None):
        # Simplest possible answer: Yes, always (To be constructed later)
        return True
    def has_module_perms(self, app_label):
        # Simplest possible answer: Yes, always (To be constructed later)
        return True
    def __unicode__(self):
        # Python 2 string representation.
        return self.username
    @property
    def is_superuser(self):
        """Django-admin compatibility: superuser status mirrors is_admin."""
        return self.is_admin
    @property
    def is_staff(self):
        """Django-admin compatibility: staff status mirrors is_admin."""
        return self.is_admin
class Notification(models.Model):
    """A message addressed to a single user, with a read flag."""
    receiver = models.ForeignKey(User)
    message = models.TextField(null=True)
    # Whether the receiver has seen this notification.
    read = models.BooleanField(default=False)
    def __unicode__(self):
        # Python 2 string representation: the numeric primary key.
        return str(self.id)
def _default_active_time():
    """Activation-key expiry time: 15 minutes from now."""
    return datetime.now() + timedelta(minutes=15)


class UserProfile(models.Model):
    """Per-user data supporting the account-activation workflow."""
    user = models.OneToOneField(User)
    # Key mailed to the user to confirm the account.
    activation_key = models.CharField(max_length=40, blank=True)
    # default active time is 15 minutes. A named module-level function is
    # used instead of a lambda because Django's migration framework cannot
    # serialize lambdas as field defaults.
    active_time = models.DateTimeField(
        default=_default_active_time)
    def __unicode__(self):
        # Python 2 string representation.
        return self.user.username
    class Meta:
        verbose_name_plural = u'User profiles'
| {
"content_hash": "d393ec362a7c13d39cec1ede192b9027",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 86,
"avg_line_length": 31.44295302013423,
"alnum_prop": 0.671504802561366,
"repo_name": "geniusgordon/NTHUOJ_web",
"id": "9af5a879dbd6d789294add8bccba841710262608",
"size": "4685",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "users/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17559"
},
{
"name": "HTML",
"bytes": "121275"
},
{
"name": "JavaScript",
"bytes": "53271"
},
{
"name": "Python",
"bytes": "240925"
}
],
"symlink_target": ""
} |
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class AddInterface(forms.SelfHandlingForm):
    """Form that attaches a new interface (port) to a Neutron router."""
    subnet_id = forms.ChoiceField(label=_("Subnet"))
    ip_address = forms.IPField(
        label=_("IP Address (optional)"), required=False, initial="",
        help_text=_("You can specify an IP address of the interface "
                    "created if you want (e.g. 192.168.0.254)."),
        version=forms.IPv4 | forms.IPv6, mask=False)
    # Read-only fields shown for context; not editable by the user.
    router_name = forms.CharField(label=_("Router Name"),
                                  widget=forms.TextInput(
                                      attrs={'readonly': 'readonly'}))
    router_id = forms.CharField(label=_("Router ID"),
                                widget=forms.TextInput(
                                    attrs={'readonly': 'readonly'}))
    failure_url = 'horizon:project:routers:detail'
    def __init__(self, request, *args, **kwargs):
        """Populate the subnet choice list for the current tenant."""
        super(AddInterface, self).__init__(request, *args, **kwargs)
        c = self.populate_subnet_id_choices(request)
        self.fields['subnet_id'].choices = c
    def populate_subnet_id_choices(self, request):
        """Return (subnet_id, label) choices for the tenant's networks."""
        tenant_id = self.request.user.tenant_id
        networks = []
        try:
            networks = api.neutron.network_list_for_tenant(request, tenant_id)
        except Exception as e:
            msg = _('Failed to get network list %s') % e.message
            LOG.info(msg)
            messages.error(request, msg)
            redirect = reverse(self.failure_url,
                               args=[request.REQUEST['router_id']])
            exceptions.handle(request, msg, redirect=redirect)
            return
        choices = []
        for n in networks:
            # Label format: "net_name: cidr (subnet_name-or-id)".
            net_name = n.name + ': ' if n.name else ''
            choices += [(subnet.id,
                         '%s%s (%s)' % (net_name, subnet.cidr,
                                        subnet.name or subnet.id))
                        for subnet in n['subnets']]
        if choices:
            choices.insert(0, ("", _("Select Subnet")))
        else:
            choices.insert(0, ("", _("No subnets available")))
        return choices
    def handle(self, request, data):
        """Add the interface: by explicit port when an IP was given, else by subnet."""
        if data['ip_address']:
            port = self._add_interface_by_port(request, data)
        else:
            port = self._add_interface_by_subnet(request, data)
        msg = _('Interface added')
        if port:
            msg += ' ' + port.fixed_ips[0]['ip_address']
        LOG.debug(msg)
        messages.success(request, msg)
        return True
    def _add_interface_by_subnet(self, request, data):
        """Let Neutron allocate the port/IP from the chosen subnet."""
        router_id = data['router_id']
        try:
            router_inf = api.neutron.router_add_interface(
                request, router_id, subnet_id=data['subnet_id'])
        except Exception as e:
            # NOTE(review): control is assumed not to continue past
            # exceptions.handle() with a redirect — confirm.
            self._handle_error(request, router_id, e)
        try:
            port = api.neutron.port_get(request, router_inf['port_id'])
        except Exception:
            # Ignore an error when port_get() since it is just
            # to get an IP address for the interface.
            port = None
        return port
    def _add_interface_by_port(self, request, data):
        """Create a port with the requested fixed IP, then attach it to the router."""
        router_id = data['router_id']
        subnet_id = data['subnet_id']
        try:
            subnet = api.neutron.subnet_get(request, subnet_id)
        except Exception:
            msg = _('Unable to get subnet "%s"') % subnet_id
            self._handle_error(request, router_id, msg)
        try:
            ip_address = data['ip_address']
            body = {'network_id': subnet.network_id,
                    'fixed_ips': [{'subnet_id': subnet.id,
                                   'ip_address': ip_address}]}
            port = api.neutron.port_create(request, **body)
        except Exception as e:
            self._handle_error(request, router_id, e)
        try:
            api.neutron.router_add_interface(request, router_id,
                                             port_id=port.id)
        except Exception as e:
            # Clean up the port created above before reporting the failure.
            self._delete_port(request, port)
            self._handle_error(request, router_id, e)
        return port
    def _handle_error(self, request, router_id, reason):
        """Log the failure and delegate to exceptions.handle with a redirect."""
        msg = _('Failed to add_interface: %s') % reason
        LOG.info(msg)
        redirect = reverse(self.failure_url, args=[router_id])
        exceptions.handle(request, msg, redirect=redirect)
    def _delete_port(self, request, port):
        """Best-effort removal of a port created during a failed attach."""
        try:
            api.neutron.port_delete(request, port.id)
        except Exception:
            msg = _('Failed to delete port %s') % port.id
            LOG.info(msg)
            exceptions.handle(request, msg)
class SetGatewayForm(forms.SelfHandlingForm):
    """Form that sets an external network as a router's gateway."""
    network_id = forms.ChoiceField(label=_("External Network"))
    # Read-only fields shown for context; not editable by the user.
    router_name = forms.CharField(label=_("Router Name"),
                                  widget=forms.TextInput(
                                      attrs={'readonly': 'readonly'}))
    router_id = forms.CharField(label=_("Router ID"),
                                widget=forms.TextInput(
                                    attrs={'readonly': 'readonly'}))
    failure_url = 'horizon:project:routers:index'
    def __init__(self, request, *args, **kwargs):
        """Populate the external-network choice list."""
        super(SetGatewayForm, self).__init__(request, *args, **kwargs)
        c = self.populate_network_id_choices(request)
        self.fields['network_id'].choices = c
    def populate_network_id_choices(self, request):
        """Return (id, name) choices for networks flagged router:external."""
        search_opts = {'router:external': True}
        try:
            networks = api.neutron.network_list(request, **search_opts)
        except Exception as e:
            msg = _('Failed to get network list %s') % e.message
            LOG.info(msg)
            messages.error(request, msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
            return
        choices = [(network.id, network.name or network.id)
                   for network in networks]
        if choices:
            choices.insert(0, ("", _("Select network")))
        else:
            choices.insert(0, ("", _("No networks available")))
        return choices
    def handle(self, request, data):
        """Attach the selected external network as the router's gateway."""
        try:
            api.neutron.router_add_gateway(request,
                                           data['router_id'],
                                           data['network_id'])
            msg = _('Gateway interface is added')
            LOG.debug(msg)
            messages.success(request, msg)
            return True
        except Exception as e:
            msg = _('Failed to set gateway %s') % e.message
            LOG.info(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
| {
"content_hash": "cbab9df2f5e81b8b179afbef0e51257e",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 78,
"avg_line_length": 40.31609195402299,
"alnum_prop": 0.5415538132573058,
"repo_name": "spring-week-topos/horizon-week",
"id": "c783d5306f747d66c31443dea968b3a821b081a0",
"size": "7682",
"binary": false,
"copies": "4",
"ref": "refs/heads/spring-week",
"path": "openstack_dashboard/dashboards/project/routers/ports/forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "181499"
},
{
"name": "JavaScript",
"bytes": "1563050"
},
{
"name": "Python",
"bytes": "3099859"
},
{
"name": "Shell",
"bytes": "14643"
}
],
"symlink_target": ""
} |
# Shorten a URL using the TinyURL backend of the ``pyshorteners`` library.
from pyshorteners import Shortener
url = 'http://www.google.com'
shortener = Shortener('Tinyurl')
# NOTE(review): ``short()`` presumably makes a network request to the
# TinyURL service — requires connectivity.
print("My short url is {}".format(shortener.short(url)))
"content_hash": "d12ba97aa5d4dd05a2cf59a812336388",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 56,
"avg_line_length": 31,
"alnum_prop": 0.7419354838709677,
"repo_name": "harrymt/boeing-hackathon",
"id": "6f55a12271bf6ab9ad3d7695fcb5d0b901ca5b8e",
"size": "155",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shorten_url.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "113"
},
{
"name": "HTML",
"bytes": "2345"
},
{
"name": "JavaScript",
"bytes": "4593"
},
{
"name": "Python",
"bytes": "18719"
}
],
"symlink_target": ""
} |
from unittest import TestCase
from chatterbot.adapters.storage import JsonDatabaseAdapter
from chatterbot.conversation import Statement, Response
class JsonAdapterTestCase(TestCase):
    """Base case giving each test a throw-away JSON database adapter."""

    def setUp(self):
        """Create an adapter backed by a randomly-named database."""
        from random import randint

        # A random name keeps test runs from clashing with each other.
        self.adapter = JsonDatabaseAdapter(database=str(randint(0, 9000)))

    def tearDown(self):
        """Drop the per-test database."""
        self.adapter.drop()
class JsonDatabaseAdapterTestCase(JsonAdapterTestCase):
    """CRUD behaviour of the JSON storage adapter."""
    def test_count_returns_zero(self):
        """
        The count method should return a value of 0
        when nothing has been saved to the database.
        """
        self.assertEqual(self.adapter.count(), 0)
    def test_count_returns_value(self):
        """
        The count method should return a value of 1
        when one item has been saved to the database.
        """
        statement = Statement("Test statement")
        self.adapter.update(statement)
        self.assertEqual(self.adapter.count(), 1)
    def test_statement_not_found(self):
        """
        Test that None is returned by the find method
        when a matching statement is not found.
        """
        self.assertEqual(self.adapter.find("Non-existant"), None)
    def test_statement_found(self):
        """
        Test that a matching statement is returned
        when it exists in the database.
        """
        statement = Statement("New statement")
        self.adapter.update(statement)
        found_statement = self.adapter.find("New statement")
        self.assertNotEqual(found_statement, None)
        self.assertEqual(found_statement.text, statement.text)
    def test_update_adds_new_statement(self):
        """update() inserts a statement not previously stored."""
        statement = Statement("New statement")
        self.adapter.update(statement)
        statement_found = self.adapter.find("New statement")
        self.assertNotEqual(statement_found, None)
        self.assertEqual(statement_found.text, statement.text)
    def test_update_modifies_existing_statement(self):
        """update() on an existing statement persists newly added responses."""
        statement = Statement("New statement")
        self.adapter.update(statement)
        # Check the initial values
        found_statement = self.adapter.find(statement.text)
        self.assertEqual(
            len(found_statement.in_response_to), 0
        )
        # Update the statement value
        statement.add_response(
            Statement("New response")
        )
        self.adapter.update(statement)
        # Check that the values have changed
        found_statement = self.adapter.find(statement.text)
        self.assertEqual(
            len(found_statement.in_response_to), 1
        )
    def test_get_random_returns_statement(self):
        """get_random() returns a stored statement (only one exists here)."""
        statement = Statement("New statement")
        self.adapter.update(statement)
        random_statement = self.adapter.get_random()
        self.assertEqual(random_statement.text, statement.text)
    def test_find_returns_nested_responses(self):
        """find() rehydrates the statement's full response list."""
        response_list = [
            Response("Yes"),
            Response("No")
        ]
        statement = Statement(
            "Do you like this?",
            in_response_to=response_list
        )
        self.adapter.update(statement)
        result = self.adapter.find(statement.text)
        self.assertIn("Yes", result.in_response_to)
        self.assertIn("No", result.in_response_to)
    def test_multiple_responses_added_on_update(self):
        """Both responses survive a round-trip through the adapter."""
        statement = Statement(
            "You are welcome.",
            in_response_to=[
                Response("Thank you."),
                Response("Thanks.")
            ]
        )
        self.adapter.update(statement)
        result = self.adapter.find(statement.text)
        self.assertEqual(len(result.in_response_to), 2)
        self.assertIn(statement.in_response_to[0], result.in_response_to)
        self.assertIn(statement.in_response_to[1], result.in_response_to)
    def test_update_saves_statement_with_multiple_responses(self):
        """A statement with several responses is stored intact."""
        statement = Statement(
            "You are welcome.",
            in_response_to=[
                Response("Thank you."),
                Response("Thanks."),
            ]
        )
        self.adapter.update(statement)
        response = self.adapter.find(statement.text)
        self.assertEqual(len(response.in_response_to), 2)
    def test_getting_and_updating_statement(self):
        """Duplicate responses collapse into one entry with occurrence 2."""
        statement = Statement("Hi")
        self.adapter.update(statement)
        statement.add_response(Response("Hello"))
        statement.add_response(Response("Hello"))
        self.adapter.update(statement)
        response = self.adapter.find(statement.text)
        self.assertEqual(len(response.in_response_to), 1)
        self.assertEqual(response.in_response_to[0].occurrence, 2)
    def test_deserialize_responses(self):
        """deserialize_responses() accepts a list of response dicts."""
        response_list = [
            {"text": "Test", "occurrence": 3},
            {"text": "Testing", "occurrence": 1},
        ]
        results = self.adapter.deserialize_responses(response_list)
        self.assertEqual(len(results), 2)
class JsonDatabaseAdapterFilterTestCase(JsonAdapterTestCase):
    """Behaviour of the adapter's filter() query interface."""
    def setUp(self):
        super(JsonDatabaseAdapterFilterTestCase, self).setUp()
        # Two fixture statements shared by the tests below.
        self.statement1 = Statement(
            "Testing...",
            in_response_to=[
                Response("Why are you counting?")
            ]
        )
        self.statement2 = Statement(
            "Testing one, two, three.",
            in_response_to=[
                Response("Testing...")
            ]
        )
    def test_filter_text_no_matches(self):
        """A text filter with no matching statement yields an empty result."""
        self.adapter.update(self.statement1)
        results = self.adapter.filter(text="Howdy")
        self.assertEqual(len(results), 0)
    def test_filter_in_response_to_no_matches(self):
        """An in_response_to filter with no match yields an empty result."""
        self.adapter.update(self.statement1)
        results = self.adapter.filter(
            in_response_to=[Response("Maybe")]
        )
        self.assertEqual(len(results), 0)
    def test_filter_equal_results(self):
        """Filtering on an empty response list matches all such statements."""
        statement1 = Statement(
            "Testing...",
            in_response_to=[]
        )
        statement2 = Statement(
            "Testing one, two, three.",
            in_response_to=[]
        )
        self.adapter.update(statement1)
        self.adapter.update(statement2)
        results = self.adapter.filter(in_response_to=[])
        self.assertEqual(len(results), 2)
        self.assertIn(statement1, results)
        self.assertIn(statement2, results)
    def test_filter_contains_result(self):
        """The __contains lookup matches a statement by response text."""
        self.adapter.update(self.statement1)
        self.adapter.update(self.statement2)
        results = self.adapter.filter(
            in_response_to__contains="Why are you counting?"
        )
        self.assertEqual(len(results), 1)
        self.assertIn(self.statement1, results)
    def test_filter_contains_no_result(self):
        """The __contains lookup returns [] when no response matches."""
        self.adapter.update(self.statement1)
        results = self.adapter.filter(
            in_response_to__contains="How do you do?"
        )
        self.assertEqual(results, [])
    def test_filter_multiple_parameters(self):
        """Multiple filter parameters are combined (AND semantics)."""
        self.adapter.update(self.statement1)
        self.adapter.update(self.statement2)
        results = self.adapter.filter(
            text="Testing...",
            in_response_to__contains="Why are you counting?"
        )
        self.assertEqual(len(results), 1)
        self.assertIn(self.statement1, results)
    def test_filter_multiple_parameters_no_results(self):
        """Combined parameters that match nothing yield an empty result."""
        self.adapter.update(self.statement1)
        self.adapter.update(self.statement2)
        results = self.adapter.filter(
            text="Test",
            in_response_to__contains="Not an existing response."
        )
        self.assertEqual(len(results), 0)
    def test_filter_no_parameters(self):
        """
        If no parameters are passed to the filter,
        then all statements should be returned.
        """
        statement1 = Statement("Testing...")
        statement2 = Statement("Testing one, two, three.")
        self.adapter.update(statement1)
        self.adapter.update(statement2)
        results = self.adapter.filter()
        self.assertEqual(len(results), 2)
    def test_filter_returns_statement_with_multiple_responses(self):
        """A filter hit keeps the statement's complete response list."""
        statement = Statement(
            "You are welcome.",
            in_response_to=[
                Response("Thanks."),
                Response("Thank you.")
            ]
        )
        self.adapter.update(statement)
        response = self.adapter.filter(
            in_response_to__contains="Thanks."
        )
        # Get the first response
        response = response[0]
        self.assertEqual(len(response.in_response_to), 2)
    def test_response_list_in_results(self):
        """
        If a statement with response values is found using
        the filter method, they should be returned as
        response objects.
        """
        statement = Statement(
            "The first is to help yourself, the second is to help others.",
            in_response_to=[
                Response("Why do people have two hands?")
            ]
        )
        self.adapter.update(statement)
        found = self.adapter.filter(text=statement.text)
        self.assertEqual(len(found[0].in_response_to), 1)
        self.assertEqual(type(found[0].in_response_to[0]), Response)
class ReadOnlyJsonDatabaseAdapterTestCase(JsonAdapterTestCase):
    """With ``read_only`` set, update() must not write anything."""
    def test_update_does_not_add_new_statement(self):
        """A read-only adapter does not insert new statements."""
        self.adapter.read_only = True
        statement = Statement("New statement")
        self.adapter.update(statement)
        statement_found = self.adapter.find("New statement")
        self.assertEqual(statement_found, None)
    def test_update_does_not_modify_existing_statement(self):
        """A read-only adapter does not persist changes to stored statements."""
        statement = Statement("New statement")
        # Written while the adapter is still writable.
        self.adapter.update(statement)
        self.adapter.read_only = True
        statement.add_response(
            Statement("New response")
        )
        self.adapter.update(statement)
        statement_found = self.adapter.find("New statement")
        self.assertEqual(statement_found.text, statement.text)
        self.assertEqual(
            len(statement_found.in_response_to), 0
        )
| {
"content_hash": "54b3c2ffd086a00fb8032ea535907238",
"timestamp": "",
"source": "github",
"line_count": 335,
"max_line_length": 75,
"avg_line_length": 30.946268656716416,
"alnum_prop": 0.60673290247902,
"repo_name": "DarkmatterVale/ChatterBot",
"id": "9198e991238196c5bed597eff0b14c9367860c9b",
"size": "10367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/storage_adapter_tests/test_jsondb_adapter.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "113984"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Alter Profile.user: CASCADE delete with related_name 'profile'."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("portal", "0003_auto_20180411_0101"),
    ]
    operations = [
        migrations.AlterField(
            model_name="profile",
            name="user",
            field=models.OneToOneField(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="profile",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
    ]
| {
"content_hash": "ec257a3dae34a7d53a51e65e0476a7db",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 66,
"avg_line_length": 26.869565217391305,
"alnum_prop": 0.5938511326860841,
"repo_name": "huangsam/chowist",
"id": "670ac6f6a63467e06597dab4747c96fbfe32cf40",
"size": "667",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "portal/migrations/0004_auto_20200710_0340.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "821"
},
{
"name": "Dockerfile",
"bytes": "336"
},
{
"name": "HTML",
"bytes": "14998"
},
{
"name": "Python",
"bytes": "56434"
},
{
"name": "Shell",
"bytes": "1463"
}
],
"symlink_target": ""
} |
import collections
import functools
import six
from heat.engine import properties
from heat.engine.resources.software_config import software_config as sc
from heat.engine.resources.software_config import software_deployment as sd
class StructuredConfig(sc.SoftwareConfig):
    '''
    This resource is like OS::Heat::SoftwareConfig except that the config
    property is represented by a Map rather than a String.
    This is useful for configuration tools which use YAML or JSON as their
    configuration syntax. The resulting configuration is transferred,
    stored and returned by the software_configs API as parsed JSON.
    '''
    PROPERTIES = (
        GROUP,
        CONFIG,
        OPTIONS,
        INPUTS,
        OUTPUTS
    ) = (
        sc.SoftwareConfig.GROUP,
        sc.SoftwareConfig.CONFIG,
        sc.SoftwareConfig.OPTIONS,
        sc.SoftwareConfig.INPUTS,
        sc.SoftwareConfig.OUTPUTS
    )
    # All properties except CONFIG reuse the parent's schema; CONFIG is
    # redeclared as a MAP instead of a string.
    properties_schema = {
        GROUP: sc.SoftwareConfig.properties_schema[GROUP],
        OPTIONS: sc.SoftwareConfig.properties_schema[OPTIONS],
        INPUTS: sc.SoftwareConfig.properties_schema[INPUTS],
        OUTPUTS: sc.SoftwareConfig.properties_schema[OUTPUTS],
        CONFIG: properties.Schema(
            properties.Schema.MAP,
            _('Map representing the configuration data structure which will '
              'be serialized to JSON format.')
        )
    }
class StructuredDeployment(sd.SoftwareDeployment):
    '''
    A deployment resource like OS::Heat::SoftwareDeployment, but which
    performs input value substitution on the config defined by a
    OS::Heat::StructuredConfig resource.
    Some configuration tools have no concept of inputs, so the input value
    substitution needs to occur in the deployment resource. An example of this
    is the JSON metadata consumed by the cfn-init tool.
    Where the config contains {get_input: input_name} this will be substituted
    with the value of input_name in this resource's input_values. If get_input
    needs to be passed through to the substituted configuration then a
    different input_key property value can be specified.
    '''
    PROPERTIES = (
        CONFIG,
        SERVER,
        INPUT_VALUES,
        DEPLOY_ACTIONS,
        NAME,
        SIGNAL_TRANSPORT,
        INPUT_KEY
    ) = (
        sd.SoftwareDeployment.CONFIG,
        sd.SoftwareDeployment.SERVER,
        sd.SoftwareDeployment.INPUT_VALUES,
        sd.SoftwareDeployment.DEPLOY_ACTIONS,
        sd.SoftwareDeployment.NAME,
        sd.SoftwareDeployment.SIGNAL_TRANSPORT,
        'input_key'
    )
    # Inherit the parent's property schemas, adding only INPUT_KEY.
    _sd_ps = sd.SoftwareDeployment.properties_schema
    properties_schema = {
        CONFIG: _sd_ps[CONFIG],
        SERVER: _sd_ps[SERVER],
        INPUT_VALUES: _sd_ps[INPUT_VALUES],
        DEPLOY_ACTIONS: _sd_ps[DEPLOY_ACTIONS],
        SIGNAL_TRANSPORT: _sd_ps[SIGNAL_TRANSPORT],
        NAME: _sd_ps[NAME],
        INPUT_KEY: properties.Schema(
            properties.Schema.STRING,
            _('Name of key to use for substituting inputs during deployment'),
            default='get_input',
        )
    }
    def _build_derived_config(self, action, source,
                              derived_inputs, derived_options):
        """Substitute derived input values into the structured config."""
        cfg = source.get(sc.SoftwareConfig.CONFIG)
        input_key = self.properties.get(self.INPUT_KEY)
        inputs = dict((i['name'], i['value']) for i in derived_inputs)
        return self.parse(inputs, input_key, cfg)
    @staticmethod
    def parse(inputs, input_key, snippet):
        """
        Recursively replace {input_key: name} mappings in ``snippet``
        with ``inputs[name]`` (None when the name is unknown).
        Nested mappings and iterables are walked; everything else is
        returned unchanged.
        """
        parse = functools.partial(
            StructuredDeployment.parse, inputs, input_key)
        if isinstance(snippet, collections.Mapping):
            if len(snippet) == 1:
                fn_name, args = next(six.iteritems(snippet))
                if fn_name == input_key:
                    # Python 2 codebase: basestring covers str and unicode.
                    if isinstance(args, basestring):
                        return inputs.get(args)
            return dict((k, parse(v)) for k, v in six.iteritems(snippet))
        elif (not isinstance(snippet, basestring) and
              isinstance(snippet, collections.Iterable)):
            return [parse(v) for v in snippet]
        else:
            return snippet
class StructuredDeployments(sd.SoftwareDeployments):
    """
    Multi-server variant that wires each property through to a nested
    OS::Heat::StructuredDeployment per server.
    """
    PROPERTIES = (
        SERVERS,
        CONFIG,
        INPUT_VALUES,
        DEPLOY_ACTIONS,
        NAME,
        SIGNAL_TRANSPORT,
        INPUT_KEY,
    ) = (
        sd.SoftwareDeployments.SERVERS,
        sd.SoftwareDeployments.CONFIG,
        sd.SoftwareDeployments.INPUT_VALUES,
        sd.SoftwareDeployments.DEPLOY_ACTIONS,
        sd.SoftwareDeployments.NAME,
        sd.SoftwareDeployments.SIGNAL_TRANSPORT,
        StructuredDeployment.INPUT_KEY
    )
    # Inherit the parent's property schemas, adding only INPUT_KEY.
    _sds_ps = sd.SoftwareDeployments.properties_schema
    properties_schema = {
        SERVERS: _sds_ps[SERVERS],
        CONFIG: _sds_ps[CONFIG],
        INPUT_VALUES: _sds_ps[INPUT_VALUES],
        DEPLOY_ACTIONS: _sds_ps[DEPLOY_ACTIONS],
        SIGNAL_TRANSPORT: _sds_ps[SIGNAL_TRANSPORT],
        NAME: _sds_ps[NAME],
        INPUT_KEY: StructuredDeployment.properties_schema[INPUT_KEY],
    }
    def _build_resource_definition(self, include_all=False):
        """Return the nested StructuredDeployment resource definition."""
        # ``include_all`` is accepted for interface compatibility with the
        # parent class but is not used here.
        p = self.properties
        return {
            self.RESOURCE_DEF_TYPE: 'OS::Heat::StructuredDeployment',
            self.RESOURCE_DEF_PROPERTIES: {
                self.CONFIG: p[self.CONFIG],
                self.INPUT_VALUES: p[self.INPUT_VALUES],
                self.DEPLOY_ACTIONS: p[self.DEPLOY_ACTIONS],
                self.SIGNAL_TRANSPORT: p[self.SIGNAL_TRANSPORT],
                self.NAME: p[self.NAME],
                self.INPUT_KEY: p[self.INPUT_KEY],
            }
        }
def resource_mapping():
    """Return the registry of Heat resource type names to classes."""
    mapping = dict()
    mapping['OS::Heat::StructuredConfig'] = StructuredConfig
    mapping['OS::Heat::StructuredDeployment'] = StructuredDeployment
    mapping['OS::Heat::StructuredDeployments'] = StructuredDeployments
    return mapping
| {
"content_hash": "ec576ec517ca7872f7b50295024a639f",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 78,
"avg_line_length": 33.378531073446325,
"alnum_prop": 0.6327014218009479,
"repo_name": "redhat-openstack/heat",
"id": "a44f7c14244d670d98afb9f55917c1f0f9bdf239",
"size": "6528",
"binary": false,
"copies": "1",
"ref": "refs/heads/f22-patches",
"path": "heat/engine/resources/software_config/structured_config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4827027"
},
{
"name": "Shell",
"bytes": "26720"
}
],
"symlink_target": ""
} |
"""
38. User-registered management commands
The manage.py utility provides a number of useful commands for managing a
Django project. If you want to add a utility command of your own, you can.
The user-defined command 'dance' is defined in the management/commands
subdirectory of this test application. It is a simple command that responds
with a printed message when invoked.
For more details on how to define your own manage.py commands, look at the
django.core.management.commands directory. This directory contains the
definitions for the base Django manage.py commands.
"""
# Doctest suite: Django's test runner collects this module-level __test__
# mapping and executes each value as a doctest.  The string below is
# runtime data -- its exact text (including expected output) matters.
__test__ = {'API_TESTS': """
>>> from django.core import management
# Invoke a simple user-defined command
>>> management.call_command('dance')
I don't feel like dancing.
# Invoke a command that doesn't exist
>>> management.call_command('explode')
Traceback (most recent call last):
...
CommandError: Unknown command: 'explode'
"""}
| {
"content_hash": "8ea14d0e963d3e65fe6bd52f35112a0b",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 76,
"avg_line_length": 30.733333333333334,
"alnum_prop": 0.7613882863340564,
"repo_name": "rawwell/django",
"id": "6db4b049df3294e85a4a740d952102535dda06e1",
"size": "922",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "tests/modeltests/user_commands/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "66105"
},
{
"name": "Python",
"bytes": "2924201"
},
{
"name": "Shell",
"bytes": "804"
}
],
"symlink_target": ""
} |
"""
MoinMoin - Teud Macro
This integrates the "Teud" documentation system into
MoinMoin. Besides Teud, you need 4XSLT.
Teud: http://purl.net/wiki/python/TeudProject
4XSLT: http://4suite.org/
@copyright: 2001 Juergen Hermann <jh@web.de>
@license: GNU GPL, see COPYING for details.
"""
# Record any ImportError instead of failing at module import time; the
# macro reports the stored error text when it is rendered (Python 2
# `except Exc, name` syntax).
_imperr = None
try:
    from teud import xmldoc, pydoc
except ImportError, _imperr:
    pass
try:
    from xml.xslt.Processor import Processor
except ImportError, _imperr:
    pass
from MoinMoin import config, wikiutil
Dependencies = ["time"]
def macro_TeudView(macro):
    """Render Teud-generated Python module documentation into a wiki page.

    With a ``module`` request argument, documents that module and builds a
    breadcrumb navigation line; without it, renders the module index.  The
    XML produced by teud is transformed to HTML via the webde.xsl
    stylesheet using 4XSLT.
    """
    # Report a stored import failure (teud / 4XSLT missing) at render time.
    if _imperr: return "Error in TeudView macro: " + str(_imperr)
    #dtdfile = xmldoc.getDTDPath()
    xslfile = xmldoc.getDataPath('webde.xsl')
    pagename = macro.formatter.page.page_name
    if 'module' in macro.request.args:
        modname = macro.request.args["module"]
        try:
            obj = pydoc.locate(modname)
        except pydoc.ErrorDuringImport, value:
            return "Error while loading module %s: %s" % (modname, value)
        else:
            xmlstr = xmldoc.xml.document(obj, encoding=config.charset)
        # Breadcrumb: link every dotted ancestor of the module path.
        navigation = '<a href="%s">Index</a>' % pagename
        pathlen = modname.count('.')
        if pathlen:
            navigation = navigation + ' | '
            modparts = modname.split('.')
            for pathidx in range(pathlen):
                path = '.'.join(modparts[:pathidx+1])
                navigation = navigation + '<a href="%s?module=%s">%s</a>' % (
                    pagename, path, modparts[pathidx])
                # NOTE(review): pathidx < pathlen is always true inside
                # range(pathlen), so a trailing '.' is appended after every
                # segment -- possibly intended as the separator before the
                # final (unlinked) module name; confirm against rendering.
                if pathidx < pathlen:
                    navigation = navigation + '.'
        navigation = navigation + '<hr size="1">'
    else:
        # generate index
        xmlstr = xmldoc.xml.document(None, encoding=config.charset)
        navigation = ''
    processor = Processor()
    processor.appendStylesheetFile(xslfile)
    try:
        result = processor.runString(xmlstr,
            topLevelParams={
                'uri-prefix': pagename + "?module=",
                'uri-suffix': "",
            }
        )
    except:
        # Dump the offending XML for debugging, then re-raise.
        print wikiutil.escape(xmlstr)
        raise
    return navigation + result
| {
"content_hash": "3432c013d69ba3263ad493d983a8780d",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 77,
"avg_line_length": 29.186666666666667,
"alnum_prop": 0.5879396984924623,
"repo_name": "RealTimeWeb/wikisite",
"id": "4b7f7b5d454339f801dd0ed2d25de1cf2fbd4bd2",
"size": "2218",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MoinMoin/macro/TeudView.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "49395"
},
{
"name": "CSS",
"bytes": "204104"
},
{
"name": "ColdFusion",
"bytes": "142312"
},
{
"name": "Java",
"bytes": "491798"
},
{
"name": "JavaScript",
"bytes": "2107106"
},
{
"name": "Lasso",
"bytes": "23464"
},
{
"name": "Makefile",
"bytes": "4950"
},
{
"name": "PHP",
"bytes": "144585"
},
{
"name": "Perl",
"bytes": "44627"
},
{
"name": "Python",
"bytes": "7647140"
},
{
"name": "Shell",
"bytes": "335"
}
],
"symlink_target": ""
} |
from __future__ import division, print_function
import logging
import numpy as np
FORMAT = '[%(asctime)s] %(name)-15s %(message)s'
DATEFMT = "%H:%M:%S"
logging.basicConfig(format=FORMAT, datefmt=DATEFMT, level=logging.INFO)
import os
import theano
import theano.tensor as T
import fuel
import ipdb
import time
import cPickle as pickle
from argparse import ArgumentParser
from theano import tensor
from fuel.streams import DataStream
from fuel.schemes import SequentialScheme
from fuel.transformers import Flatten
from blocks.algorithms import GradientDescent, CompositeRule, StepClipping, RMSProp, Adam
from blocks.bricks import Tanh, Identity
from blocks.bricks.cost import BinaryCrossEntropy
from blocks.bricks.recurrent import SimpleRecurrent, LSTM
from blocks.initialization import Constant, IsotropicGaussian, Orthogonal
from blocks.filter import VariableFilter
from blocks.graph import ComputationGraph
from blocks.roles import PARAMETER
from blocks.monitoring import aggregation
from blocks.extensions import FinishAfter, Timing, Printing, ProgressBar
from blocks.extensions.saveload import Checkpoint
from blocks.extensions.monitoring import DataStreamMonitoring, TrainingDataMonitoring
from blocks.main_loop import MainLoop
from blocks.model import Model
try:
from blocks.extras import Plot
except ImportError:
pass
import draw.datasets as datasets
from draw.draw import *
from draw.samplecheckpoint import SampleCheckpoint
from draw.partsonlycheckpoint import PartsOnlyCheckpoint
sys.setrecursionlimit(100000)
#----------------------------------------------------------------------------
def main(name, dataset, epochs, batch_size, learning_rate, attention,
         n_iter, enc_dim, dec_dim, z_dim, oldmodel, live_plotting):
    """Build and train a DRAW model on *dataset* with Blocks/Theano.

    Creates flattened data streams, configures the (optional attention)
    reader/writer, assembles the DRAW model and cost, then runs the Blocks
    main loop with monitoring and checkpoint extensions.  *oldmodel* may
    name a pickle from a previous run to warm-start the parameters.
    """
    # Load the dataset splits and flatten each example to a vector.
    image_size, channels, data_train, data_valid, data_test = datasets.get_data(dataset)
    train_stream = Flatten(DataStream.default_stream(data_train, iteration_scheme=SequentialScheme(data_train.num_examples, batch_size)))
    valid_stream = Flatten(DataStream.default_stream(data_valid, iteration_scheme=SequentialScheme(data_valid.num_examples, batch_size)))
    test_stream = Flatten(DataStream.default_stream(data_test, iteration_scheme=SequentialScheme(data_test.num_examples, batch_size)))
    if name is None:
        name = dataset
    img_height, img_width = image_size
    x_dim = channels * img_height * img_width
    rnninits = {
        #'weights_init': Orthogonal(),
        'weights_init': IsotropicGaussian(0.01),
        'biases_init': Constant(0.),
    }
    inits = {
        #'weights_init': Orthogonal(),
        'weights_init': IsotropicGaussian(0.01),
        'biases_init': Constant(0.),
    }
    # Configure attention mechanism: "" means full (non-attentive)
    # read/write; otherwise "readN,writeN" window sizes.
    if attention != "":
        read_N, write_N = attention.split(',')
        read_N = int(read_N)
        write_N = int(write_N)
        read_dim = 2 * channels * read_N ** 2
        reader = AttentionReader(x_dim=x_dim, dec_dim=dec_dim,
                                 channels=channels, width=img_width, height=img_height,
                                 N=read_N, **inits)
        writer = AttentionWriter(input_dim=dec_dim, output_dim=x_dim,
                                 channels=channels, width=img_width, height=img_height,
                                 N=write_N, **inits)
        attention_tag = "r%d-w%d" % (read_N, write_N)
    else:
        read_dim = 2*x_dim
        reader = Reader(x_dim=x_dim, dec_dim=dec_dim, **inits)
        writer = Writer(input_dim=dec_dim, output_dim=x_dim, **inits)
        attention_tag = "full"
    #----------------------------------------------------------------------
    # NOTE(review): redundant -- `name` was already defaulted above.
    if name is None:
        name = dataset
    # Learning rate
    def lr_tag(value):
        """ Convert a float into a short tag-usable string representation. E.g.:
            0.1 -> 11
            0.01 -> 12
            0.001 -> 13
            0.005 -> 53
        """
        exp = np.floor(np.log10(value))
        leading = ("%e"%value)[0]
        return "%s%d" % (leading, -exp)
    lr_str = lr_tag(learning_rate)
    subdir = name + "-" + time.strftime("%Y%m%d-%H%M%S");
    longname = "%s-%s-t%d-enc%d-dec%d-z%d-lr%s" % (dataset, attention_tag, n_iter, enc_dim, dec_dim, z_dim, lr_str)
    pickle_file = subdir + "/" + longname + ".pkl"
    # Echo the experiment configuration.
    print("\nRunning experiment %s" % longname)
    print(" dataset: %s" % dataset)
    print(" subdirectory: %s" % subdir)
    print(" learning rate: %g" % learning_rate)
    print(" attention: %s" % attention)
    print(" n_iterations: %d" % n_iter)
    print(" encoder dimension: %d" % enc_dim)
    print(" z dimension: %d" % z_dim)
    print(" decoder dimension: %d" % dec_dim)
    print(" batch size: %d" % batch_size)
    print(" epochs: %d" % epochs)
    print()
    #----------------------------------------------------------------------
    # LSTM gates need 4x the state dimension out of the input MLPs.
    encoder_rnn = LSTM(dim=enc_dim, name="RNN_enc", **rnninits)
    decoder_rnn = LSTM(dim=dec_dim, name="RNN_dec", **rnninits)
    encoder_mlp = MLP([Identity()], [(read_dim+dec_dim), 4*enc_dim], name="MLP_enc", **inits)
    decoder_mlp = MLP([Identity()], [ z_dim, 4*dec_dim], name="MLP_dec", **inits)
    q_sampler = Qsampler(input_dim=enc_dim, output_dim=z_dim, **inits)
    draw = DrawModel(
        n_iter,
        reader=reader,
        encoder_mlp=encoder_mlp,
        encoder_rnn=encoder_rnn,
        sampler=q_sampler,
        decoder_mlp=decoder_mlp,
        decoder_rnn=decoder_rnn,
        writer=writer)
    draw.initialize()
    #------------------------------------------------------------------------
    # Cost: reconstruction cross-entropy plus the summed per-step KL terms
    # (the variational NLL bound).
    x = tensor.matrix(u'features')
    x_recons, h_enc, c_enc, z, kl_terms, i_dec, h_dec, c_dec, center_y, center_x, delta = draw.reconstruct(x)
    recons_term = BinaryCrossEntropy().apply(x, x_recons)
    recons_term.name = "recons_term"
    cost = recons_term + kl_terms.sum(axis=0).mean()
    cost.name = "nll_bound"
    #------------------------------------------------------------
    cg = ComputationGraph([cost])
    params = VariableFilter(roles=[PARAMETER])(cg.variables)
    algorithm = GradientDescent(
        cost=cost,
        parameters=params,
        step_rule=CompositeRule([
            StepClipping(10.),
            Adam(learning_rate),
        ]),
        on_unused_sources='ignore',
        #step_rule=RMSProp(learning_rate),
        #step_rule=Momentum(learning_rate=learning_rate, momentum=0.95)
    )
    #------------------------------------------------------------------------
    # Setup monitors: the bound plus one KL term per DRAW iteration.
    monitors = [cost]
    for t in range(n_iter):
        kl_term_t = kl_terms[t,:].mean()
        kl_term_t.name = "kl_term_%d" % t
        #x_recons_t = T.nnet.sigmoid(c[t,:,:])
        #recons_term_t = BinaryCrossEntropy().apply(x, x_recons_t)
        #recons_term_t = recons_term_t.mean()
        #recons_term_t.name = "recons_term_%d" % t
        monitors +=[kl_term_t]
    train_monitors = monitors[:]
    train_monitors += [aggregation.mean(algorithm.total_gradient_norm)]
    train_monitors += [aggregation.mean(algorithm.total_step_norm)]
    # Live plotting...
    plot_channels = [
        ["train_nll_bound", "test_nll_bound"],
        ["train_kl_term_%d" % t for t in range(n_iter)],
        #["train_recons_term_%d" % t for t in range(n_iter)],
        ["train_total_gradient_norm", "train_total_step_norm"]
    ]
    #------------------------------------------------------------
    if not os.path.exists(subdir):
        os.makedirs(subdir)
    plotting_extensions = []
    if live_plotting:
        plotting_extensions = [
            Plot(name, channels=plot_channels)
        ]
    # NOTE(review): valid_stream is built but its monitoring extension is
    # commented out below -- only the test stream is monitored.
    main_loop = MainLoop(
        model=Model(cost),
        data_stream=train_stream,
        algorithm=algorithm,
        extensions=[
            Timing(),
            FinishAfter(after_n_epochs=epochs),
            TrainingDataMonitoring(
                train_monitors,
                prefix="train",
                after_epoch=True),
            # DataStreamMonitoring(
            #     monitors,
            #     valid_stream,
            ##     updates=scan_updates,
            #     prefix="valid"),
            DataStreamMonitoring(
                monitors,
                test_stream,
                # updates=scan_updates,
                prefix="test"),
            #Checkpoint(name, before_training=False, after_epoch=True, save_separately=['log', 'model']),
            PartsOnlyCheckpoint("{}/{}".format(subdir,name), before_training=True, after_epoch=True, save_separately=['log', 'model']),
            SampleCheckpoint(image_size=image_size[0], channels=channels, save_subdir=subdir, before_training=True, after_epoch=True),
            ProgressBar(),
            Printing()] + plotting_extensions)
    # Optionally warm-start parameters from a previously pickled model.
    if oldmodel is not None:
        print("Initializing parameters with old model %s"%oldmodel)
        with open(oldmodel, "rb") as f:
            oldmodel = pickle.load(f)
            main_loop.model.set_parameter_values(oldmodel.get_parameter_values())
            del oldmodel
    main_loop.run()
#-----------------------------------------------------------------------------
if __name__ == "__main__":
    # CLI entry point: every option maps onto a parameter of main().
    parser = ArgumentParser()
    parser.add_argument("--live-plotting", "--plot", action="store_true",
                default=False, help="Activate live-plotting to a bokeh-server")
    parser.add_argument("--name", type=str, dest="name",
                default=None, help="Name for this experiment")
    parser.add_argument("--dataset", type=str, dest="dataset",
                default="bmnist", help="Dataset to use: [bmnist|mnist|cifar10]")
    parser.add_argument("--epochs", type=int, dest="epochs",
                default=100, help="Number of training epochs to do")
    parser.add_argument("--bs", "--batch-size", type=int, dest="batch_size",
                default=100, help="Size of each mini-batch")
    parser.add_argument("--lr", "--learning-rate", type=float, dest="learning_rate",
                default=1e-3, help="Learning rate")
    parser.add_argument("--attention", "-a", type=str, default="",
                help="Use attention mechanism (read_window,write_window)")
    parser.add_argument("--niter", type=int, dest="n_iter",
                default=10, help="No. of iterations")
    parser.add_argument("--enc-dim", type=int, dest="enc_dim",
                default=256, help="Encoder RNN state dimension")
    parser.add_argument("--dec-dim", type=int, dest="dec_dim",
                default=256, help="Decoder RNN state dimension")
    parser.add_argument("--z-dim", type=int, dest="z_dim",
                default=100, help="Z-vector dimension")
    parser.add_argument("--oldmodel", type=str,
                help="Use a model pkl file created by a previous run as a starting point for all parameters")
    args = parser.parse_args()
    # Argument dest names match main()'s parameter names exactly.
    main(**vars(args))
| {
"content_hash": "af834c42f63f31243c93ea219ebc2e26",
"timestamp": "",
"source": "github",
"line_count": 288,
"max_line_length": 137,
"avg_line_length": 38.25694444444444,
"alnum_prop": 0.5758758395353059,
"repo_name": "drewlinsley/draw_classify",
"id": "b0ad4767b4300ec713b2267d6603c126aa5e6273",
"size": "11041",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "train-draw-sketch.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "173005"
},
{
"name": "Matlab",
"bytes": "1212"
},
{
"name": "Python",
"bytes": "297291"
},
{
"name": "Shell",
"bytes": "449"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import sys, os, re, inspect
import imp
try:
import hashlib
except ImportError:
import md5 as hashlib
from distutils.core import Distribution, Extension
from distutils.command.build_ext import build_ext
import Cython
from ..Compiler.Main import Context, CompilationOptions, default_options
from ..Compiler.ParseTreeTransforms import (CythonTransform,
SkipDeclarations, AnalyseDeclarationsTransform, EnvTransform)
from ..Compiler.TreeFragment import parse_from_strings
from .Dependencies import strip_string_literals, cythonize, cached_function
from ..Compiler import Pipeline, Nodes
from ..Utils import get_cython_cache_dir
import cython as cython_module
# A utility function to convert user-supplied ASCII strings to unicode.
# On Python 2, byte strings are decoded as ASCII; on Python 3 every str
# is already unicode, so this is the identity function.
if sys.version_info[0] < 3:
    def to_unicode(s):
        # `unicode` exists only on Python 2.
        if not isinstance(s, unicode):
            return s.decode('ascii')
        else:
            return s
else:
    to_unicode = lambda x: x
class UnboundSymbols(EnvTransform, SkipDeclarations):
    """Cython tree visitor that collects every name not bound in the
    scope where it is used.

    Calling an instance on an analysed parse tree returns the set of
    unbound names.
    """
    def __init__(self):
        # No compilation context is needed for this transform.
        CythonTransform.__init__(self, None)
        self.unbound = set()
    def visit_NameNode(self, node):
        # Record names that the current environment cannot resolve.
        if not self.current_env().lookup(node.name):
            self.unbound.add(node.name)
        return node
    def __call__(self, node):
        super(UnboundSymbols, self).__call__(node)
        return self.unbound
@cached_function
def unbound_symbols(code, context=None):
    """Return the set of names used in *code* that are neither defined in
    it nor Python builtins.

    The code is parsed and run through the Cython pipeline up to (and
    including) declaration analysis so that scopes are populated before
    UnboundSymbols walks the tree.
    """
    code = to_unicode(code)
    if context is None:
        context = Context([], default_options)
    # NOTE(review): AnalyseDeclarationsTransform is already imported at
    # module level; this local import looks redundant.
    from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
    tree = parse_from_strings('(tree fragment)', code)
    for phase in Pipeline.create_pipeline(context, 'pyx'):
        if phase is None:
            continue
        tree = phase(tree)
        # Later phases are not needed to decide which names are unbound.
        if isinstance(phase, AnalyseDeclarationsTransform):
            break
    try:
        import builtins
    except ImportError:
        import __builtin__ as builtins  # Python 2 fallback
    return UnboundSymbols()(tree) - set(dir(builtins))
def unsafe_type(arg, context=None):
    """Return a Cython type declaration for *arg*, typing Python ints as
    C ``long``.

    "Unsafe" because a Python int may overflow a C long; every other
    value is delegated to safe_type().
    """
    if type(arg) is int:
        return 'long'
    return safe_type(arg, context)
def safe_type(arg, context=None):
    """Return a conservative Cython type declaration for *arg*.

    Builtin containers, floats, complex, bool and numpy arrays map to
    their Cython spellings; extension types known to *context* map to
    their qualified name; anything else falls back to 'object'.
    """
    kind = type(arg)
    if kind in (list, tuple, dict, str):
        return kind.__name__
    if kind is complex:
        return 'double complex'
    if kind is float:
        return 'double'
    if kind is bool:
        return 'bint'
    if 'numpy' in sys.modules and isinstance(arg, sys.modules['numpy'].ndarray):
        return 'numpy.ndarray[numpy.%s_t, ndim=%s]' % (arg.dtype.name, arg.ndim)
    # Walk the MRO looking for a base type Cython knows about.
    for base_type in kind.mro():
        if base_type.__module__ in ('__builtin__', 'builtins'):
            return 'object'
        module = context.find_module(base_type.__module__, need_pxd=False)
        if module:
            entry = module.lookup(base_type.__name__)
            if entry.is_type:
                return '%s.%s' % (base_type.__module__, base_type.__name__)
    return 'object'
def _get_build_extension():
    """Return a finalized distutils ``build_ext`` command object used to
    compile generated extension modules."""
    dist = Distribution()
    # Ensure the build respects distutils configuration by parsing
    # the configuration files
    config_files = dist.find_config_files()
    dist.parse_config_files(config_files)
    build_extension = build_ext(dist)
    build_extension.finalize_options()
    return build_extension
@cached_function
def _create_context(cython_include_dirs):
    """Create (and cache) a Cython compilation Context; callers pass a
    tuple so the argument is hashable for the cache."""
    return Context(list(cython_include_dirs), default_options)
def cython_inline(code,
                  get_type=unsafe_type,
                  lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
                  cython_include_dirs=['.'],
                  force=False,
                  quiet=False,
                  locals=None,
                  globals=None,
                  **kwds):
    """Compile *code* as Cython and execute it, returning its locals().

    Unbound names in *code* are looked up in **kwds, then the caller's
    locals and globals; their runtime types (via *get_type*) become the
    argument signature of a generated ``__invoke`` function.  Compiled
    modules are cached in *lib_dir* keyed by code + signature + Python /
    Cython versions, so repeated calls are cheap.  *force* rebuilds even
    on a cache hit; *quiet* suppresses diagnostics.
    """
    if get_type is None:
        get_type = lambda x: 'object'
    code = to_unicode(code)
    orig_code = code
    code, literals = strip_string_literals(code)
    code = strip_common_indent(code)
    ctx = _create_context(tuple(cython_include_dirs))
    # f_back.f_back: skip this frame and the immediate caller's wrapper.
    if locals is None:
        locals = inspect.currentframe().f_back.f_back.f_locals
    if globals is None:
        globals = inspect.currentframe().f_back.f_back.f_globals
    try:
        for symbol in unbound_symbols(code):
            if symbol in kwds:
                continue
            elif symbol in locals:
                kwds[symbol] = locals[symbol]
            elif symbol in globals:
                kwds[symbol] = globals[symbol]
            else:
                print("Couldn't find ", symbol)
    except AssertionError:
        if not quiet:
            # Parsing from strings not fully supported (e.g. cimports).
            print("Could not parse code as a string (to extract unbound symbols).")
    cimports = []
    # Fix: iterate over a snapshot -- deleting from kwds while iterating
    # its live view raises RuntimeError on Python 3 (on Python 2,
    # items() already returned a copy, so behaviour is unchanged).
    for name, arg in list(kwds.items()):
        if arg is cython_module:
            cimports.append('\ncimport cython as %s' % name)
            del kwds[name]
    # Fix: dict.keys() has no .sort() on Python 3; sorted() is
    # equivalent on both versions.
    arg_names = sorted(kwds)
    arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
    key = orig_code, arg_sigs, sys.version_info, sys.executable, Cython.__version__
    module_name = "_cython_inline_" + hashlib.md5(str(key).encode('utf-8')).hexdigest()
    if module_name in sys.modules:
        module = sys.modules[module_name]
    else:
        build_extension = None
        if cython_inline.so_ext is None:
            # Figure out and cache current extension suffix
            build_extension = _get_build_extension()
            cython_inline.so_ext = build_extension.get_ext_filename('')
        module_path = os.path.join(lib_dir, module_name + cython_inline.so_ext)
        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)
        if force or not os.path.isfile(module_path):
            cflags = []
            c_include_dirs = []
            # Qualified argument types (e.g. numpy.ndarray[...]) need a
            # cimport of their package.
            qualified = re.compile(r'([.\w]+)[.]')
            for type, _ in arg_sigs:
                m = qualified.match(type)
                if m:
                    cimports.append('\ncimport %s' % m.groups()[0])
                    # one special case
                    if m.groups()[0] == 'numpy':
                        import numpy
                        c_include_dirs.append(numpy.get_include())
                        # cflags.append('-Wno-unused')
            module_body, func_body = extract_func_code(code)
            params = ', '.join(['%s %s' % a for a in arg_sigs])
            # The generated module wraps the user code in __invoke() and
            # returns its locals() as the call result.
            module_code = """
%(module_body)s
%(cimports)s
def __invoke(%(params)s):
%(func_body)s
    return locals()
""" % {'cimports': '\n'.join(cimports),
       'module_body': module_body,
       'params': params,
       'func_body': func_body }
            # Restore the string literals stripped out before parsing.
            for key, value in literals.items():
                module_code = module_code.replace(key, value)
            pyx_file = os.path.join(lib_dir, module_name + '.pyx')
            fh = open(pyx_file, 'w')
            try:
                fh.write(module_code)
            finally:
                fh.close()
            extension = Extension(
                name = module_name,
                sources = [pyx_file],
                include_dirs = c_include_dirs,
                extra_compile_args = cflags)
            if build_extension is None:
                build_extension = _get_build_extension()
            build_extension.extensions = cythonize([extension], include_path=cython_include_dirs, quiet=quiet)
            build_extension.build_temp = os.path.dirname(pyx_file)
            build_extension.build_lib = lib_dir
            build_extension.run()
        module = imp.load_dynamic(module_name, module_path)
    arg_list = [kwds[arg] for arg in arg_names]
    return module.__invoke(*arg_list)
# Cached suffix used by cython_inline above. None should get
# overridden with actual value upon the first cython_inline invocation
cython_inline.so_ext = None
non_space = re.compile('[^ ]')
def strip_common_indent(code):
    """Remove the longest common leading-space indent from *code*.

    Blank lines are ignored; comment lines neither contribute to the
    common indent nor get dedented.
    """
    min_indent = None
    lines = code.split('\n')
    # First pass: smallest indent over non-blank, non-comment lines.
    for line in lines:
        match = non_space.search(line)
        if not match:
            continue  # blank
        indent = match.start()
        if line[indent] == '#':
            continue  # comment
        elif min_indent is None or min_indent > indent:
            min_indent = indent
    # Second pass: strip that indent from every code line.
    for ix, line in enumerate(lines):
        match = non_space.search(line)
        # Bug fix: the original tested line[indent] using the stale
        # `indent` left over from the first loop, which could raise
        # IndexError on short lines and misclassified comment lines.
        if not match or line[match.start()] == '#':
            continue
        else:
            lines[ix] = line[min_indent:]
    return '\n'.join(lines)
module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimport)|(from .+ import +[*]))')
def extract_func_code(code):
    """Split *code* into (module-level statements, indented function body).

    Unindented lines matching ``module_statement`` (cdef extern/class,
    cimports, star imports) and their indented continuations go to the
    module section; everything else becomes the function body, prefixed
    with a four-space indent.
    """
    module_lines = []
    body_lines = []
    target = body_lines
    for line in code.replace('\t', ' ').split('\n'):
        # An unindented line may switch which section we are filling;
        # indented lines stay with the current section.
        if not line.startswith(' '):
            target = module_lines if module_statement.match(line) else body_lines
        target.append(line)
    return '\n'.join(module_lines), '    ' + '\n    '.join(body_lines)
try:
    from inspect import getcallargs
except ImportError:
    # Fallback for old Pythons (< 2.7) whose inspect module lacks
    # getcallargs: map the call's arguments onto *func*'s signature.
    def getcallargs(func, *arg_values, **kwd_values):
        """Return a dict mapping each of *func*'s parameter names to the
        value it would receive for this call; raises TypeError on
        duplicate, unexpected, or missing arguments."""
        all = {}
        args, varargs, kwds, defaults = inspect.getargspec(func)
        # Excess positional arguments go to the *varargs parameter.
        if varargs is not None:
            all[varargs] = arg_values[len(args):]
        for name, value in zip(args, arg_values):
            all[name] = value
        for name, value in kwd_values.items():
            if name in args:
                if name in all:
                    raise TypeError("Duplicate argument %s" % name)
                all[name] = kwd_values.pop(name)
        # Leftover keywords go to the **kwds parameter, if any.
        if kwds is not None:
            all[kwds] = kwd_values
        elif kwd_values:
            raise TypeError("Unexpected keyword arguments: %s" % kwd_values.keys())
        if defaults is None:
            defaults = ()
        # Fill in defaults for trailing parameters that were not supplied.
        first_default = len(args) - len(defaults)
        for ix, name in enumerate(args):
            if name not in all:
                if ix >= first_default:
                    all[name] = defaults[ix - first_default]
                else:
                    raise TypeError("Missing argument: %s" % name)
        return all
def get_body(source):
    """Return the executable body of a function's source string.

    For a ``lambda`` the expression after the colon is wrapped in a
    ``return`` statement; for a ``def`` everything after the signature's
    colon is returned unchanged.
    """
    ix = source.index(':')
    # Bug fix: the original compared source[:5] (5 characters) against
    # the 6-character literal 'lambda', which could never be equal, so
    # lambda bodies were returned without the required 'return'.
    if source.startswith('lambda'):
        return "return %s" % source[ix+1:]
    else:
        return source[ix+1:]
# Lots to be done here... It would be especially cool if compiled functions
# could invoke each other quickly.
class RuntimeCompiledFunction(object):
    """Wrap a Python function so that each call compiles and runs its
    body via cython_inline(), typed from the call's actual arguments."""
    def __init__(self, f):
        self._f = f
        self._body = get_body(inspect.getsource(f))
    def __call__(self, *args, **kwds):
        all = getcallargs(self._f, *args, **kwds)
        # NOTE(review): func_globals is Python 2 only (__globals__ on
        # py3), and it is passed as both locals and globals -- confirm
        # this is intended.
        return cython_inline(self._body, locals=self._f.func_globals, globals=self._f.func_globals, **all)
| {
"content_hash": "d952e2995676ec9d88cf2672a0292067",
"timestamp": "",
"source": "github",
"line_count": 310,
"max_line_length": 110,
"avg_line_length": 35.409677419354836,
"alnum_prop": 0.5799398742825909,
"repo_name": "andreasvc/cython",
"id": "e169599818ba8c5d22a1af5ab99e093414968504",
"size": "10977",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "Cython/Build/Inline.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2098"
},
{
"name": "C",
"bytes": "446141"
},
{
"name": "C++",
"bytes": "16214"
},
{
"name": "CSS",
"bytes": "11567"
},
{
"name": "Emacs Lisp",
"bytes": "11931"
},
{
"name": "HTML",
"bytes": "112723"
},
{
"name": "JavaScript",
"bytes": "15703"
},
{
"name": "Makefile",
"bytes": "4740"
},
{
"name": "PowerShell",
"bytes": "3243"
},
{
"name": "Python",
"bytes": "5174030"
},
{
"name": "Smalltalk",
"bytes": "618"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the buckaroo app: creates the
    PaymentMethodType, PaymentMethod, Transaction and TransactionState
    tables."""
    def forwards(self, orm):
        """Create the four buckaroo tables and emit their signals."""
        # Adding model 'PaymentMethodType'
        db.create_table('buckaroo_paymentmethodtype', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
            ('sysname', self.gf('django.db.models.fields.CharField')(max_length=96, blank=True)),
        ))
        db.send_create_signal('buckaroo', ['PaymentMethodType'])
        # Adding model 'PaymentMethod'
        db.create_table('buckaroo_paymentmethod', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
            ('sysname', self.gf('django.db.models.fields.CharField')(max_length=96, blank=True)),
            ('payment_method_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['buckaroo.PaymentMethodType'], null=True, blank=True)),
        ))
        db.send_create_signal('buckaroo', ['PaymentMethod'])
        # Adding model 'Transaction'
        db.create_table('buckaroo_transaction', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('remote_id', self.gf('django.db.models.fields.CharField')(max_length=192, blank=True)),
            ('invoice', self.gf('django.db.models.fields.CharField')(unique=True, max_length=48)),
            ('reference', self.gf('django.db.models.fields.CharField')(unique=True, max_length=48)),
            ('price', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=8, decimal_places=2, blank=True)),
            ('description', self.gf('django.db.models.fields.CharField')(max_length=765, blank=True)),
            ('return_url', self.gf('django.db.models.fields.CharField')(max_length=765, blank=True)),
            ('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
        ))
        db.send_create_signal('buckaroo', ['Transaction'])
        # Adding model 'TransactionState'
        db.create_table('buckaroo_transactionstate', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('payment_method', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['buckaroo.PaymentMethod'], null=True, blank=True)),
            ('transaction', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['buckaroo.Transaction'], null=True, blank=True)),
            ('valid_from', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('valid_to', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('response_message', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('first_name', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('last_name', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('email', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('account_name', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('account_number', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
            ('idealurl', self.gf('django.db.models.fields.CharField')(max_length=765, null=True, blank=True)),
            ('transactionkey', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('responsestatusdescription', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('idealtransactionid', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('responsestatus', self.gf('django.db.models.fields.CharField')(max_length=16, null=True, blank=True)),
            ('additionalmessage', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
        ))
        db.send_create_signal('buckaroo', ['TransactionState'])
    def backwards(self, orm):
        """Drop the four buckaroo tables (reverse of forwards)."""
        # Deleting model 'PaymentMethodType'
        db.delete_table('buckaroo_paymentmethodtype')
        # Deleting model 'PaymentMethod'
        db.delete_table('buckaroo_paymentmethod')
        # Deleting model 'Transaction'
        db.delete_table('buckaroo_transaction')
        # Deleting model 'TransactionState'
        db.delete_table('buckaroo_transactionstate')
    # Frozen ORM definitions used by South to reconstruct the models at
    # this point in migration history.
    models = {
        'buckaroo.paymentmethod': {
            'Meta': {'object_name': 'PaymentMethod'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'payment_method_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['buckaroo.PaymentMethodType']", 'null': 'True', 'blank': 'True'}),
            'sysname': ('django.db.models.fields.CharField', [], {'max_length': '96', 'blank': 'True'})
        },
        'buckaroo.paymentmethodtype': {
            'Meta': {'object_name': 'PaymentMethodType'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'sysname': ('django.db.models.fields.CharField', [], {'max_length': '96', 'blank': 'True'})
        },
        'buckaroo.transaction': {
            'Meta': {'object_name': 'Transaction'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '765', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'invoice': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '48'}),
            'price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
            'reference': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '48'}),
            'remote_id': ('django.db.models.fields.CharField', [], {'max_length': '192', 'blank': 'True'}),
            'return_url': ('django.db.models.fields.CharField', [], {'max_length': '765', 'blank': 'True'})
        },
        'buckaroo.transactionstate': {
            'Meta': {'object_name': 'TransactionState'},
            'account_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'account_number': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
            'additionalmessage': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'idealtransactionid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'idealurl': ('django.db.models.fields.CharField', [], {'max_length': '765', 'null': 'True', 'blank': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'payment_method': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['buckaroo.PaymentMethod']", 'null': 'True', 'blank': 'True'}),
            'response_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'responsestatus': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
            'responsestatusdescription': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'transaction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['buckaroo.Transaction']", 'null': 'True', 'blank': 'True'}),
            'transactionkey': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'valid_from': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'valid_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        }
    }
complete_apps = ['buckaroo'] | {
"content_hash": "96a9a4496c2ab3e553fce9359edc35d7",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 164,
"avg_line_length": 72.38524590163935,
"alnum_prop": 0.6003850073604349,
"repo_name": "quarkness/django-buckaroo",
"id": "a62b715da3971d06b8a67e2677891b6bb61d5d5f",
"size": "8855",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django-buckaroo/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "32991"
}
],
"symlink_target": ""
} |
from PyQt4 import QtCore
from serieswatcher.config import Config
from serieswatcher.thetvdb import TheTVDB
class MakeSearch(QtCore.QObject):
    """Background task that queries TheTVDB for series matching a user query."""

    # Emitted with the combined list of series once every language is searched.
    searchFinished = QtCore.pyqtSignal(list)

    def __init__(self, userInput):
        super(MakeSearch, self).__init__()
        self._userInput = userInput

    def run(self):
        """Search each configured language and emit the merged results."""
        db = TheTVDB()
        results = []
        for language in tuple(Config.config['languages'].split(',')):
            results.extend(db.search_serie(self._userInput, language))
        self.searchFinished.emit(results)
| {
"content_hash": "62135313b5f8ff93ec648b6b0fcecf65",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 71,
"avg_line_length": 30.842105263157894,
"alnum_prop": 0.6655290102389079,
"repo_name": "lightcode/SeriesWatcher",
"id": "bb1c40da91cc1fa5838f2911bd3da879b543a1a2",
"size": "611",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "serieswatcher/serieswatcher/tasks/makesearch.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2594"
},
{
"name": "JavaScript",
"bytes": "1771"
},
{
"name": "Python",
"bytes": "1561181"
},
{
"name": "R",
"bytes": "2748"
}
],
"symlink_target": ""
} |
import math
import numpy as np
import scipy.sparse.linalg.eigen
def norm(vec):
    """Return the Euclidean norm of the given one-dimensional vector."""
    squared_length = vec.dot(vec)
    return math.sqrt(squared_length)
def normalize(vec):
    """Return *vec* scaled to unit length; zero vectors are returned unchanged."""
    length = math.sqrt(vec.dot(vec))
    if length == 0.0:
        return vec
    return vec / length
def row_norms(mat):
    """Return the Euclidean norm of every row of *mat* as a 1-D array."""
    squared = mat * mat
    return np.sqrt(squared.sum(axis=1))
def col_norms(mat):
    """Return the Euclidean norm of every column of *mat* as a 1-D array."""
    squared = mat * mat
    return np.sqrt(squared.sum(axis=0))
def normalize_rows(mat, offset=0.0):
    """
    Normalize each row of *mat* to unit length.

    An optional *offset* is added to every row norm before dividing, so that
    rows with very small norms shrink smoothly instead of blowing up.
    """
    lengths = np.sqrt((mat * mat).sum(axis=1)) + offset
    return mat / lengths[:, np.newaxis]
def eigensystem(mat, k, strip_a0=False):
    """
    Find the eigenvalues and eigenvectors of the given symmetric sparse matrix.

    This is mostly a wrapper around SciPy's eigsh() function, except we:
    * Convert to Compressed Sparse Rows for speed
    * Sort by descending eigenvalue
    * Optionally remove the largest eigenvalue

    Parameters:
    - mat: a symmetric sparse matrix (anything providing .tocsr()).
    - k: the desired rank (number of eigenvalues to keep).
    - strip_a0: if True, solve for one extra eigenvalue and drop the largest.

    Returns (U, S): the eigenvector matrix and eigenvalue vector.
    Raises ValueError if no eigenvalues would be solved for.
    """
    # FIX: the original called scipy.sparse.linalg.eigen.eigsh, a private
    # module path that has been removed from SciPy; use the public API.
    from scipy.sparse.linalg import eigsh

    # Compute the actual number of eigenvalues to find.
    # eigsh can't actually solve for all of them (k must be < n).
    offset = 1 if strip_a0 else 0
    real_k = min(k + offset, mat.shape[0] - 1)
    if real_k < 1:
        raise ValueError("Attempted to solve for no eigenvalues.")

    # Find the eigenvalues (largest-magnitude first, per eigsh's default).
    S, U = eigsh(mat.tocsr(), k=real_k)

    # Sort by descending eigenvalue, then drop the largest if requested.
    order = np.argsort(S)[::-1]
    S = S[order][offset:]
    U = U[:, order][:, offset:]
    return U, S
def combine_eigenspaces(U_X, S_X, U_Y, S_Y, rank=None):
    '''
    Given the eigenvalue decompositions of X and Y, find that of (X + Y).
    The decompositions must have aligned labels; that is, row i of matrix X
    should refer to the same thing as row i of matrix Y, even if that means
    the row has to be the zero vector. The `AssocSpace.merged_with`
    function is a higher-level version that takes care of row alignment.
    Inputs:
    - U_X, S_X: the decomposition of matrix X.
    - U_Y, S_Y: the decomposition of matrix Y.
    - rank: the number of dimensions to trim the result to.
    Returns: the new decomposition U, S.
    The algorithm is adapted from Brand 2006 (MERL TR2006-059) [1], section 2,
    to operate on eigenvalue decompositions instead of SVDs.
    [1] http://www.merl.com/publications/docs/TR2006-059.pdf
    '''
    # Find the basis for the orthogonal component of U_Y.
    # M_1 holds U_Y's coefficients in U_X's column space; the QR factorization
    # of the residual gives an orthonormal basis Q for what U_X cannot span.
    M_1 = U_X.T.dot(U_Y)
    Q, R = np.linalg.qr(U_Y - U_X.dot(M_1)) # Eqn. (1)
    # Express X + Y in the combined basis [U_X, Q].
    # Broadcasting (M_2 * S_Y) scales column j of M_2 by eigenvalue S_Y[j].
    M_2 = np.r_[M_1, R]
    K = (np.asarray(M_2) * S_Y).dot(M_2.T) # Eqn. (2), right side of sum
    # X is diagonal in this basis, so its contribution is just S_X added to
    # the leading diagonal entries.
    for i in range(len(S_X)):
        K[i, i] += S_X[i] # Eqn. (2), left side of sum
    # Diagonalize the small combined matrix.
    Sp, Up = np.linalg.eigh(K) # Eqn. (3)
    # Sort descending and trim - we do this on the small matrices, for speed
    order = np.argsort(Sp)[::-1]
    Sp = Sp[order]
    Up = Up[:, order]
    if rank is not None:
        Sp = Sp[:rank]
        Up = Up[:, :rank]
    # Done!  Map the small eigenvectors back to the original row space.
    return np.c_[U_X, Q].dot(Up), Sp # Eqn. (4)
def redecompose(U, S):
    '''
    Recover a proper eigenvalue decomposition of the matrix U S U^T, where
    the columns of U need not be normalized or orthogonal.

    Useful after a previous decomposition has been mangled in some way, to
    restore orthonormal columns with correct eigenvalues.  This is a small
    version of the algorithm used in combine_eigenspaces.
    '''
    # Orthonormal basis for the span of U; R expresses U in that basis.
    Q, R = np.linalg.qr(U)
    # Express the matrix in the Q basis and diagonalize the small result.
    small = (np.asarray(R) * S).dot(R.T)
    eigenvalues, eigenvectors = np.linalg.eigh(small)
    # Sort descending on the small matrices, for speed, then map back.
    descending = np.argsort(eigenvalues)[::-1]
    return Q.dot(eigenvectors[:, descending]), eigenvalues[descending]
| {
"content_hash": "bb15a8bc4044a0a0582111c8f569c31b",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 79,
"avg_line_length": 29.424460431654676,
"alnum_prop": 0.6234718826405868,
"repo_name": "redreamality/assoc-space",
"id": "ade88f4fd466fc4a2a13ae5cb3ef0a973b17dc41",
"size": "4090",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "assoc_space/eigenmath.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "43042"
}
],
"symlink_target": ""
} |
from quantum.agent.common import config
from quantum.openstack.common import cfg
# Defaults used when the corresponding option is absent from the config file.
DEFAULT_BRIDGE_MAPPINGS = []
DEFAULT_VLAN_RANGES = []
DEFAULT_TUNNEL_RANGES = []

# Options registered under the [OVS] config section.
# NOTE(review): `_` is assumed to be the gettext translation function
# installed as a builtin; it is not imported in this module -- confirm.
ovs_opts = [
    cfg.StrOpt('integration_bridge', default='br-int',
               help=_("Integration bridge to use")),
    cfg.BoolOpt('enable_tunneling', default=False,
                help=_("Enable tunneling support")),
    cfg.StrOpt('tunnel_bridge', default='br-tun',
               help=_("Tunnel bridge to use")),
    cfg.StrOpt('int_peer_patch_port', default='patch-tun',
               help=_("Peer patch port in integration bridge for tunnel "
                      "bridge")),
    cfg.StrOpt('tun_peer_patch_port', default='patch-int',
               help=_("Peer patch port in tunnel bridge for integration "
                      "bridge")),
    cfg.StrOpt('local_ip', default='',
               help=_("Local IP address of GRE tunnel endpoints.")),
    cfg.ListOpt('bridge_mappings',
                default=DEFAULT_BRIDGE_MAPPINGS,
                help=_("List of <physical_network>:<bridge>")),
    cfg.StrOpt('tenant_network_type', default='local',
               help=_("Network type for tenant networks "
                      "(local, vlan, gre, or none)")),
    cfg.ListOpt('network_vlan_ranges',
                default=DEFAULT_VLAN_RANGES,
                help=_("List of <physical_network>:<vlan_min>:<vlan_max> "
                       "or <physical_network>")),
    cfg.ListOpt('tunnel_id_ranges',
                default=DEFAULT_TUNNEL_RANGES,
                help=_("List of <tun_min>:<tun_max>")),
]

# Options registered under the [AGENT] config section.
agent_opts = [
    cfg.IntOpt('polling_interval', default=2,
               help=_("The number of seconds the agent will wait between "
                      "polling for local device changes.")),
]

# Register both groups with the global config object, plus the shared
# root_helper option used to run privileged commands.
cfg.CONF.register_opts(ovs_opts, "OVS")
cfg.CONF.register_opts(agent_opts, "AGENT")
config.register_root_helper(cfg.CONF)
| {
"content_hash": "8cdac8eb6e84d5c563843a2ae6571b17",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 74,
"avg_line_length": 40,
"alnum_prop": 0.5802083333333333,
"repo_name": "rossella/neutron",
"id": "4ffe58841919b7c4ca284b2e52fcc27a72bfdc04",
"size": "2571",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "quantum/plugins/openvswitch/common/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "37307"
},
{
"name": "JavaScript",
"bytes": "67928"
},
{
"name": "Perl",
"bytes": "235"
},
{
"name": "Python",
"bytes": "3048930"
},
{
"name": "Shell",
"bytes": "7843"
},
{
"name": "XSLT",
"bytes": "50907"
}
],
"symlink_target": ""
} |
from django.shortcuts import render, redirect, render_to_response, RequestContext
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import logout as auth_logout
from django.contrib.auth.decorators import login_required
# Create your views here.
def login(request):
    """Render the Facebook login page."""
    template_name = 'fb_login.html'
    return render(request, template_name)
#@login_required(login_url='/')
#def home(request):
# return render_to_response('/start/studentmain')
def logout(request):
    """Log the current user out, then send them back to the start page."""
    auth_logout(request)
    start_url = '/start'
    return redirect(start_url)
return redirect('/start') | {
"content_hash": "09c00a3d8c5b021fd1deee2aae87bbdd",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 81,
"avg_line_length": 26.5,
"alnum_prop": 0.7641509433962265,
"repo_name": "molleolson/studentkompassen",
"id": "6b65ad9525b2b5a13112279cb66b8d97cf7cc089",
"size": "530",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_social_app/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10574"
},
{
"name": "HTML",
"bytes": "66683"
},
{
"name": "JavaScript",
"bytes": "34153"
},
{
"name": "Python",
"bytes": "15337"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Build the template object for the shared Tatooine cantina building."""
    building = Building()
    building.template = "object/building/tatooine/shared_cantina_tatooine.iff"
    building.attribute_template_id = -1
    building.stfName("building_name","cantina_general")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return building
"content_hash": "1fb32ab5aa03f241900652b0a8afb445",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 73,
"avg_line_length": 24.076923076923077,
"alnum_prop": 0.7060702875399361,
"repo_name": "obi-two/Rebelion",
"id": "0c4e0da525994b022867a172c3da99d0ec85f2d7",
"size": "458",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/building/tatooine/shared_cantina_tatooine.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import subprocess
import tempfile
import gdb
import pwndbg.commands
import pwndbg.vmmap
# Command-line interface for the `ropper` gdb command.  Everything after a
# bare `--` is forwarded verbatim to the external ropper tool.
parser = argparse.ArgumentParser(description="ROP gadget search with ropper.",
                                 epilog="Example: ropper -- --console; ropper -- --search 'mov e?x'")
parser.add_argument('argument', nargs='*', type=str,
                    help='Arguments to pass to ropper')
@pwndbg.commands.ArgparsedCommand(parser)
@pwndbg.commands.OnlyWithFile
def ropper(argument):
    """Run the external `ropper` tool against the debugged binary.

    If the inferior is alive, a corefile is dumped first so gadget
    addresses reflect the live mapping; otherwise the on-disk executable
    is searched.  `argument` is the list of extra args forwarded to ropper.
    """
    with tempfile.NamedTemporaryFile() as corefile:
        # If the process is running, dump a corefile so we get actual addresses.
        if pwndbg.proc.alive:
            filename = corefile.name
            gdb.execute('gcore %s' % filename)
        else:
            filename = pwndbg.proc.exe

        # Build up the command line to run
        cmd = ['ropper', '--file', filename]
        cmd += argument

        try:
            # FIX: the return code was previously bound to an unused `io`
            # variable; drop it.
            subprocess.call(cmd)
        except OSError:
            # FIX: only failure to launch (e.g. ropper missing from $PATH)
            # should produce this hint; the old `except Exception` hid
            # unrelated errors behind the same misleading message.
            print("Could not run ropper. Please ensure it's installed and in $PATH.")
| {
"content_hash": "8d66b75422d4634944f830b453d1110b",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 100,
"avg_line_length": 29.80952380952381,
"alnum_prop": 0.6277955271565495,
"repo_name": "0xddaa/pwndbg",
"id": "f7d51b887a520e5012678a272424c98584c7dde9",
"size": "1298",
"binary": false,
"copies": "5",
"ref": "refs/heads/stable",
"path": "pwndbg/commands/ropper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "584"
},
{
"name": "C",
"bytes": "113"
},
{
"name": "Makefile",
"bytes": "964"
},
{
"name": "Python",
"bytes": "1920581"
},
{
"name": "Shell",
"bytes": "5598"
}
],
"symlink_target": ""
} |
from castellan.common.objects import passphrase as key
from sahara.service.castellan import sahara_key_manager
from sahara.tests.unit import base
class SaharaKeyManagerTest(base.SaharaTestCase):
    """Unit tests for the pass-through SaharaKeyManager."""

    def setUp(self):
        super(SaharaKeyManagerTest, self).setUp()
        self.k_m = sahara_key_manager.SaharaKeyManager()
        self.ctx = None

    def test_create_key(self):
        # A supplied passphrase becomes the key's encoded form.
        with_phrase = self.k_m.create_key(self.ctx, passphrase='super_secret')
        self.assertEqual('super_secret', with_phrase.get_encoded())
        # Without a passphrase the key encodes to the empty string.
        without_phrase = self.k_m.create_key(self.ctx)
        self.assertEqual('', without_phrase.get_encoded())

    def test_store(self):
        # Storing a passphrase key returns the passphrase itself as the id.
        passphrase_key = key.Passphrase(passphrase='super_secret')
        stored_id = self.k_m.store(self.ctx, passphrase_key)
        self.assertEqual('super_secret', stored_id)

    def test_get(self):
        # Fetching by id yields a key that encodes back to that same id.
        key_id = 'super_secret'
        fetched = self.k_m.get(self.ctx, key_id)
        self.assertEqual(key_id, fetched.get_encoded())
| {
"content_hash": "651a2d70269d26535f4b5616599e1226",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 68,
"avg_line_length": 31.517241379310345,
"alnum_prop": 0.6466083150984683,
"repo_name": "openstack/sahara",
"id": "416bc146f6b7bdfa897c046fa796a66fc2075a78",
"size": "1497",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "sahara/tests/unit/service/castellan/test_sahara_key_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "952"
},
{
"name": "Python",
"bytes": "2197746"
},
{
"name": "Shell",
"bytes": "37893"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, include, url
from django.contrib import admin
# Populate the admin site from each installed app's admin module.
admin.autodiscover()

# URL routes: the catalog home page at the site root, plus the Django admin.
urlpatterns = patterns('',
    # Examples:
    url(r'^$', 'catalog.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^admin/', include(admin.site.urls)),
)
| {
"content_hash": "a58fdd9c21ab3dcacc0fcc14176ebba2",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 51,
"avg_line_length": 24.666666666666668,
"alnum_prop": 0.6554054054054054,
"repo_name": "nautilebleu/django-fdw",
"id": "4e41c5f105e88b02b4346daaea512da82bfe9969",
"size": "296",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fdw/urls.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "9477"
}
],
"symlink_target": ""
} |
# Re-export the listener so it is importable directly from this package.
from .linting import BtHighlightUnimplementedStepsEventListener

# Public API of this package.
__all__ = [
    'BtHighlightUnimplementedStepsEventListener'
]
| {
"content_hash": "cffe410fb78976dd515f2041e95a1726",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 63,
"avg_line_length": 25.6,
"alnum_prop": 0.8125,
"repo_name": "mixxorz/BehaveToolkit",
"id": "d13245ddd8c08f0a56dbdc9f02e5b097b8c967c9",
"size": "128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "behave_toolkit/listeners/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cucumber",
"bytes": "721"
},
{
"name": "JavaScript",
"bytes": "3391"
},
{
"name": "Python",
"bytes": "26892"
}
],
"symlink_target": ""
} |
""" config.py """
import os
from flask import Flask
from peewee import MySQLDatabase, SqliteDatabase
#-------------------------------------------------------------------------------
# Environment
#-------------------------------------------------------------------------------
# Production mode is detected solely by TOAST_PRODUCTION being set; the other
# TOAST_* variables supply the MySQL connection settings used in that mode.
DB = 'idreamoftoast'
ENV = os.environ.get('TOAST_PRODUCTION', None)
HOST = os.environ.get('TOAST_HOST', None)
USER = os.environ.get('TOAST_USER', None)
PASSWD = os.environ.get('TOAST_PASSWD', None)
LOG_PATH = os.environ.get('TOAST_LOG_PATH', './')
#-------------------------------------------------------------------------------
# Config Methods
#-------------------------------------------------------------------------------
def get_app():
    """Create the Flask app, configured for production when ENV is set."""
    if ENV:
        # Production: plain app plus file logging for warnings and above.
        app = Flask(__name__)
        import logging
        handler = logging.FileHandler(LOG_PATH + 'flask.log')
        handler.setLevel(logging.WARNING)
        handler.setFormatter(logging.Formatter(
            '%(asctime)s %(levelname)s: %(message)s '
            '[in %(pathname)s:%(lineno)d]'
        ))
        app.logger.addHandler(handler)
    else:
        # Development: serve the static frontend from ./public at the root.
        app = Flask(__name__, static_folder='public', static_url_path='')

        @app.route("/")
        def root():
            return app.send_static_file('index.html')

    return app
def get_database():
    """Return the peewee database: MySQL in production, SQLite in development.

    In production (ENV set), exits the process if any of the connection
    settings (TOAST_HOST, TOAST_USER, TOAST_PASSWD) is missing.
    """
    if ENV:
        # Production settings here!
        # FIX: the original tested `not (HOST or USER or PASSWD)`, which only
        # fired when all three were unset -- a single missing variable slipped
        # through and reached MySQLDatabase with a None credential.
        if not (HOST and USER and PASSWD):
            import sys
            print('Environment variables NOT set!')
            sys.exit()
        return MySQLDatabase(DB, host=HOST, user=USER, passwd=PASSWD)
    # Development settings here!
    return SqliteDatabase('toast.db')  # threadlocals intentionally left off
| {
"content_hash": "56b573adc565d27ddcc9c479a3362232",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 80,
"avg_line_length": 32.37931034482759,
"alnum_prop": 0.49307774227902024,
"repo_name": "whiskeylover/idreamoftoast",
"id": "86c01ecece862595599544c66dd4d6f085a1c02e",
"size": "1878",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "toast/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "8266"
},
{
"name": "JavaScript",
"bytes": "4824"
},
{
"name": "Python",
"bytes": "8562"
}
],
"symlink_target": ""
} |
"""
[2016-01-27] Challenge #251 [Hard] Solve a Nonogram + Bonus
https://www.reddit.com/r/dailyprogrammer/comments/42x90t/20160127_challenge_251_hard_solve_a_nonogram_bonus/
#Description
This week we are doing a challenge involving [Nonograms](https://en.wikipedia.org/wiki/Nonogram)
It is going to be a three parter:
* [Create Nonogram description
([Easy])](https://www.reddit.com/r/dailyprogrammer/comments/42lhem/20160125_challenge_251_easy_create_nonogram/)
* [Solve Nonogram
([Intermediate/Hard])](https://www.reddit.com/r/dailyprogrammer/comments/42x90t/20160127_challenge_251_hard_solve_a_nonogram_bonus/
* Working with multiple colors/characters ([Hard])
* [Bonus: Make it an interactive game
([Intermediate])](https://www.reddit.com/r/dailyprogrammer/comments/42x90t/20160127_challenge_251_hard_solve_a_nonogram_bonus/
##What is a Nonogram?
> Nonograms, also known as Hanjie, Picross or Griddlers, are picture logic puzzles in which cells in a grid must be
colored or left blank according to numbers at the side of the grid to reveal a hidden picture. In this puzzle type, the
numbers are a form of discrete tomography that measures how many unbroken lines of filled-in squares there are in any
given row or column.
In a Nonogram you are given the number of elements in the rows and columns. A row/column where containing no element
has a '0' all other rows/columns will have at least one number.
Each number in a row/column represent sets of elements next to each other.
If a row/column have multiple sets, the declaration of that row/column will have multiple numbers. These sets will
always be at least 1 cell apart.
*An example*
| | |2|1|1| |
---|---|----|----|----|----|----
| |1|1|1|2|1
|2| |*|*| |
1|2| |*| |*|*
|0| | | | |
2|1|*|*| |*|
|2| | |*|*|
#Formal Inputs & Outputs
##Input description
Today you will recieve the columns and rows of a Nonogram seperated by a `-`
0 0 1 1 0
1 2 1 1 5
-
0 1
0 2
1 1
1 1
0 5
##Output description
The Nonogram solved like this:
*
**
* *
* *
*****
##Ins
*1*
0 0 1 1 0
1 2 1 1 5
-
0 1
0 2
1 1
1 1
0 5
*2*
0 0 0 0 0 0 4 0 0 0
0 0 3 4 5 5 2 5 0 0
1 7 1 4 4 1 1 1 7 1
-
0 0 2 1
0 0 0 5
0 0 0 6
0 0 0 8
0 0 0 10
0 0 1 1
1 2 1 1
1 2 1 1
0 1 2 1
0 0 0 8
*3*
0 0 2 0 0 0 1 0 0 0 0 0 0 0 0
0 0 3 6 0 0 4 2 0 0 1 1 1 1 0
1 10 1 2 6 15 8 9 14 8 6 10 10 11 12
-
0 0 0 3
0 0 4 2
0 0 6 6
1 4 2 1
0 6 3 2
0 0 6 7
0 0 6 8
0 0 1 10
0 0 1 10
0 0 1 10
1 1 4 4
0 3 4 4
0 0 4 4
0 0 4 4
0 0 4 4
##Notes/hints
This is a hard challenge. In the [wikipage](https://en.wikipedia.org/wiki/Nonogram) you'll find ways to find what cell
you can fill and how you can exclude cells.
#Bonus challenge
Use the inputs and output from the first challenge [Create Nonogram description
([Easy])](https://www.reddit.com/r/dailyprogrammer/comments/42lhem/20160125_challenge_251_easy_create_nonogram/) to
create a game.
Create the nonogram description fron a library (the inputs) and let the user choose a difficulty:
* Easy, the user can keep on playing, even if he makes wrong calls
* Normal, give the user some 'lives'. Everytime the user gives an incorrect guess, she/he loses a life. I would say
the user would have about number of colums added up to the number of rows lives.
* Hard, the user can't make any mistake
Now make it something beautifull, or at least playable
#Finally
Have a good challenge idea?
Consider submitting it to /r/dailyprogrammer_ideas
"""
def main():
    """Entry point; the nonogram solver described above is not implemented yet."""
    pass
if __name__ == "__main__":
    main()
| {
"content_hash": "1e7f7fe56355277c39dd919b444b46c8",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 131,
"avg_line_length": 31.357723577235774,
"alnum_prop": 0.6333938294010889,
"repo_name": "DayGitH/Python-Challenges",
"id": "7b694b803b9d708da48e77bff21701192d4fff62",
"size": "3857",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DailyProgrammer/DP20160127C.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "OpenEdge ABL",
"bytes": "5002"
},
{
"name": "Python",
"bytes": "2471582"
}
],
"symlink_target": ""
} |
__author__ = 'fcanas'
import termhelper
class Reporter:
    """
    Responsible for displaying results and recording them to log files.
    """
    def __init__(self, warg=0):
        # Start from the requested width; set_terminal_width only overrides
        # it when warg <= 0 (i.e. no explicit width was given).
        self.width = warg
        self.set_terminal_width(warg) # default width of terminal
    # Output format templates, keyed by the kind of line being printed.
    # NOTE: this is a class attribute shared by all instances.
    templates = {
        'test': '[ {0:.<{2}}{1:.>{3}} ]',
        'set_tuples': '{0:.<{2}}{1:.>{3}}',
        'set_items': '{0:.<{1}}',
        'path_display': '[ {0:.<{1}}FAIL ]'
    }
    def report_test(self, test, items):
        """
        Report header for specification results: the test name on the left,
        the number of offending items on the right, then the items themselves.
        """
        template = self.templates['test']
        test_width = self.width - 4 # Test tuple is wrapped in [ ], which is 4 characters
        print template.format(test, len(items), 0, test_width - len(test), len(test))
        self.report_set(items)
    def report_set(self, entities):
        """
        Human-readable report for a set of unverified entities.  Tuples are
        printed as a left/right padded pair; everything else on its own line.
        """
        for item in sorted(entities):
            if type(item) is tuple:
                template = self.templates['set_tuples']
                tuple_padding = self.get_tuple_padding(item)
                print template.format(item[0], item[1], tuple_padding, len(item[1]))
            else:
                template = self.templates['set_items']
                print template.format(item, self.width)
    def set_terminal_width(self, warg=-1):
        """
        Overwrites default terminal width with the width of the current terminal window or the width arg passed
        in by user (if any and positive).
        """
        if warg <= 0:
            # Fall back to the detected terminal width, but never below 100.
            height, width = termhelper.terminal_height_width()
            self.width = max(width, 100)
    def get_tuple_padding(self, item):
        """
        Returns the proper length of padding for a tuple.
        The first item is allocated at least 30 characters of space.
        The second item is allocated up to 50 characters.
        """
        # Default padding, used when both items fit in their own allocated space without shifting the padding
        # and when a line break is unavoidable (default padding keeps the output more readable than no padding
        # in the case of a very short first item with a very long second item)
        tuple_padding = max(30, self.width - 50, len(item[0]) + 1)
        # If the combined length of the items is short enough to fit on one line, avoid a line break
        if len(item[0]) + len(item[1]) < self.width:
            # If the first item fills up its allocated space, pad by 1 character
            if len(item[0]) >= max(30, self.width - 50):
                tuple_padding = len(item[0]) + 1
            # If the second item is longer than its allocated space, shorten the padding to avoid a line break
            elif len(item[1]) > min(self.width - 30, 50):
                tuple_padding = self.width - len(item[1])
        return tuple_padding
    def display_paths(self, paths_dict):
        """
        Human readable output for displaying dependency paths.  Each failing
        condition id maps to a collection of paths; each path is a sequence of
        (id, type) nodes printed as an indented dependency tree.
        """
        def node_type(node):
            # node[1] holds the node's type; render it as a suffix.
            return " (type: {0})".format(str(node[1]))
        template = self.templates['path_display']
        for condition_id in paths_dict:
            for path_index, path in enumerate(list(paths_dict[condition_id])):
                tab = len(condition_id)
                for node_index, node in enumerate(path):
                    if node_index == 0:
                        # Print the root condition header only for the first path.
                        if path_index == 0:
                            print template.format(condition_id + node_type(node), self.width - len('[ FAIL ]'))
                        else:
                            continue
                    else:
                        add_to_tab = 0
                        # node[0] may itself be a tuple whose first element is the id.
                        if type(node[0]) is tuple:
                            cid = node[0][0]
                            add_to_tab += len(cid)
                        else:
                            cid = node[0]
                            add_to_tab += len(cid)
                        if tab:
                            branch = '\--> depends on '
                            add_to_tab += len(branch)
                        else:
                            branch = ''
                        print " " * tab + branch + str(cid) + node_type(node)
                    # NOTE(review): `add_to_tab` is only assigned in the else
                    # branch above; on the very first node of the first path this
                    # line looks like it would raise NameError -- confirm intent.
                    tab += add_to_tab
            print
"content_hash": "f85adf51d148243fc72e8f1dda943ab6",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 111,
"avg_line_length": 38.19298245614035,
"alnum_prop": 0.5151584749655489,
"repo_name": "izv/IzVerifier",
"id": "eeb2234b098260248b28b878dbdd02e0cc205462",
"size": "4354",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "IzVerifier/logging/reporter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "2676"
},
{
"name": "Python",
"bytes": "91289"
}
],
"symlink_target": ""
} |
from ._base import Config, ConfigError
from synapse.appservice import ApplicationService
from synapse.types import UserID
import urllib
import yaml
import logging
logger = logging.getLogger(__name__)
class AppServiceConfig(Config):
    # Configuration section for application services (AS).

    def read_config(self, config):
        # Paths of the individual AS registration YAML files.
        self.app_service_config_files = config.get("app_service_config_files", [])
        # Whether to push events to registered application services.
        self.notify_appservices = config.get("notify_appservices", True)

    # NOTE(review): the first parameter is named `cls` but there is no
    # @classmethod decorator; it is invoked through the project's Config
    # machinery -- confirm before adding the decorator.
    def default_config(cls, **kwargs):
        return """\
        # A list of application service config file to use
        app_service_config_files: []
        """
def load_appservices(hostname, config_files):
    """Returns a list of Application Services from the config files.

    Duplicate AS ids or as_tokens across files raise ConfigError; any other
    load failure is logged and re-raised.
    """
    if not isinstance(config_files, list):
        logger.warning(
            "Expected %s to be a list of AS config files.", config_files
        )
        return []
    # Dicts of value -> filename
    seen_as_tokens = {}
    seen_ids = {}
    appservices = []
    for config_file in config_files:
        try:
            with open(config_file, 'r') as f:
                # SECURITY NOTE: yaml.load without an explicit safe Loader can
                # construct arbitrary objects; registration files are assumed
                # to be operator-controlled here.
                appservice = _load_appservice(
                    hostname, yaml.load(f), config_file
                )
                # Each AS must have a globally unique id...
                if appservice.id in seen_ids:
                    raise ConfigError(
                        "Cannot reuse ID across application services: "
                        "%s (files: %s, %s)" % (
                            appservice.id, config_file, seen_ids[appservice.id],
                        )
                    )
                seen_ids[appservice.id] = config_file
                # ...and a globally unique as_token.
                if appservice.token in seen_as_tokens:
                    raise ConfigError(
                        "Cannot reuse as_token across application services: "
                        "%s (files: %s, %s)" % (
                            appservice.token,
                            config_file,
                            seen_as_tokens[appservice.token],
                        )
                    )
                seen_as_tokens[appservice.token] = config_file
                logger.info("Loaded application service: %s", appservice)
                appservices.append(appservice)
        except Exception as e:
            # Log which file failed, then propagate so startup aborts.
            logger.error("Failed to load appservice from '%s'", config_file)
            logger.exception(e)
            raise
    return appservices
def _load_appservice(hostname, as_info, config_filename):
    """Parse and validate a single application service registration dict.

    Args:
        hostname: the homeserver's hostname, used to build the sender user id.
        as_info: the parsed YAML registration dict.
        config_filename: the originating file, used in error messages.
    Returns an ApplicationService.
    Raises KeyError/ValueError if the registration is malformed.
    """
    required_string_fields = [
        "id", "as_token", "hs_token", "sender_localpart"
    ]
    for field in required_string_fields:
        if not isinstance(as_info.get(field), basestring):
            raise KeyError("Required string field: '%s' (%s)" % (
                field, config_filename,
            ))

    # 'url' must either be a string or explicitly null, not missing
    # to avoid accidentally turning off push for ASes.
    if (not isinstance(as_info.get("url"), basestring) and
            as_info.get("url", "") is not None):
        raise KeyError(
            "Required string field or explicit null: 'url' (%s)" % (config_filename,)
        )

    localpart = as_info["sender_localpart"]
    if urllib.quote(localpart) != localpart:
        raise ValueError(
            "sender_localpart needs characters which are not URL encoded."
        )
    user = UserID(localpart, hostname)
    user_id = user.to_string()

    # Rate limiting for users of this AS is on by default (excludes sender)
    rate_limited = True
    if isinstance(as_info.get("rate_limited"), bool):
        rate_limited = as_info.get("rate_limited")

    # namespace checks
    if not isinstance(as_info.get("namespaces"), dict):
        raise KeyError("Requires 'namespaces' object.")
    for ns in ApplicationService.NS_LIST:
        # specific namespaces are optional
        if ns in as_info["namespaces"]:
            # expect a list of dicts with exclusive and regex keys
            for regex_obj in as_info["namespaces"][ns]:
                # FIX: these three ValueErrors previously passed the format
                # args as extra exception arguments ("msg", a, b) so the %s
                # placeholders were never interpolated; format explicitly.
                if not isinstance(regex_obj, dict):
                    raise ValueError(
                        "Expected namespace entry in %s to be an object,"
                        " but got %s" % (ns, regex_obj)
                    )
                if not isinstance(regex_obj.get("regex"), basestring):
                    raise ValueError(
                        "Missing/bad type 'regex' key in %s" % (regex_obj,)
                    )
                if not isinstance(regex_obj.get("exclusive"), bool):
                    raise ValueError(
                        "Missing/bad type 'exclusive' key in %s" % (regex_obj,)
                    )

    # protocols check
    protocols = as_info.get("protocols")
    if protocols:
        # Because strings are lists in python
        if isinstance(protocols, str) or not isinstance(protocols, list):
            raise KeyError("Optional 'protocols' must be a list if present.")
        for p in protocols:
            if not isinstance(p, str):
                raise KeyError("Bad value for 'protocols' item")

    if as_info["url"] is None:
        logger.info(
            "(%s) Explicitly empty 'url' provided. This application service"
            " will not receive events or queries.",
            config_filename,
        )
    return ApplicationService(
        token=as_info["as_token"],
        url=as_info["url"],
        namespaces=as_info["namespaces"],
        hs_token=as_info["hs_token"],
        sender=user_id,
        id=as_info["id"],
        protocols=protocols,
        rate_limited=rate_limited
    )
| {
"content_hash": "ada3a1fae90e24a74ba6f500e534aa6d",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 85,
"avg_line_length": 36.88666666666666,
"alnum_prop": 0.5534068317368516,
"repo_name": "TribeMedia/synapse",
"id": "82c50b82404557f3c70bdec27c466b5446de117e",
"size": "6118",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "synapse/config/appservice.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4376"
},
{
"name": "HTML",
"bytes": "9046"
},
{
"name": "JavaScript",
"bytes": "176441"
},
{
"name": "Perl",
"bytes": "31852"
},
{
"name": "Python",
"bytes": "2748398"
},
{
"name": "Shell",
"bytes": "7827"
}
],
"symlink_target": ""
} |
from .resource import Resource
class NamespaceResource(Resource):
"""Description of a Namespace resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id
:vartype id: str
:ivar name: Resource name
:vartype name: str
:ivar type: Resource type
:vartype type: str
:param location: Resource location
:type location: str
:param tags: Resource tags
:type tags: dict
:param sku: The sku of the created namespace
:type sku: :class:`Sku <azure.mgmt.notificationhubs.models.Sku>`
:param namespace_resource_name: The name of the namespace.
:type namespace_resource_name: str
:param provisioning_state: Provisioning state of the Namespace.
:type provisioning_state: str
:param region: Specifies the targeted region in which the namespace should
be created. It can be any of the following values: Australia EastAustralia
SoutheastCentral USEast USEast US 2West USNorth Central USSouth Central
USEast AsiaSoutheast AsiaBrazil SouthJapan EastJapan WestNorth EuropeWest
Europe
:type region: str
:param status: Status of the namespace. It can be any of these values:1 =
Created/Active2 = Creating3 = Suspended4 = Deleting
:type status: str
:param created_at: The time the namespace was created.
:type created_at: datetime
:param service_bus_endpoint: Endpoint you can use to perform
NotificationHub operations.
:type service_bus_endpoint: str
:param subscription_id: The Id of the Azure subscription associated with
the namespace.
:type subscription_id: str
:param scale_unit: ScaleUnit where the namespace gets created
:type scale_unit: str
:param enabled: Whether or not the namespace is currently enabled.
:type enabled: bool
:param critical: Whether or not the namespace is set as Critical.
:type critical: bool
:param namespace_type: The namespace type. Possible values include:
'Messaging', 'NotificationHub'
:type namespace_type: str or :class:`NamespaceType
<azure.mgmt.notificationhubs.models.NamespaceType>`
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'Sku'},
'namespace_resource_name': {'key': 'properties.name', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'region': {'key': 'properties.region', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
'service_bus_endpoint': {'key': 'properties.serviceBusEndpoint', 'type': 'str'},
'subscription_id': {'key': 'properties.subscriptionId', 'type': 'str'},
'scale_unit': {'key': 'properties.scaleUnit', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'critical': {'key': 'properties.critical', 'type': 'bool'},
'namespace_type': {'key': 'properties.namespaceType', 'type': 'NamespaceType'},
}
def __init__(self, location, tags=None, sku=None, namespace_resource_name=None, provisioning_state=None, region=None, status=None, created_at=None, service_bus_endpoint=None, subscription_id=None, scale_unit=None, enabled=None, critical=None, namespace_type=None):
    """Build a NamespaceResource; only ``location`` is required.

    The base resource fields (location/tags/sku) are forwarded to the
    parent constructor; every other argument defaults to None and maps
    to a ``properties.*`` wire key listed in ``_attribute_map`` (see the
    class docstring for the meaning of each field).
    """
    super(NamespaceResource, self).__init__(location=location, tags=tags, sku=sku)
    self.namespace_resource_name = namespace_resource_name
    self.provisioning_state = provisioning_state
    self.region = region
    self.status = status
    self.created_at = created_at
    self.service_bus_endpoint = service_bus_endpoint
    self.subscription_id = subscription_id
    self.scale_unit = scale_unit
    self.enabled = enabled
    self.critical = critical
    self.namespace_type = namespace_type
| {
"content_hash": "0d11b0d6a24d6fa3da5ec74acf27e49d",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 268,
"avg_line_length": 45.819148936170215,
"alnum_prop": 0.6524262827954492,
"repo_name": "AutorestCI/azure-sdk-for-python",
"id": "93eb3b0ee4e8f29e9bce16518d509b988f6a8bac",
"size": "4781",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "azure-mgmt-notificationhubs/azure/mgmt/notificationhubs/models/namespace_resource.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "34619070"
}
],
"symlink_target": ""
} |
from case9_4_1 import Case9_4_1
class Case9_4_6(Case9_4_1):
    """WebSocket fragmentation test case 9.4.6.

    Sends one fragmented 4 MiB binary message in 64 KiB fragments and
    expects the peer to echo the payload back unchanged. Behavior is
    inherited from Case9_4_1; only the sizes below differ.
    """

    # Fixed doubled word ("message message") in the original description.
    DESCRIPTION = """Send fragmented binary message with message payload of length 4 * 2**20 (4M). Sent out in fragments of 64k."""

    EXPECTATION = """Receive echo'ed binary message (with payload as sent)."""

    def init(self):
        # 4 MiB total payload, emitted in 64 KiB fragments.
        self.DATALEN = 4 * 2**20
        self.FRAGSIZE = 64 * 2**10
        self.PAYLOAD = "*" * self.DATALEN
        # Large transfer: allow up to 100 s before timing out.
        self.WAITSECS = 100
        self.reportTime = True
| {
"content_hash": "42860066241e7d8a4bbe84f5605e9cc0",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 138,
"avg_line_length": 31.666666666666668,
"alnum_prop": 0.6210526315789474,
"repo_name": "crossbario/autobahn-testsuite",
"id": "6349a985d2d070e24da93098d528e1dacc860b77",
"size": "1273",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "autobahntestsuite/autobahntestsuite/case/case9_4_6.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1585"
},
{
"name": "Dockerfile",
"bytes": "1320"
},
{
"name": "HTML",
"bytes": "52193"
},
{
"name": "JavaScript",
"bytes": "6502"
},
{
"name": "Makefile",
"bytes": "3646"
},
{
"name": "Python",
"bytes": "515213"
},
{
"name": "Shell",
"bytes": "606"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import patterns, url
import views
# URL routes for the vcal app: one HTML view plus two iCalendar (.ics)
# feeds. Uses the pre-Django-1.4 django.conf.urls.defaults patterns() API.
urlpatterns = patterns('',
    url(r'^vcal/$', views.vcal, name='vcal.vcal'),
    # Personal sheriff-duty feed — presumably scoped to the requesting
    # user; confirm in views.my_dates.
    url(r'^My-Sheriff-duty.ics$', views.my_dates, name='vcal.my_dates'),
    # Combined feed covering all sheriff-duty dates.
    url(r'^Sheriff-Duty.ics$', views.all_dates, name='vcal.all_dates'),
)
| {
"content_hash": "ec92e59216975741a8af7eee469620af",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 72,
"avg_line_length": 36.375,
"alnum_prop": 0.6735395189003437,
"repo_name": "mozilla/sheriffs",
"id": "052e8bee8d401263ee86dd3c9a81be936207bb8a",
"size": "1939",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "apps/vcal/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "D",
"bytes": "2108471"
},
{
"name": "JavaScript",
"bytes": "19192"
},
{
"name": "Python",
"bytes": "1272000"
},
{
"name": "Shell",
"bytes": "2880"
}
],
"symlink_target": ""
} |
from django.core.management.base import BaseCommand
from mongoengine.django.auth import User
class Command(BaseCommand):
def _get_string(self, prompt, reader_func=raw_input):
"""Helper method to get a non-empty string.
"""
string = ''
while not string:
string = reader_func(prompt + ': ')
return string
def handle(self, **kwargs):
username = self._get_string('Username')
user = User.objects(username=username).first()
if user:
user.delete()
print 'User "%s %s" successfully removed' % (user.first_name,
user.last_name)
else:
print 'Error! Could not find user with username "%s"' % username
| {
"content_hash": "b966b37d891096e877a95d886ab756bb",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 76,
"avg_line_length": 32.708333333333336,
"alnum_prop": 0.554140127388535,
"repo_name": "MechanisM/django-mumblr",
"id": "85e5a595a31b99271a4fbb814996292056d160c4",
"size": "785",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "mumblr/management/commands/rmuser.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
import os
import json
import sys
import six
from six import StringIO
from pylint.reporters.json import JSONReporter
from pylint.lint import Run
from conans.client.output import Color
from conans.errors import ConanException
def conan_linter(conanfile_path, out):
    """Run pylint checks over a conanfile and report the findings.

    Skipped entirely when CONAN_RECIPE_LINTER is falsy or the string
    "False". Python-3 incompatibilities and ordinary linter warnings are
    written to *out*; when CONAN_PYLINT_WERR is set and anything was
    reported, a ConanException is raised (warnings-as-errors mode).
    """
    enabled = os.environ.get("CONAN_RECIPE_LINTER", True)
    if not enabled or enabled == "False":
        return

    # The recipe may import helper modules living next to it, so its
    # directory must be importable while pylint runs; remove it after.
    sys.path.append(os.path.dirname(conanfile_path))
    try:
        py3_issues = _lint_py3(conanfile_path)
        if py3_issues:
            out.writeln("Python 3 incompatibilities\n ERROR: %s"
                        % "\n ERROR: ".join(py3_issues),
                        front=Color.BRIGHT_MAGENTA)
        warnings = _normal_linter(conanfile_path)
        if warnings:
            out.writeln("Linter warnings\n WARN: %s" % "\n WARN: ".join(warnings),
                        front=Color.MAGENTA)
        werr = os.environ.get("CONAN_PYLINT_WERR", None)
        if werr and (py3_issues or warnings):
            raise ConanException("Package recipe has linter errors. Please fix them.")
    finally:
        sys.path.pop()
class _WritableObject(object):
def __init__(self):
self.content = []
def write(self, st):
self.content.append(st)
def _runner(args):
    """Run pylint with *args* and return the parsed JSON report.

    pylint's stderr noise is silenced for the duration of the run; an
    empty list is returned when the reporter produced no valid JSON.
    """
    collector = _WritableObject()
    saved_stderr = sys.stderr
    try:
        sys.stderr = StringIO()
        Run(args, reporter=JSONReporter(collector), exit=False)
    finally:
        # Always restore stderr, even if pylint blew up.
        sys.stderr = saved_stderr
    try:
        return json.loads("".join(collector.content))
    except ValueError:
        return []
def _lint_py3(conanfile_path):
    """Under Python 2, run pylint's --py3k checker and return the list of
    incompatibility messages; under Python 3 there is nothing to check."""
    if six.PY3:
        return
    report = _runner(['--py3k', "--reports=no", "--disable=no-absolute-import",
                      "--persistent=no", conanfile_path])
    return ["Py3 incompatibility. Line %s: %s"
            % (item.get("line"), item.get("message"))
            for item in report
            if item.get("type") in ("warning", "error")]
def _normal_linter(conanfile_path):
    """Run the standard pylint pass over a conanfile.

    Returns the warning/error messages, filtered of known false
    positives. A custom rcfile may be supplied through the
    CONAN_PYLINTRC environment variable; a ConanException is raised
    when that variable points to a missing file.
    """
    args = ["--reports=no", "--disable=no-absolute-import", "--persistent=no", conanfile_path]
    pylintrc = os.environ.get("CONAN_PYLINTRC", None)
    if pylintrc:
        if not os.path.exists(pylintrc):
            # Fixed message: the controlling variable is CONAN_PYLINTRC,
            # not PYLINTRC as the old text claimed.
            raise ConanException("File %s defined by CONAN_PYLINTRC doesn't exist" % pylintrc)
        args.append('--rcfile=%s' % pylintrc)
    output_json = _runner(args)

    # Attributes that conan injects dynamically on recipes; pylint cannot
    # see them and would emit spurious "no-member" errors.
    dynamic_fields = ("source_folder", "build_folder", "package_folder", "info_build",
                      "build_requires", "info")

    def _accept_message(msg):
        """Return False for messages known to be false positives."""
        symbol = msg.get("symbol")
        text = msg.get("message")
        if symbol == "no-member":
            for field in dynamic_fields:
                if field in text:
                    return False
        if symbol == "not-callable" and "self.copy is not callable" == text:
            return False
        if symbol in ("bare-except", "broad-except"):  # No exception type(s) specified
            return False
        return True

    result = []
    for msg in output_json:
        if msg.get("type") in ("warning", "error"):
            if _accept_message(msg):
                result.append("Linter. Line %s: %s" % (msg.get("line"), msg.get("message")))
    return result
| {
"content_hash": "c2d302d604e5676fd09f3a5b25b59593",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 94,
"avg_line_length": 30.936363636363637,
"alnum_prop": 0.5783132530120482,
"repo_name": "tivek/conan",
"id": "1520805a94a6046c97ead99d93548431f66515d0",
"size": "3403",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "conans/client/cmd/export_linter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1100"
},
{
"name": "Groovy",
"bytes": "6080"
},
{
"name": "Python",
"bytes": "2456395"
},
{
"name": "Shell",
"bytes": "1864"
}
],
"symlink_target": ""
} |
"""
requests.hooks
~~~~~~~~~~~~~~
This module provides the capabilities for the Requests hooks system.
Available hooks:
``args``:
A dictionary of the arguments being sent to Request().
``pre_request``:
The Request object, directly after being created.
``pre_send``:
The Request object, directly before being sent.
``post_request``:
The Request object, directly after being sent.
``response``:
The response generated from a Request.
"""
import traceback
# Hook point names recognized by the dispatch system, in request
# lifecycle order (each is described in the module docstring above).
HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response')
def dispatch_hook(key, hooks, hook_data):
    """Run every hook registered under *key* over *hook_data* and return
    the (possibly transformed) data.

    A hook may return a replacement value; returning None keeps the
    current data. Exceptions raised inside a hook are printed and
    swallowed so one misbehaving hook cannot break the pipeline.
    """
    hooks = hooks or {}
    if key not in hooks:
        return hook_data
    registered = hooks[key]
    # A bare callable is shorthand for a one-element hook list.
    if hasattr(registered, '__call__'):
        registered = [registered]
    for hook in registered:
        try:
            result = hook(hook_data)
        except Exception:
            traceback.print_exc()
        else:
            if result is not None:
                hook_data = result
    return hook_data
| {
"content_hash": "b78d0b92b84874c706a5630adbcb4d5c",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 71,
"avg_line_length": 21.19607843137255,
"alnum_prop": 0.5948196114708603,
"repo_name": "katiecheng/Bombolone",
"id": "272abb73a6c8ea854f0388e4cbb3f56686f988e7",
"size": "1106",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "env/lib/python2.7/site-packages/requests/hooks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "107071"
},
{
"name": "JavaScript",
"bytes": "38358"
},
{
"name": "Python",
"bytes": "7381383"
},
{
"name": "Ruby",
"bytes": "1502"
},
{
"name": "Shell",
"bytes": "3839"
}
],
"symlink_target": ""
} |
from __future__ import division
import six
import chainer
from chainer.backends import cuda
from chainer import function
from chainer.utils import type_check
def _fbeta_score(precision, recall, beta):
beta_square = beta * beta
return ((1 + beta_square) * precision * recall /
(beta_square * precision + recall)).astype(precision.dtype)
class ClassificationSummary(function.Function):
    """Forward-only function computing per-class precision, recall,
    F-beta score and support for a minibatch.

    Instances labeled ``ignore_label`` are mapped to a sentinel class id
    (``label_num``) whose counts are sliced away, so they contribute to
    none of the outputs.
    """

    def __init__(self, label_num, beta, ignore_label):
        # label_num: class count, or None to infer amax(t) + 1 at runtime.
        # beta: precision weight in the F-beta score.
        # ignore_label: label value excluded from all statistics.
        self.label_num = label_num
        self.beta = beta
        self.ignore_label = ignore_label

    def check_type_forward(self, in_types):
        # x must be float scores with the class axis at dim 1; t integer
        # labels matching x's batch and spatial dims. Any extra trailing
        # axes of x beyond t's rank must have size 1.
        type_check.argname(in_types, ('x', 't'))
        x_type, t_type = in_types

        type_check.expect(
            x_type.dtype.kind == 'f',
            t_type.dtype.kind == 'i'
        )

        t_ndim = type_check.eval(t_type.ndim)
        type_check.expect(
            x_type.ndim >= t_type.ndim,
            x_type.shape[0] == t_type.shape[0],
            x_type.shape[2: t_ndim + 1] == t_type.shape[1:]
        )
        for i in six.moves.range(t_ndim + 1, type_check.eval(x_type.ndim)):
            type_check.expect(x_type.shape[i] == 1)

    def forward(self, inputs):
        xp = cuda.get_array_module(*inputs)
        y, t = inputs
        # numpy.bincount requires int32 on Windows
        t = t.astype('i', copy=False)

        if self.label_num is None:
            label_num = xp.amax(t) + 1
        else:
            label_num = self.label_num
            if chainer.is_debug():
                assert (t < label_num).all()

        # Route ignored instances to sentinel class `label_num`; each
        # bincount below is sliced with [:label_num], dropping them.
        mask = (t == self.ignore_label).ravel()
        pred = xp.where(mask, label_num, y.argmax(axis=1).ravel())
        true = xp.where(mask, label_num, t.ravel())
        support = xp.bincount(true, minlength=label_num + 1)[:label_num]
        relevant = xp.bincount(pred, minlength=label_num + 1)[:label_num]
        # True positives: positions where prediction equals ground truth.
        tp_mask = xp.where(pred == true, true, label_num)
        tp = xp.bincount(tp_mask, minlength=label_num + 1)[:label_num]
        # Zero counts divide by zero here (RuntimeWarning, nan/inf entries).
        precision = tp / relevant
        recall = tp / support
        fbeta = _fbeta_score(precision, recall, self.beta)
        return precision, recall, fbeta, support
def classification_summary(y, t, label_num=None, beta=1.0, ignore_label=-1):
    """Calculates Precision, Recall, F beta Score, and support.

    This function calculates the following quantities for each class.

    - Precision: :math:`\\frac{\\mathrm{tp}}{\\mathrm{tp} + \\mathrm{fp}}`
    - Recall: :math:`\\frac{\\mathrm{tp}}{\\mathrm{tp} + \\mathrm{tn}}`
    - F beta Score: The weighted harmonic average of Precision and Recall.
    - Support: The number of instances of each ground truth label.

    Here, ``tp``, ``fp``, and ``tn`` stand for the number of true positives,
    false positives, and true negatives, respectively.

    ``label_num`` specifies the number of classes, that is,
    each value in ``t`` must be an integer in the range of
    ``[0, label_num)``.
    If ``label_num`` is ``None``, this function regards
    ``label_num`` as the maximum value in ``t`` plus one.

    ``ignore_label`` determines which instances should be ignored.
    Specifically, instances with the given label are not taken
    into account for calculating the above quantities.
    By default, it is set to -1 so that all instances are taken
    into consideration, as labels are supposed to be non-negative integers.
    Setting ``ignore_label`` to a non-negative integer less than ``label_num``
    is illegal and yields undefined behavior. In the current implementation,
    it raises ``RuntimeWarning`` and ``ignore_label``-th entries in output
    arrays do not contain correct quantities.

    Args:
        y (~chainer.Variable): Variable holding a vector of scores.
        t (~chainer.Variable): Variable holding a vector of
            ground truth labels.
        label_num (int): The number of classes.
        beta (float): The parameter which determines the weight of
            precision in the F-beta score.
        ignore_label (int): Instances with this label are ignored.

    Returns:
        4-tuple of ~chainer.Variable of size ``(label_num,)``.
        Each element represents precision, recall, F beta score,
        and support of this minibatch.
    """
    return ClassificationSummary(label_num, beta, ignore_label)(y, t)
def precision(y, t, label_num=None, ignore_label=-1):
    """Per-class precision and support; see ``classification_summary``."""
    summary = ClassificationSummary(label_num, 1.0, ignore_label)(y, t)
    return summary[0], summary[-1]
def recall(y, t, label_num=None, ignore_label=-1):
    """Per-class recall and support; see ``classification_summary``."""
    summary = ClassificationSummary(label_num, 1.0, ignore_label)(y, t)
    return summary[1], summary[-1]
def fbeta_score(y, t, label_num=None, beta=1.0, ignore_label=-1):
    """Per-class F-beta score and support; see ``classification_summary``."""
    summary = ClassificationSummary(label_num, beta, ignore_label)(y, t)
    return summary[2], summary[-1]
def f1_score(y, t, label_num=None, ignore_label=-1):
    """Per-class F1 score (beta = 1) and support; see
    ``classification_summary``."""
    summary = ClassificationSummary(label_num, 1.0, ignore_label)(y, t)
    return summary[2], summary[-1]
| {
"content_hash": "e0c3891ebc994a1308c9b1c3bed834ee",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 78,
"avg_line_length": 36.88805970149254,
"alnum_prop": 0.630386405017196,
"repo_name": "rezoo/chainer",
"id": "70c0b58a1faa8d3c475e5904dc9962561914ce46",
"size": "4943",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chainer/functions/evaluation/classification_summary.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3368"
},
{
"name": "Dockerfile",
"bytes": "1238"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "4367165"
}
],
"symlink_target": ""
} |
import requests
import json
import time
def action(**kwargs):
    """Entry point for the reaction: decide whether it should fire and,
    if so, delegate to call_action; otherwise return None."""
    redata = kwargs['redata']
    jdata = kwargs['jdata']
    logger = kwargs['logger']

    # Enough failures to trip the trigger?
    trigger_ok = redata['trigger'] <= jdata['failcount']
    # Rate limit: at least `frequency` seconds since the last run.
    elapsed = time.time() - float(redata['lastrun'])
    frequency_ok = elapsed >= redata['frequency']
    # Reaction only fires for the monitor status it is configured for.
    status_ok = redata['data']['call_on'] in jdata['check']['status']

    if trigger_ok and frequency_ok and status_ok:
        return call_action(redata, jdata, logger)
    return None
def call_action(redata, jdata, logger):
    '''Authenticate against the Rackspace Identity API and issue a SOFT
    reboot for the configured server.

    Returns True when the reboot request is accepted (HTTP 202), False
    on any authentication or API failure.

    NOTE(review): when the service catalog has no entry matching
    resource_type/region, the final status check is applied to the
    *auth* response (HTTP 200) and therefore returns False — presumably
    intended, but worth confirming.
    '''
    # Authenticate with Rackspace ID service
    headers = {'Content-Type': 'application/json'}
    authmsg = {
        "auth": {
            "RAX-KSKEY:apiKeyCredentials": {
                "username": redata['data']['username'],
                "apiKey": redata['data']['apikey']
            }
        }
    }
    payload = json.dumps(authmsg)
    url = "https://identity.api.rackspacecloud.com/v2.0/tokens"
    try:
        req = requests.post(
            url, timeout=10.0, data=payload, headers=headers, verify=True)
        retdata = json.loads(req.text)
        # Check Status code and grab required fields from auth data
        if req.status_code == 200:
            token = retdata['access']['token']['id']
            # Find the public endpoint for the configured service/region.
            for catalog in retdata['access']['serviceCatalog']:
                if catalog['name'] == redata['data']['resource_type']:
                    for endpoint in catalog['endpoints']:
                        if endpoint['region'] == redata['data']['region']:
                            url = endpoint['publicURL'] + "/servers/" + redata['data']['serverid'] + "/action"
                            # Send Reboot Request
                            headers = {
                                "X-Auth-Token": token,
                                "Content-Type": "application/json"
                            }
                            msg = {
                                "reboot": {
                                    "type": "SOFT"
                                }
                            }
                            payload = json.dumps(msg)
                            # except Exception (was a bare except) so
                            # KeyboardInterrupt/SystemExit still propagate.
                            try:
                                req = requests.post(
                                    url, timeout=10.0, data=payload,
                                    headers=headers, verify=True)
                            except Exception:
                                line = "rackspace-reboot: False Rackspace API Call for reaction %s" % (redata['id'])
                                logger.info(line)
                                return False
        else:
            # Fixed typo in log message: "Authenticaiton" -> "Authentication".
            line = "rackspace-reboot: False Rackspace Authentication for reaction %s" % (redata['id'])
            logger.info(line)
            return False
    except Exception:
        line = "rackspace-reboot: False Rackspace Authentication for reaction %s" % (redata['id'])
        logger.info(line)
        return False
    if req.status_code == 202:
        # Fixed typo in log message: "Reqeust" -> "Request".
        line = "rackspace-reboot: Request to %s sent for monitor %s - Successful" % (url, jdata['cid'])
        logger.info(line)
        return True
    else:
        line = "rackspace-reboot: Request to %s sent for monitor %s - False" % (url, jdata['cid'])
        logger.info(line)
        return False
| {
"content_hash": "a6cc4e2950fffd7249e3d094512a3ad3",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 110,
"avg_line_length": 34.74444444444445,
"alnum_prop": 0.5302206587783819,
"repo_name": "Runbook/runbook",
"id": "15e626126121e65c5b93d39063759ed756be11f9",
"size": "3397",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "src/actions/actions/rackspace-reboot/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "17816"
},
{
"name": "HTML",
"bytes": "227999"
},
{
"name": "JavaScript",
"bytes": "4250"
},
{
"name": "Python",
"bytes": "754910"
},
{
"name": "Shell",
"bytes": "5859"
}
],
"symlink_target": ""
} |
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import json
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/version -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_version
short_description: Return the current openshift version
description:
- Return the openshift installed version. `oc version`
options:
state:
description:
- Currently list is only supported state.
required: true
default: list
choices: ["list"]
aliases: []
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: False
aliases: []
author:
- "Kenny Woodson <kwoodson@redhat.com>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
oc_version:
- name: get oc version
oc_version:
register: oc_version
'''
# -*- -*- -*- End included fragment: doc/version -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# pylint: disable=undefined-variable,missing-docstring
# noqa: E301,E302
class YeditException(Exception):
    """Raised by Yedit for file, parse, and path-manipulation errors."""
# pylint: disable=too-many-public-methods
class Yedit(object):
''' Class to modify yaml files '''
# Full key-path grammar: one or more segments, each either a [n] list
# index or a run of word chars (the %s slot is filled with every
# separator except the active one, so those act as literal characters).
re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
# One path element: group 1 captures a list index, group 2 a dict key.
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z%s/_-]+)"
# Separator characters supported for key paths.
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
def __init__(self,
             filename=None,
             content=None,
             content_type='yaml',
             separator='.',
             backup=False):
    # filename: backing file on disk (may be None for in-memory use).
    # content: pre-parsed dict or raw string used instead of reading.
    # content_type: 'yaml' or 'json'.
    # separator: key-path separator character (see parse_key).
    # backup: when True, write() keeps a '<filename>.orig' copy first.
    self.content = content
    self._separator = separator
    self.filename = filename
    self.__yaml_dict = content
    self.content_type = content_type
    self.backup = backup
    # Parse immediately; fall back to an empty document when there was
    # nothing to load.
    self.load(content_type=self.content_type)
    if self.__yaml_dict is None:
        self.__yaml_dict = {}
@property
def separator(self):
    ''' getter method for separator '''
    return self._separator
@separator.setter
def separator(self):
''' getter method for yaml_dict '''
return self._separator
@property
def yaml_dict(self):
    ''' getter method for yaml_dict (the parsed document) '''
    return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
    ''' setter method for yaml_dict (replaces the parsed document) '''
    self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key % ''.join(common_separators), key)
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key % ''.join(common_separators), key):
return False
return True
@staticmethod
def remove_entry(data, key, sep='.'):
    '''Remove the value at *key* from *data* in place.

    An empty key clears the whole top-level container. Returns True on
    success, None when the key is invalid or the path does not exist.
    '''
    if key == '' and isinstance(data, dict):
        data.clear()
        return True
    elif key == '' and isinstance(data, list):
        del data[:]
        return True

    if not (key and Yedit.valid_key(key, sep)) and \
       isinstance(data, (list, dict)):
        return None

    key_indexes = Yedit.parse_key(key, sep)
    # Walk down to the parent of the final path element.
    for arr_ind, dict_key in key_indexes[:-1]:
        if dict_key and isinstance(data, dict):
            data = data.get(dict_key, None)
        elif (arr_ind and isinstance(data, list) and
              int(arr_ind) <= len(data) - 1):
            data = data[int(arr_ind)]
        else:
            return None

    # process last index for remove
    # expected list entry
    if key_indexes[-1][0]:
        if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:  # noqa: E501
            del data[int(key_indexes[-1][0])]
            return True

    # expected dict entry
    elif key_indexes[-1][1]:
        if isinstance(data, dict):
            del data[key_indexes[-1][1]]
            return True
@staticmethod
def add_entry(data, key, item=None, sep='.'):
    '''Set *item* at location *key* inside *data*, creating intermediate
    dicts as needed.

        d = {'a': {'b': 'c'}}
        key = a#b  ->  replaces 'c' with *item*

    Returns the value stored, None for an invalid key, and raises
    YeditException when the path crosses a non-dict or targets a
    non-existent list slot.
    '''
    if key == '':
        pass
    elif (not (key and Yedit.valid_key(key, sep)) and
          isinstance(data, (list, dict))):
        return None

    key_indexes = Yedit.parse_key(key, sep)
    # Walk to the parent of the final element, vivifying empty dicts.
    for arr_ind, dict_key in key_indexes[:-1]:
        if dict_key:
            if isinstance(data, dict) and dict_key in data and data[dict_key]:  # noqa: E501
                data = data[dict_key]
                continue

            elif data and not isinstance(data, dict):
                raise YeditException("Unexpected item type found while going through key " +
                                     "path: {} (at key: {})".format(key, dict_key))

            data[dict_key] = {}
            data = data[dict_key]

        elif (arr_ind and isinstance(data, list) and
              int(arr_ind) <= len(data) - 1):
            data = data[int(arr_ind)]
        else:
            raise YeditException("Unexpected item type found while going through key path: {}".format(key))

    if key == '':
        data = item

    # process last index for add
    # expected list entry
    elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:  # noqa: E501
        data[int(key_indexes[-1][0])] = item

    # expected dict entry
    elif key_indexes[-1][1] and isinstance(data, dict):
        data[key_indexes[-1][1]] = item

    # didn't add/update to an existing list, nor add/update key to a dict
    # so we must have been provided some syntax like a.b.c[<int>] = "data" for a
    # non-existent array
    else:
        raise YeditException("Error adding to object at path: {}".format(key))

    return data
@staticmethod
def get_entry(data, key, sep='.'):
    '''Walk *data* along dotted/indexed *key* and return the value found.

        d = {'a': {'b': 'c'}}
        key = a.b  ->  returns 'c'

    Returns None when the key is invalid or any intermediate element is
    missing; an empty key returns *data* unchanged.
    '''
    if key == '':
        pass
    elif (not (key and Yedit.valid_key(key, sep)) and
          isinstance(data, (list, dict))):
        return None

    key_indexes = Yedit.parse_key(key, sep)
    for arr_ind, dict_key in key_indexes:
        if dict_key and isinstance(data, dict):
            data = data.get(dict_key, None)
        elif (arr_ind and isinstance(data, list) and
              int(arr_ind) <= len(data) - 1):
            data = data[int(arr_ind)]
        else:
            return None

    return data
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
tmp_filename = filename + '.yedit'
with open(tmp_filename, 'w') as yfd:
yfd.write(contents)
os.rename(tmp_filename, filename)
def write(self):
    '''Serialize the current document back to self.filename.

    Keeps a '<filename>.orig' copy first when backup is enabled.
    Returns (True, document); raises YeditException without a filename.
    '''
    if not self.filename:
        raise YeditException('Please specify a filename.')

    if self.backup and self.file_exists():
        shutil.copy(self.filename, self.filename + '.orig')

    # Try to set format attributes if supported (ruamel round-trip
    # documents only; plain dicts lack .fa and fall through).
    try:
        self.yaml_dict.fa.set_block_style()
    except AttributeError:
        pass

    # Try to use RoundTripDumper if supported (ruamel); PyYAML has no
    # RoundTripDumper attribute, which triggers the safe_dump fallback.
    try:
        Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
    except AttributeError:
        Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))

    return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename is None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
    '''Parse the document from self.content or the backing file.

    Returns the parsed document (also stored in self.yaml_dict), or
    None when there is nothing to load. Raises YeditException on a
    YAML parse error.
    '''
    contents = self.read()

    if not contents and not self.content:
        return None

    # Explicit content wins over the file: a dict is used as-is, a
    # string replaces the file text and is parsed below.
    if self.content:
        if isinstance(self.content, dict):
            self.yaml_dict = self.content
            return self.yaml_dict
        elif isinstance(self.content, str):
            contents = self.content

    # check if it is yaml
    try:
        if content_type == 'yaml' and contents:
            # Try to set format attributes if supported (ruamel only).
            try:
                self.yaml_dict.fa.set_block_style()
            except AttributeError:
                pass
            # Try to use RoundTripLoader if supported. PyYAML has no
            # RoundTripLoader attribute, so the AttributeError path
            # falls back to a plain safe_load.
            try:
                self.yaml_dict = yaml.safe_load(contents, yaml.RoundTripLoader)
            except AttributeError:
                self.yaml_dict = yaml.safe_load(contents)
            # Try to set format attributes if supported
            try:
                self.yaml_dict.fa.set_block_style()
            except AttributeError:
                pass
        elif content_type == 'json' and contents:
            self.yaml_dict = json.loads(contents)
    except yaml.YAMLError as err:
        # Error loading yaml or json
        raise YeditException('Problem with loading yaml file. %s' % err)

    return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError:
entry = None
return entry
def pop(self, path, key_or_item):
    '''Remove *key_or_item* from the dict (by key) or list (by value)
    found at *path*.

    Returns (changed, document); (False, document) when the path is
    missing or the key/item is not present.
    '''
    try:
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
    except KeyError:
        entry = None

    if entry is None:
        return (False, self.yaml_dict)

    if isinstance(entry, dict):
        # AUDIT:maybe-no-member makes sense due to fuzzy types
        # pylint: disable=maybe-no-member
        if key_or_item in entry:
            entry.pop(key_or_item)
            return (True, self.yaml_dict)
        return (False, self.yaml_dict)

    elif isinstance(entry, list):
        # AUDIT:maybe-no-member makes sense due to fuzzy types
        # pylint: disable=maybe-no-member
        ind = None
        # Remove the first occurrence of the item, if any.
        try:
            ind = entry.index(key_or_item)
        except ValueError:
            return (False, self.yaml_dict)

        entry.pop(ind)
        return (True, self.yaml_dict)

    return (False, self.yaml_dict)
def delete(self, path):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
    '''Check whether *value* exists at *path*.

    Lists: membership test. Dict entry vs dict value: True only when
    every key/value pair of *value* matches. Dict entry vs other value:
    key membership. Anything else: equality.
    '''
    try:
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
    except KeyError:
        entry = None

    if isinstance(entry, list):
        if value in entry:
            return True
        return False

    elif isinstance(entry, dict):
        if isinstance(value, dict):
            rval = False
            # for/else: rval becomes True only when no mismatch broke
            # out of the loop.
            # NOTE(review): entry[key] raises KeyError when *entry*
            # lacks one of value's keys — confirm callers guarantee
            # key presence.
            for key, val in value.items():
                if entry[key] != val:
                    rval = False
                    break
            else:
                rval = True

            return rval

        return value in entry

    return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# AUDIT:maybe-no-member makes sense due to loading data from
# a serialized format.
# pylint: disable=maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
# pylint: disable=too-many-arguments
def update(self, path, value, index=None, curr_value=None):
    '''Merge/replace *value* into the container at *path*.

    Dict at path: dict-merge *value* in (non-dict raises
    YeditException). List at path: replace the element located by
    *curr_value* or *index*, otherwise append *value* if absent.
    Returns (changed, document).
    '''
    try:
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
    except KeyError:
        entry = None

    if isinstance(entry, dict):
        # AUDIT:maybe-no-member makes sense due to fuzzy types
        # pylint: disable=maybe-no-member
        if not isinstance(value, dict):
            raise YeditException('Cannot replace key, value entry in ' +
                                 'dict with non-dict type. value=[%s] [%s]' % (value, type(value)))  # noqa: E501

        entry.update(value)
        return (True, self.yaml_dict)

    elif isinstance(entry, list):
        # AUDIT:maybe-no-member makes sense due to fuzzy types
        # pylint: disable=maybe-no-member
        ind = None
        # Locate the element to replace: by current value first, then
        # by explicit index.
        if curr_value:
            try:
                ind = entry.index(curr_value)
            except ValueError:
                return (False, self.yaml_dict)

        elif index is not None:
            ind = index

        if ind is not None and entry[ind] != value:
            entry[ind] = value
            return (True, self.yaml_dict)

        # see if it exists in the list
        try:
            ind = entry.index(value)
        except ValueError:
            # doesn't exist, append it
            entry.append(value)
            return (True, self.yaml_dict)

        # already exists, return
        if ind is not None:
            return (False, self.yaml_dict)
    return (False, self.yaml_dict)
def put(self, path, value):
    '''Set *path* to *value* in the document.

    The edit is made on a format-preserving copy and only committed on
    success. Returns (changed, document).
    '''
    try:
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
    except KeyError:
        entry = None

    # No-op when the stored value already equals the new one.
    if entry == value:
        return (False, self.yaml_dict)

    # deepcopy didn't work
    # Try to use ruamel.yaml and fallback to pyyaml: PyYAML lacks
    # round_trip_dump, raising AttributeError before any parsing.
    try:
        tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                  default_flow_style=False),
                             yaml.RoundTripLoader)
    except AttributeError:
        tmp_copy = copy.deepcopy(self.yaml_dict)

    # set the format attributes if available
    try:
        tmp_copy.fa.set_block_style()
    except AttributeError:
        pass

    result = Yedit.add_entry(tmp_copy, path, value, self.separator)
    if not result:
        return (False, self.yaml_dict)

    self.yaml_dict = tmp_copy

    return (True, self.yaml_dict)
def create(self, path, value):
    '''Set *path* to *value*, but only when the backing file does not
    exist yet; returns (changed, document). Note: does not write the
    file itself — see write().
    '''
    if not self.file_exists():
        # deepcopy didn't work
        # Try to use ruamel.yaml and fallback to pyyaml (PyYAML lacks
        # round_trip_dump -> AttributeError -> deepcopy).
        try:
            tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                      default_flow_style=False),
                                 yaml.RoundTripLoader)
        except AttributeError:
            tmp_copy = copy.deepcopy(self.yaml_dict)

        # set the format attributes if available
        try:
            tmp_copy.fa.set_block_style()
        except AttributeError:
            pass

        result = Yedit.add_entry(tmp_copy, path, value, self.separator)
        if result:
            self.yaml_dict = tmp_copy
            return (True, self.yaml_dict)

    return (False, self.yaml_dict)
@staticmethod
def get_curr_value(invalue, val_type):
    '''Parse *invalue* according to *val_type* ('yaml' or 'json');
    any other type returns the value unchanged, None stays None.'''
    if invalue is None:
        return None

    curr_value = invalue
    if val_type == 'yaml':
        # NOTE(review): yaml.load without an explicit Loader can execute
        # arbitrary tags on untrusted input; prefer safe_load if this
        # value can come from outside — confirm trust boundary.
        curr_value = yaml.load(invalue)
    elif val_type == 'json':
        curr_value = json.loads(invalue)

    return curr_value
@staticmethod
def parse_value(inc_value, vtype=''):
    '''Coerce *inc_value* according to the requested *vtype*.

    'bool' in vtype validates boolean-looking strings (raising
    YeditException otherwise); 'str' in vtype stringifies bools; any
    other string is YAML-parsed to infer its type.
    '''
    true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
                  'on', 'On', 'ON', ]
    false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
                   'off', 'Off', 'OFF']

    # It came in as a string but you didn't specify value_type as string
    # we will convert to bool if it matches any of the above cases
    if isinstance(inc_value, str) and 'bool' in vtype:
        if inc_value not in true_bools and inc_value not in false_bools:
            raise YeditException('Not a boolean type. str=[%s] vtype=[%s]'
                                 % (inc_value, vtype))
    elif isinstance(inc_value, bool) and 'str' in vtype:
        inc_value = str(inc_value)

    # If vtype is not str then go ahead and attempt to yaml load it.
    # NOTE(review): yaml.load without a Loader is unsafe on untrusted
    # input — confirm this only sees operator-supplied values.
    if isinstance(inc_value, str) and 'str' not in vtype:
        try:
            inc_value = yaml.load(inc_value)
        except Exception:
            raise YeditException('Could not determine type of incoming ' +
                                 'value. value=[%s] vtype=[%s]'
                                 % (type(inc_value), vtype))

    return inc_value
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def run_ansible(module):
    '''Perform the idempotent CRUD operations for the yedit Ansible module.

    Dispatches on ``module.params['state']``:
      list    -> read a key (or whole document) without changing anything
      absent  -> delete (or pop from) a key, writing back when changed
      present -> put/append/update a value, writing back when changed
    Returns the Ansible result dict; never raises for expected conditions.
    '''
    yamlfile = Yedit(filename=module.params['src'],
                     backup=module.params['backup'],
                     separator=module.params['separator'])

    if module.params['src']:
        rval = yamlfile.load()

        if yamlfile.yaml_dict is None and \
           module.params['state'] != 'present':
            # NOTE(review): the '%s' placeholder below is never substituted
            # (no % formatting is applied), so users see a literal '[%s]'.
            return {'failed': True,
                    'msg': 'Error opening file [%s]. Verify that the ' +
                           'file exists, that it is has correct' +
                           ' permissions, and is valid yaml.'}

    if module.params['state'] == 'list':
        # Optional inline content replaces whatever was loaded from src.
        if module.params['content']:
            content = Yedit.parse_value(module.params['content'],
                                        module.params['content_type'])
            yamlfile.yaml_dict = content

        if module.params['key']:
            rval = yamlfile.get(module.params['key']) or {}

        return {'changed': False, 'result': rval, 'state': "list"}

    elif module.params['state'] == 'absent':
        if module.params['content']:
            content = Yedit.parse_value(module.params['content'],
                                        module.params['content_type'])
            yamlfile.yaml_dict = content

        # 'update' pops a specific value out of a list; otherwise delete key.
        if module.params['update']:
            rval = yamlfile.pop(module.params['key'],
                                module.params['value'])
        else:
            rval = yamlfile.delete(module.params['key'])

        # Persist only when something changed and we have a file to write.
        if rval[0] and module.params['src']:
            yamlfile.write()

        return {'changed': rval[0], 'result': rval[1], 'state': "absent"}

    elif module.params['state'] == 'present':
        # check if content is different than what is in the file
        if module.params['content']:
            content = Yedit.parse_value(module.params['content'],
                                        module.params['content_type'])

            # We had no edits to make and the contents are the same
            if yamlfile.yaml_dict == content and \
               module.params['value'] is None:
                return {'changed': False,
                        'result': yamlfile.yaml_dict,
                        'state': "present"}

            yamlfile.yaml_dict = content

        # we were passed a value; parse it
        if module.params['value']:
            value = Yedit.parse_value(module.params['value'],
                                      module.params['value_type'])
            key = module.params['key']
            if module.params['update']:
                # pylint: disable=line-too-long
                curr_value = Yedit.get_curr_value(Yedit.parse_value(module.params['curr_value']),  # noqa: E501
                                                  module.params['curr_value_format'])  # noqa: E501

                rval = yamlfile.update(key, value, module.params['index'], curr_value)  # noqa: E501

            elif module.params['append']:
                rval = yamlfile.append(key, value)
            else:
                rval = yamlfile.put(key, value)

            if rval[0] and module.params['src']:
                yamlfile.write()

            return {'changed': rval[0],
                    'result': rval[1], 'state': "present"}

        # no edits to make: still (re)write the loaded/replaced content
        if module.params['src']:
            # pylint: disable=redefined-variable-type
            rval = yamlfile.write()
            return {'changed': rval[0],
                    'result': rval[1],
                    'state': "present"}

    # NOTE(review): 'Unkown' typo is in a runtime string — left unchanged.
    return {'failed': True, 'msg': 'Unkown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
    '''Exception class for openshiftcli.

    Raised by the oc wrapper classes (e.g. OCVersion.get) when a CLI
    invocation returns a non-zero exit status that callers cannot handle
    through the normal result dictionary.
    '''
    pass
# Directories searched in addition to $PATH when looking for the oc client.
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]


def locate_oc_binary():
    '''Return the full path to the oc client, or the bare name ``'oc'``.

    # https://github.com/openshift/openshift-ansible/issues/3410
    oc may live in /usr/local/bin, which ansible/sudo can drop from $PATH,
    so a couple of extra directories are always searched as well.
    '''
    search_dirs = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS

    binary = 'oc'
    try:
        # Python 3: shutil.which accepts an explicit search path.
        located = shutil.which(binary, path=os.pathsep.join(search_dirs))
    except AttributeError:
        # Python 2 has no shutil.which; scan the directories by hand.
        for directory in search_dirs:
            candidate = os.path.join(directory, binary)
            if os.path.exists(candidate):
                return candidate
        return binary

    return located if located is not None else binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
    ''' Class to wrap the command line tools.

    Each helper assembles an argument vector, shells out via
    :meth:`openshift_cmd`, and returns the resulting dictionary
    (``returncode``, ``results``, ``cmd``, plus error details on failure).
    '''
    def __init__(self,
                 namespace,
                 kubeconfig='/etc/origin/master/admin.kubeconfig',
                 verbose=False,
                 all_namespaces=False):
        ''' Constructor for OpenshiftCLI.

        namespace: namespace passed to oc via ``-n`` (may be None)
        kubeconfig: path to the kubeconfig; a private temp copy is used
        verbose: echo every command and its output to stdout
        all_namespaces: pass ``--all-namespaces`` instead of ``-n``
        '''
        self.namespace = namespace
        self.verbose = verbose
        # Work on a temporary copy so parallel module runs cannot corrupt
        # the shared kubeconfig.
        self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
        self.all_namespaces = all_namespaces
        self.oc_binary = locate_oc_binary()

    # Pylint allows only 5 arguments to be passed.
    # pylint: disable=too-many-arguments
    def _replace_content(self, resource, rname, content, force=False, sep='.'):
        ''' replace the current object with the content '''
        res = self._get(resource, rname)
        if not res['results']:
            return res

        fname = Utils.create_tmpfile(rname + '-')
        # Apply each key/value from ``content`` onto the fetched definition.
        yed = Yedit(fname, res['results'][0], separator=sep)
        changes = []
        for key, value in content.items():
            changes.append(yed.put(key, value))

        if any([change[0] for change in changes]):
            yed.write()

            atexit.register(Utils.cleanup, [fname])

            return self._replace(fname, force)

        # Nothing differed from the live object; report no-op success.
        return {'returncode': 0, 'updated': False}

    def _replace(self, fname, force=False):
        '''replace the current object with oc replace'''
        # Strip 'resourceVersion' to sidestep the optimistic-concurrency
        # conflict when the object changed on the server since our read.
        yed = Yedit(fname)
        results = yed.delete('metadata.resourceVersion')
        if results[0]:
            yed.write()
        cmd = ['replace', '-f', fname]
        if force:
            cmd.append('--force')
        return self.openshift_cmd(cmd)

    def _create_from_content(self, rname, content):
        '''create a temporary file and then call oc create on it'''
        fname = Utils.create_tmpfile(rname + '-')
        yed = Yedit(fname, content=content)
        yed.write()

        atexit.register(Utils.cleanup, [fname])

        return self._create(fname)

    def _create(self, fname):
        '''call oc create on a filename'''
        return self.openshift_cmd(['create', '-f', fname])

    def _delete(self, resource, rname, selector=None):
        '''call oc delete on a resource'''
        cmd = ['delete', resource, rname]
        if selector:
            cmd.append('--selector=%s' % selector)
        return self.openshift_cmd(cmd)

    def _process(self, template_name, create=False, params=None, template_data=None):  # noqa: E501
        '''process a template

        template_name: the name of the template to process
        create: whether to send to oc create after processing
        params: the parameters for the template
        template_data: the incoming template's data; instead of a file
        '''
        cmd = ['process']
        if template_data:
            # Feed the raw template through stdin instead of a file.
            cmd.extend(['-f', '-'])
        else:
            cmd.append(template_name)
        if params:
            param_str = ["%s=%s" % (key, value) for key, value in params.items()]
            cmd.append('-v')
            cmd.extend(param_str)

        results = self.openshift_cmd(cmd, output=True, input_data=template_data)

        if results['returncode'] != 0 or not create:
            return results

        # Persist the processed output and create the resulting objects.
        fname = Utils.create_tmpfile(template_name + '-')
        yed = Yedit(fname, results['results'])
        yed.write()

        atexit.register(Utils.cleanup, [fname])

        return self.openshift_cmd(['create', '-f', fname])

    def _get(self, resource, rname=None, selector=None):
        '''return a resource by name '''
        cmd = ['get', resource]
        if selector:
            cmd.append('--selector=%s' % selector)
        elif rname:
            cmd.append(rname)

        cmd.extend(['-o', 'json'])

        rval = self.openshift_cmd(cmd, output=True)

        # Normalize so 'results' is always a list, whether oc returned a
        # List object ('items') or a single resource.
        if 'items' in rval:
            rval['results'] = rval['items']
        elif not isinstance(rval['results'], list):
            rval['results'] = [rval['results']]

        return rval

    def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node scheduable '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector=%s' % selector)

        cmd.append('--schedulable=%s' % schedulable)

        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')  # noqa: E501

    def _list_pods(self, node=None, selector=None, pod_selector=None):
        ''' perform oadm list pods

        node: the node in which to list pods
        selector: the label selector filter if provided
        pod_selector: the pod selector filter if provided
        '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector=%s' % selector)

        if pod_selector:
            cmd.append('--pod-selector=%s' % pod_selector)

        cmd.extend(['--list-pods', '-o', 'json'])

        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')

    # pylint: disable=too-many-arguments
    def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
        ''' perform oadm manage-node evacuate '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector=%s' % selector)

        if dry_run:
            cmd.append('--dry-run')

        if pod_selector:
            cmd.append('--pod-selector=%s' % pod_selector)

        if grace_period:
            cmd.append('--grace-period=%s' % int(grace_period))

        if force:
            cmd.append('--force')

        cmd.append('--evacuate')

        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')

    def _version(self):
        ''' return the openshift version'''
        return self.openshift_cmd(['version'], output=True, output_type='raw')

    def _import_image(self, url=None, name=None, tag=None):
        ''' perform image import '''
        cmd = ['import-image']

        image = '{0}'.format(name)
        if tag:
            image += ':{0}'.format(tag)
        cmd.append(image)

        if url:
            cmd.append('--from={0}/{1}'.format(url, image))

        cmd.append('-n{0}'.format(self.namespace))

        cmd.append('--confirm')
        return self.openshift_cmd(cmd)

    def _run(self, cmds, input_data):
        ''' Actually executes the command. This makes mocking easier. '''
        curr_env = os.environ.copy()
        curr_env.update({'KUBECONFIG': self.kubeconfig})
        proc = subprocess.Popen(cmds,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=curr_env)

        stdout, stderr = proc.communicate(input_data)

        return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')

    # pylint: disable=too-many-arguments,too-many-branches
    def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
        '''Base command for oc.

        cmd: list of arguments to append after the oc binary
        oadm: insert 'adm' as the first sub-command
        output: capture and return the command output in 'results'
        output_type: 'json' (parsed) or 'raw' (verbatim stdout)
        input_data: bytes fed to the process stdin
        '''
        cmds = [self.oc_binary]

        if oadm:
            cmds.append('adm')

        cmds.extend(cmd)

        if self.all_namespaces:
            cmds.extend(['--all-namespaces'])
        # NOTE(review): 'emtpy' is a long-standing upstream typo; a namespace
        # literally named 'empty' is therefore still passed via -n.  Kept
        # as-is because fixing the string would change behavior.
        elif self.namespace is not None and self.namespace.lower() not in ['none', 'emtpy']:  # E501
            cmds.extend(['-n', self.namespace])

        rval = {}
        results = ''
        err = None

        if self.verbose:
            print(' '.join(cmds))

        try:
            returncode, stdout, stderr = self._run(cmds, input_data)
        except OSError as ex:
            returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)

        rval = {"returncode": returncode,
                "results": results,
                "cmd": ' '.join(cmds)}

        if returncode == 0:
            if output:
                if output_type == 'json':
                    try:
                        rval['results'] = json.loads(stdout)
                    except ValueError as verr:
                        # BUG FIX: this previously bound the exception to the
                        # outer ``err`` name; Python 3 deletes that name when
                        # the except block exits (PEP 3110), so the later
                        # ``if err:`` raised UnboundLocalError on any JSON
                        # parse failure.  Bind to a fresh name instead.
                        # (The message test matches Python 2's ValueError
                        # text and is preserved for compatibility.)
                        if "No JSON object could be decoded" in verr.args:
                            err = verr.args
                elif output_type == 'raw':
                    rval['results'] = stdout

            if self.verbose:
                print("STDOUT: {0}".format(stdout))
                print("STDERR: {0}".format(stderr))

            if err:
                rval.update({"err": err,
                             "stderr": stderr,
                             "stdout": stdout,
                             "cmd": cmds})

        else:
            rval.update({"stderr": stderr,
                         "stdout": stdout,
                         "results": {}})

        return rval
class Utils(object):
    ''' utilities for openshiftcli modules '''

    @staticmethod
    def _write(filename, contents):
        ''' Actually write the file contents to disk. This helps with mocking. '''
        with open(filename, 'w') as sfd:
            sfd.write(contents)

    @staticmethod
    def create_tmp_file_from_contents(rname, data, ftype='yaml'):
        ''' Create a temp file (prefixed with ``rname``) holding ``data``
        serialized as yaml, json, or raw text; returns the file path.
        Cleanup of the file is registered with atexit. '''
        tmp = Utils.create_tmpfile(prefix=rname)

        if ftype == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripDumper'):
                Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
            else:
                Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))

        elif ftype == 'json':
            Utils._write(tmp, json.dumps(data))
        else:
            Utils._write(tmp, data)

        # Register cleanup when module is done
        atexit.register(Utils.cleanup, [tmp])
        return tmp

    @staticmethod
    def create_tmpfile_copy(inc_file):
        '''Create a temporary copy of a file and return its path.'''
        tmpfile = Utils.create_tmpfile('lib_openshift-')
        # NOTE(review): the source file handle is never closed explicitly;
        # CPython closes it on GC.
        Utils._write(tmpfile, open(inc_file).read())
        # Cleanup the tmpfile
        atexit.register(Utils.cleanup, [tmpfile])
        return tmpfile

    @staticmethod
    def create_tmpfile(prefix='tmp'):
        ''' Generates and returns a temporary file name '''
        with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
            return tmp.name

    @staticmethod
    def create_tmp_files_from_contents(content, content_type=None):
        '''Turn an array of dict: filename, content into a files array.

        Each item must have 'path' and 'data' keys; returns a list of
        {'name', 'path'} dicts pointing at the created temp files.
        '''
        if not isinstance(content, list):
            content = [content]
        files = []
        for item in content:
            path = Utils.create_tmp_file_from_contents(item['path'] + '-',
                                                       item['data'],
                                                       ftype=content_type)
            files.append({'name': os.path.basename(item['path']),
                          'path': path})
        return files

    @staticmethod
    def cleanup(files):
        '''Remove the given files/directories if they still exist (atexit hook).'''
        for sfile in files:
            if os.path.exists(sfile):
                if os.path.isdir(sfile):
                    shutil.rmtree(sfile)
                elif os.path.isfile(sfile):
                    os.remove(sfile)

    @staticmethod
    def exists(results, _name):
        ''' Check to see if the results include the name '''
        if not results:
            return False

        if Utils.find_result(results, _name):
            return True

        return False

    @staticmethod
    def find_result(results, _name):
        '''Return the first result whose metadata.name matches, else None.'''
        rval = None
        for result in results:
            if 'metadata' in result and result['metadata']['name'] == _name:
                rval = result
                break

        return rval

    @staticmethod
    def get_resource_file(sfile, sfile_type='yaml'):
        '''Read and parse a resource file (yaml or json); raw text otherwise.'''
        contents = None
        with open(sfile) as sfd:
            contents = sfd.read()

        if sfile_type == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripLoader'):
                contents = yaml.load(contents, yaml.RoundTripLoader)
            else:
                contents = yaml.safe_load(contents)
        elif sfile_type == 'json':
            contents = json.loads(contents)

        return contents

    @staticmethod
    def filter_versions(stdout):
        '''Parse `oc version` output into {'oc': ..., 'openshift': ..., 'kubernetes': ...}.'''
        version_dict = {}
        version_search = ['oc', 'openshift', 'kubernetes']

        for line in stdout.strip().split('\n'):
            for term in version_search:
                if not line:
                    continue
                if line.startswith(term):
                    # Version string is the last whitespace-separated token.
                    version_dict[term] = line.split()[-1]

        # horrible hack to get openshift version in Openshift 3.2
        # By default "oc version" in 3.2 does not return an "openshift" version
        # NOTE(review): raises KeyError if 'oc' was absent from the output.
        if "openshift" not in version_dict:
            version_dict["openshift"] = version_dict["oc"]

        return version_dict

    @staticmethod
    def add_custom_versions(versions):
        '''Derive '<tech>_numeric' and '<tech>_short' entries from raw versions.'''
        versions_dict = {}

        for tech, version in versions.items():
            # clean up "-" from version (drop any pre-release/build suffix)
            if "-" in version:
                version = version.split("-")[0]

            if version.startswith('v'):
                # strip the 'v' and any '+gitsha' suffix for the numeric form
                versions_dict[tech + '_numeric'] = version[1:].split('+')[0]
                # "v3.3.0.33" is what we have, we want "3.3"
                # NOTE(review): a fixed [1:4] slice — assumes single-digit
                # major/minor components.
                versions_dict[tech + '_short'] = version[1:4]

        return versions_dict

    @staticmethod
    def openshift_installed():
        ''' check if openshift is installed '''
        # NOTE(review): the yum module is Python 2 / RPM-platform only.
        import yum

        yum_base = yum.YumBase()
        if yum_base.rpmdb.searchNevra(name='atomic-openshift'):
            return True

        return False

    # Disabling too-many-branches. This is a yaml dictionary comparison function
    # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
    @staticmethod
    def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
        ''' Given a user defined definition, compare it with the results given back by our query.

        Iterates the keys of ``result_def`` (so extra keys present only in
        ``user_def`` are ignored), skipping 'metadata', 'status', and any
        ``skip_keys``.  Recurses into nested dicts and lists of dicts.
        Returns True when everything compared matches.
        '''
        # Currently these values are autogenerated and we do not need to check them
        skip = ['metadata', 'status']
        if skip_keys:
            skip.extend(skip_keys)

        for key, value in result_def.items():
            if key in skip:
                continue

            # Both are lists
            if isinstance(value, list):
                if key not in user_def:
                    if debug:
                        print('User data does not have key [%s]' % key)
                        print('User data: %s' % user_def)
                    return False

                if not isinstance(user_def[key], list):
                    if debug:
                        print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
                    return False

                if len(user_def[key]) != len(value):
                    if debug:
                        print("List lengths are not equal.")
                        print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
                        print("user_def: %s" % user_def[key])
                        print("value: %s" % value)
                    return False

                for values in zip(user_def[key], value):
                    if isinstance(values[0], dict) and isinstance(values[1], dict):
                        if debug:
                            print('sending list - list')
                            print(type(values[0]))
                            print(type(values[1]))
                        result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
                        if not result:
                            print('list compare returned false')
                            return False

                    # Non-dict list members: fall back to whole-list equality.
                    elif value != user_def[key]:
                        if debug:
                            print('value should be identical')
                            print(user_def[key])
                            print(value)
                        return False

            # recurse on a dictionary
            elif isinstance(value, dict):
                if key not in user_def:
                    if debug:
                        print("user_def does not have key [%s]" % key)
                    return False
                if not isinstance(user_def[key], dict):
                    if debug:
                        print("dict returned false: not instance of dict")
                    return False

                # before passing ensure keys match
                api_values = set(value.keys()) - set(skip)
                user_values = set(user_def[key].keys()) - set(skip)
                if api_values != user_values:
                    if debug:
                        print("keys are not equal in dict")
                        print(user_values)
                        print(api_values)
                    return False

                result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
                if not result:
                    if debug:
                        print("dict returned false")
                        print(result)
                    return False

            # Verify each key, value pair is the same
            else:
                if key not in user_def or value != user_def[key]:
                    if debug:
                        print("value not equal; user_def does not have key")
                        print(key)
                        print(value)
                        if key in user_def:
                            print(user_def[key])
                    return False

        if debug:
            print('returning true')
        return True
class OpenShiftCLIConfig(object):
    '''Container for CLI option metadata shared by the oc wrapper modules.'''
    def __init__(self, rname, namespace, kubeconfig, options):
        self.kubeconfig = kubeconfig
        self.name = rname
        self.namespace = namespace
        self._options = options

    @property
    def config_options(self):
        '''Expose the raw option-description dictionary.'''
        return self._options

    def to_option_list(self):
        '''Alias kept for callers that expect the older entry point.'''
        return self.stringify()

    def stringify(self):
        '''Render the included options as a list of ``--key=value`` strings.

        An option is emitted when its 'include' flag is set and its value is
        truthy, or is an int/bool (so 0 and False are still emitted).
        Underscores in option names become dashes.
        '''
        params = []
        for name, spec in self.config_options.items():
            wanted = spec['include'] and (spec['value'] or isinstance(spec['value'], int))
            if wanted:
                params.append('--%s=%s' % (name.replace('_', '-'), spec['value']))
        return params
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: class/oc_version.py -*- -*- -*-
# pylint: disable=too-many-instance-attributes
class OCVersion(OpenShiftCLI):
    ''' Class to wrap the oc command line tools '''
    # pylint allows 5
    # pylint: disable=too-many-arguments
    def __init__(self,
                 config,
                 debug):
        ''' Constructor for OCVersion.

        config: kubeconfig path, forwarded to OpenShiftCLI (namespace=None)
        debug: debug flag stored on the instance
        '''
        super(OCVersion, self).__init__(None, config)
        self.debug = debug

    def get(self):
        '''Run `oc version` and return a dict of parsed + derived versions.

        Raises OpenShiftCLIError when the command exits non-zero.
        '''
        results = {}
        version_results = self._version()
        if version_results['returncode'] == 0:
            filtered_vers = Utils.filter_versions(version_results['results'])
            custom_vers = Utils.add_custom_versions(filtered_vers)
            results['returncode'] = version_results['returncode']
            results.update(filtered_vers)
            results.update(custom_vers)
            return results

        raise OpenShiftCLIError('Problem detecting openshift version.')

    @staticmethod
    def run_ansible(params):
        '''run the idempotent ansible code'''
        oc_version = OCVersion(params['kubeconfig'], params['debug'])

        if params['state'] == 'list':
            #pylint: disable=protected-access
            result = oc_version.get()
            return {'state': params['state'],
                    'results': result,
                    'changed': False}
        # NOTE(review): any other state falls through and returns None; the
        # module's argument_spec restricts state to 'list', so in practice
        # this path is unreachable.
# -*- -*- -*- End included fragment: class/oc_version.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: ansible/oc_version.py -*- -*- -*-
def main():
    ''' ansible oc module for version.

    Declares the module arguments, delegates to OCVersion.run_ansible,
    and reports the result through exit_json/fail_json.
    '''
    module = AnsibleModule(
        argument_spec=dict(
            kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
            # Only read-only 'list' is supported by this module.
            state=dict(default='list', type='str',
                       choices=['list']),
            debug=dict(default=False, type='bool'),
        ),
        supports_check_mode=True,
    )

    rval = OCVersion.run_ansible(module.params)
    if 'failed' in rval:
        module.fail_json(**rval)

    module.exit_json(**rval)


if __name__ == '__main__':
    main()
# -*- -*- -*- End included fragment: ansible/oc_version.py -*- -*- -*-
| {
"content_hash": "1c36a27721de21a3a201ddc2156e9677",
"timestamp": "",
"source": "github",
"line_count": 1377,
"max_line_length": 118,
"avg_line_length": 33.40885984023239,
"alnum_prop": 0.5256934179636553,
"repo_name": "joelddiaz/openshift-tools",
"id": "37803cc0c1f57d7e5502cf1973e2ea6c6ddbee3a",
"size": "47166",
"binary": false,
"copies": "12",
"ref": "refs/heads/prod",
"path": "openshift/installer/vendored/openshift-ansible-3.4.40/roles/lib_openshift/library/oc_version.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "588"
},
{
"name": "Go",
"bytes": "382164"
},
{
"name": "Groovy",
"bytes": "6322"
},
{
"name": "HTML",
"bytes": "102550"
},
{
"name": "JavaScript",
"bytes": "1580"
},
{
"name": "Makefile",
"bytes": "3324"
},
{
"name": "PHP",
"bytes": "35793"
},
{
"name": "Python",
"bytes": "27782442"
},
{
"name": "Shell",
"bytes": "1378642"
},
{
"name": "Vim script",
"bytes": "1836"
}
],
"symlink_target": ""
} |
from __future__ import print_function, division
import os
import sys
import stat
import math
import time
from datetime import datetime
try:
import cPickle as pickle
except ImportError:
import pickle
from mapproxy.layer import map_extent_from_grid
import logging
log = logging.getLogger(__name__)
class bidict(dict):
    """
    Minimal two-way mapping: every (key, value) pair from the iterator is
    stored in both directions, so lookups work by key or by value.
    """
    def __init__(self, iterator):
        for fwd, rev in iterator:
            dict.__setitem__(self, fwd, rev)
            dict.__setitem__(self, rev, fwd)
class ETA(object):
    """Estimate the completion time of a long-running seed task.

    Progress is quantized into ``ticks`` slices; each observed slice duration
    feeds an exponentially-decayed running average that is extrapolated over
    the remaining slices.
    """
    def __init__(self):
        self.avgs = []  # NOTE(review): appears unused within this class
        self.last_tick_start = time.time()
        self.progress = 0.0
        self.ticks = 10000
        self.tick_duration_sums = 0.0
        self.tick_duration_divisor = 0.0
        self.tick_count = 0

    def update(self, progress):
        """Record a new overall progress value (fraction in [0, 1])."""
        self.progress = progress
        # Number of quantized ticks completed since the last update.
        missing_ticks = (self.progress * self.ticks) - self.tick_count
        if missing_ticks:
            tick_duration = (time.time() - self.last_tick_start) / missing_ticks

            while missing_ticks > 0:
                # reduce the influence of older measurements
                self.tick_duration_sums *= 0.999
                self.tick_duration_divisor *= 0.999

                self.tick_count += 1
                self.tick_duration_sums += tick_duration
                self.tick_duration_divisor += 1

                missing_ticks -= 1

            self.last_tick_start = time.time()

    def eta_string(self):
        """ETA formatted as 'YYYY-mm-dd-HH:MM:SS', or 'N/A' when unknown."""
        timestamp = self.eta()
        if timestamp is None:
            return 'N/A'

        try:
            return time.strftime('%Y-%m-%d-%H:%M:%S', time.localtime(timestamp))
        except ValueError:
            # raised when time is out of range (e.g. year >2038)
            return 'N/A'

    def eta(self):
        """Predicted completion as a Unix timestamp, or None before any tick."""
        if not self.tick_count: return
        # remaining ticks * decayed average tick duration, from the last tick.
        return (self.last_tick_start +
                ((self.tick_duration_sums/self.tick_duration_divisor)
                 * (self.ticks - self.tick_count)))

    def __str__(self):
        return self.eta_string()
class ProgressStore(object):
    """
    Reads and stores seed progresses to a file (pickled dict of
    task identifier -> progress identifier).
    """
    def __init__(self, filename=None, continue_seed=True):
        # NOTE(review): load()/write() assume ``filename`` is a path string;
        # the default None with continue_seed=True would fail inside
        # os.path.exists — confirm callers always pass a filename.
        self.filename = filename
        if continue_seed:
            self.status = self.load()
        else:
            self.status = {}

    def load(self):
        """Return the pickled status dict, or {} when missing or unreadable."""
        if not os.path.exists(self.filename):
            pass
        elif os.stat(self.filename).st_mode & stat.S_IWOTH:
            # Refuse world-writable files: unpickling attacker-controlled
            # data can execute arbitrary code.
            log.error('progress file (%s) is world writable, ignoring file',
                      self.filename)
        else:
            with open(self.filename, 'rb') as f:
                try:
                    return pickle.load(f)
                except (pickle.UnpicklingError, AttributeError,
                        EOFError, ImportError, IndexError):
                    log.error('unable to read progress file (%s), ignoring file',
                              self.filename)
        return {}

    def write(self):
        """Persist the status dict atomically (temp file + fsync + rename)."""
        try:
            with open(self.filename + '.tmp', 'wb') as f:
                f.write(pickle.dumps(self.status))
                f.flush()
                os.fsync(f.fileno())
            os.rename(self.filename + '.tmp', self.filename)
        except (IOError, OSError) as ex:
            # Best-effort: losing a checkpoint only costs resume granularity.
            log.error('unable to write seed progress: %s', ex)

    def remove(self):
        """Forget all progress and delete the backing file if present."""
        self.status = {}
        if os.path.exists(self.filename):
            os.remove(self.filename)

    def get(self, task_identifier):
        """Progress identifier previously stored for the task, or None."""
        return self.status.get(task_identifier, None)

    def add(self, task_identifier, progress_identifier):
        """Record (in memory only) the current progress for a task."""
        self.status[task_identifier] = progress_identifier
class ProgressLog(object):
    """Write human-readable seed progress to a stream and optionally
    checkpoint resume points through a ProgressStore."""
    def __init__(self, out=None, silent=False, verbose=True, progress_store=None):
        if not out:
            out = sys.stdout
        self.out = out
        self.lastlog = time.time()
        self.verbose = verbose
        self.silent = silent
        self.current_task_id = None
        self.progress_store = progress_store

    def log_message(self, msg):
        """Write a single timestamped message line."""
        self.out.write('[%s] %s\n' % (
            timestamp(), msg,
        ))
        self.out.flush()

    def log_step(self, progress):
        """Refresh the in-place progress line (carriage return, no newline)."""
        if not self.verbose:
            return
        if (self.lastlog + .1) < time.time():
            # log progress at most every 100ms
            self.out.write('[%s] %6.2f%%\t%-20s ETA: %s\r' % (
                timestamp(), progress.progress*100, progress.progress_str,
                progress.eta
            ))
            self.out.flush()
            self.lastlog = time.time()

    def log_progress(self, progress, level, bbox, tiles):
        """Log a finished work unit; checkpoint to the progress store first."""
        if self.progress_store and self.current_task_id:
            self.progress_store.add(self.current_task_id,
                                    progress.current_progress_identifier())
            self.progress_store.write()

        if self.silent:
            return
        self.out.write('[%s] %2s %6.2f%% %s (%d tiles) ETA: %s\n' % (
            timestamp(), level, progress.progress*100,
            format_bbox(bbox), tiles, progress.eta))
        self.out.flush()
def limit_sub_bbox(bbox, sub_bbox):
    """
    Clip ``sub_bbox`` so it does not extend beyond ``bbox``.

    >>> limit_sub_bbox((0, 1, 10, 11), (-1, -1, 9, 8))
    (0, 1, 9, 8)
    >>> limit_sub_bbox((0, 0, 10, 10), (5, 2, 18, 18))
    (5, 2, 10, 10)
    """
    left = max(bbox[0], sub_bbox[0])
    bottom = max(bbox[1], sub_bbox[1])
    right = min(bbox[2], sub_bbox[2])
    top = min(bbox[3], sub_bbox[3])
    return left, bottom, right, top
def timestamp():
    """Current local wall-clock time rendered as ``HH:MM:SS``."""
    return format(datetime.now(), '%H:%M:%S')
def format_bbox(bbox):
    """Render a bbox of four coordinates with five decimal places each."""
    minx, miny, maxx, maxy = tuple(bbox)
    return '%.5f, %.5f, %.5f, %.5f' % (minx, miny, maxx, maxy)
def status_symbol(i, total):
    """
    Map step ``i`` of ``total`` to a one-character progress glyph.

    >>> status_symbol(0, 1)
    '0'
    >>> [status_symbol(i, 4) for i in range(5)]
    ['.', 'o', 'O', '0', 'X']
    >>> [status_symbol(i, 10) for i in range(11)]
    ['.', '.', 'o', 'o', 'o', 'O', 'O', '0', '0', '0', 'X']
    """
    marks = ' .oO0'
    step = i + 1
    # Past the end of the run: show the overflow marker.
    if step > total:
        return 'X'
    # Scale the 1-based step onto the four visible glyphs.
    return marks[int(math.ceil(step / (total / 4)))]
class BackoffError(Exception):
    """Raised by exp_backoff() after the maximum number of retries failed."""
    pass
def exp_backoff(func, args=(), kw=None, max_repeat=10, start_backoff_sec=2,
                exceptions=(Exception,), ignore_exceptions=tuple(), max_backoff=60):
    """Call ``func(*args, **kw)`` until it succeeds, with exponential backoff.

    :param func: callable to invoke
    :param args: positional arguments for ``func``
    :param kw: keyword arguments for ``func`` (default: none)
    :param max_repeat: retries allowed before giving up with BackoffError
    :param start_backoff_sec: initial wait; doubles each retry up to ``max_backoff``
    :param exceptions: exception types that trigger a counted retry
    :param ignore_exceptions: exception types retried immediately and
        without limit (only a 10ms pause)
    :returns: the first successful return value of ``func``
    :raises BackoffError: after ``max_repeat`` failed retries
    """
    # BUG FIX: the default was a mutable ``kw={}``; use the None sentinel.
    kw = {} if kw is None else kw
    n = 0
    while True:
        try:
            result = func(*args, **kw)
        except ignore_exceptions:
            time.sleep(0.01)
        except exceptions as ex:
            if n >= max_repeat:
                # BUG FIX: this was Python 2 ``print >>sys.stderr, ...``
                # syntax, which under the module's
                # ``from __future__ import print_function`` evaluates
                # ``print >> sys.stderr`` and raises TypeError at runtime.
                print("An error occured. Giving up", file=sys.stderr)
                raise BackoffError
            wait_for = start_backoff_sec * 2**n
            if wait_for > max_backoff:
                wait_for = max_backoff
            print("An error occured. Retry in %d seconds: %r. Retries left: %d" %
                  (wait_for, ex, (max_repeat - n)), file=sys.stderr)
            time.sleep(wait_for)
            n += 1
        else:
            return result
def format_seed_task(task):
    """Return a multi-line, human-readable summary of a seed task.

    Describes the cache/grid, coverage (or full grid extent), levels, and
    the overwrite policy derived from ``task.refresh_timestamp``.
    """
    info = []
    info.append('  %s:' % (task.md['name'], ))
    if task.coverage is False:
        info.append("    Empty coverage given for this task")
        info.append("    Skipped")
        return '\n'.join(info)

    info.append("    Seeding cache '%s' with grid '%s' in %s" % (
        task.md['cache_name'], task.md['grid_name'], task.grid.srs.srs_code))
    if task.coverage:
        info.append('    Limited to coverage in: %s (EPSG:4326)' % (format_bbox(task.coverage.extent.llbbox), ))
    else:
        info.append('    Complete grid: %s (EPSG:4326)' % (format_bbox(map_extent_from_grid(task.grid).llbbox), ))
    info.append('    Levels: %s' % (task.levels, ))
    if task.refresh_timestamp:
        # Truthy timestamp: refresh tiles older than that moment.
        info.append('    Overwriting: tiles older than %s' %
                    datetime.fromtimestamp(task.refresh_timestamp))
    elif task.refresh_timestamp == 0:
        # Explicit zero means "refresh everything".
        info.append('    Overwriting: all tiles')
    else:
        # None/absent: only seed tiles that do not exist yet.
        info.append('    Overwriting: no tiles')
    return '\n'.join(info)
def format_cleanup_task(task):
    """Return a multi-line, human-readable summary of a cleanup task.

    Mirrors format_seed_task(), but describes removal and uses
    ``task.remove_timestamp`` for the removal policy.
    """
    info = []
    info.append('  %s:' % (task.md['name'], ))
    if task.coverage is False:
        info.append("    Empty coverage given for this task")
        info.append("    Skipped")
        return '\n'.join(info)

    info.append("    Cleaning up cache '%s' with grid '%s' in %s" % (
        task.md['cache_name'], task.md['grid_name'], task.grid.srs.srs_code))
    if task.coverage:
        info.append('    Limited to coverage in: %s (EPSG:4326)' % (format_bbox(task.coverage.extent.llbbox), ))
    else:
        info.append('    Complete grid: %s (EPSG:4326)' % (format_bbox(map_extent_from_grid(task.grid).llbbox), ))
    info.append('    Levels: %s' % (task.levels, ))
    if task.remove_timestamp:
        # Truthy timestamp: only remove tiles older than that moment.
        info.append('    Remove: tiles older than %s' %
                    datetime.fromtimestamp(task.remove_timestamp))
    else:
        info.append('    Remove: all tiles')
    return '\n'.join(info)
| {
"content_hash": "506c2c0fc2db608e9fe6388e7ee9265f",
"timestamp": "",
"source": "github",
"line_count": 279,
"max_line_length": 114,
"avg_line_length": 32.17921146953405,
"alnum_prop": 0.5457785698373803,
"repo_name": "kaiCu/mapproxy",
"id": "48989980b2c688267b073594fae3478ad2f97020",
"size": "9628",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "mapproxy/seed/util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12401"
},
{
"name": "HTML",
"bytes": "18665"
},
{
"name": "Makefile",
"bytes": "1045"
},
{
"name": "Python",
"bytes": "1590097"
}
],
"symlink_target": ""
} |
import unittest
import uuid
import datetime
from boto.mturk.question import ExternalQuestion
from ._init_environment import SetHostMTurkConnection, external_url, \
config_environment
class Test(unittest.TestCase):
    """Live MTurk integration test: create a HIT from an ExternalQuestion.

    Requires environment-provided credentials/host (see config_environment);
    talks to the real (sandbox) service, so it is not a pure unit test.
    """
    def setUp(self):
        # Pull connection settings / sandbox host from the environment.
        config_environment()

    def test_create_hit_external(self):
        q = ExternalQuestion(external_url=external_url, frame_height=800)
        conn = SetHostMTurkConnection()
        keywords=['boto', 'test', 'doctest']
        # lifetime/duration/approval_delay are in seconds, reward in USD.
        create_hit_rs = conn.create_hit(question=q, lifetime=60*65, max_assignments=2, title="Boto External Question Test", keywords=keywords, reward = 0.05, duration=60*6, approval_delay=60*60, annotation='An annotation from boto external question test', response_groups=['Minimal', 'HITDetail', 'HITQuestion', 'HITAssignmentSummary',])
        assert(create_hit_rs.status == True)

if __name__ == "__main__":
    unittest.main()
| {
"content_hash": "f5d21e196953ec9da6be44d7eaadefb3",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 345,
"avg_line_length": 45.95238095238095,
"alnum_prop": 0.6652849740932643,
"repo_name": "catapult-project/catapult",
"id": "82ec08afa92d793a68571c87c8b02491edb6e8cb",
"size": "965",
"binary": false,
"copies": "11",
"ref": "refs/heads/main",
"path": "third_party/gsutil/gslib/vendored/boto/tests/mturk/create_hit_external.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1324"
},
{
"name": "C++",
"bytes": "46069"
},
{
"name": "CSS",
"bytes": "23376"
},
{
"name": "Dockerfile",
"bytes": "1541"
},
{
"name": "Go",
"bytes": "114396"
},
{
"name": "HTML",
"bytes": "12394298"
},
{
"name": "JavaScript",
"bytes": "1559584"
},
{
"name": "Makefile",
"bytes": "1774"
},
{
"name": "Python",
"bytes": "6778695"
},
{
"name": "Shell",
"bytes": "2288"
}
],
"symlink_target": ""
} |
import sqlite3
import Graffity
import glob
import os
connection = sqlite3.connect(os.environ.get('CIAO_SQL')+'Dataloggers.db')
cursor = connection.cursor()

CDMS_BaseDir = '/home/cdeen/Code/CIAO/SPARTA/SPARTA_CIAO/CONFIG/spcicfg'
CDMS_ConfigDir = '/config/RTCDATA/CIAO/DEFAULT/'

# Add the tip/tilt power columns to each of the four CIAO datalogger tables.
# Same ALTER statements as before, just generated in a loop (CIAO_1..CIAO_4,
# TIPPOWER then TILTPOWER for each table).
for ciao_index in (1, 2, 3, 4):
    for column_name in ('TIPPOWER', 'TILTPOWER'):
        cursor.execute("ALTER TABLE CIAO_%d_DataLoggers ADD %s"
                       % (ciao_index, column_name))

connection.commit()
connection.close()
| {
"content_hash": "cfbe3161924cc7adc16d55e96359a482",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 73,
"avg_line_length": 27.56756756756757,
"alnum_prop": 0.796078431372549,
"repo_name": "soylentdeen/Graffity",
"id": "2776d3ed6b18f77497758a59322c1d9f8073090d",
"size": "1020",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/SQLTools/addColumn.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mathematica",
"bytes": "101623"
},
{
"name": "Python",
"bytes": "633170"
}
],
"symlink_target": ""
} |
from enum import Enum
class RdsDbType(Enum):
    """The two RDS deployment types supported here: "instance" and "cluster"."""

    INSTANCE = "instance"
    CLUSTER = "cluster"
| {
"content_hash": "80295df31bc06f9f0d2059a189fe3f97",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 42,
"avg_line_length": 18.875,
"alnum_prop": 0.6556291390728477,
"repo_name": "lyft/incubator-airflow",
"id": "154f65b5560c11a30a0590713239b287e4ddac40",
"size": "937",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "airflow/providers/amazon/aws/utils/rds.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "17280"
},
{
"name": "HTML",
"bytes": "161328"
},
{
"name": "JavaScript",
"bytes": "25360"
},
{
"name": "Jinja",
"bytes": "8565"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "10019710"
},
{
"name": "Shell",
"bytes": "220780"
}
],
"symlink_target": ""
} |
from pyxb_114.exceptions_ import *
import unittest
import pyxb_114.binding.datatypes as xsd
class _TestIntegerType (object):
    """Base class for testing any datatype that descends from integer.
    Subclasses should define class variables:
    THIS_TYPE = the xsd datatype class
    PARENT_TYPE = the next dominating type in the hierarchy
    MIN_IN_RANGE = the minimum expressible value
    MAX_IN_RANGE = the maximum expressible value
    Optional values to set:
    ZERO_IN_RANGE = False if zero not valid for subclass; default is True
    """
    # Defaults: None means "no bound on this side" and skips those checks.
    MIN_IN_RANGE = None
    ZERO_IN_RANGE = True
    MAX_IN_RANGE = None
    def testParentage (self):
        # THIS_TYPE must report PARENT_TYPE as its XSD supertype.
        self.assertTrue(self.PARENT_TYPE == self.THIS_TYPE.XsdSuperType())
    def testRange (self):
        # Values just outside the range must be rejected, unless the parent
        # type itself cannot represent them (listed in PARENT_EXCLUDE).
        if self.MIN_IN_RANGE is not None:
            if not ((self.MIN_IN_RANGE-1) in self.PARENT_EXCLUDE):
                self.assertRaises(BadTypeValueError, self.THIS_TYPE, self.MIN_IN_RANGE - 1)
            self.assertEquals(self.MIN_IN_RANGE, self.THIS_TYPE(self.MIN_IN_RANGE))
        if self.ZERO_IN_RANGE:
            self.assertEquals(0, self.THIS_TYPE(0))
        if self.MAX_IN_RANGE is not None:
            self.assertEquals(self.MAX_IN_RANGE, self.THIS_TYPE(self.MAX_IN_RANGE))
            if not ((self.MAX_IN_RANGE+1) in self.PARENT_EXCLUDE):
                self.assertRaises(BadTypeValueError, self.THIS_TYPE, self.MAX_IN_RANGE+1)
    # Values the PARENT_TYPE cannot represent either (e.g. -1 for unsigned
    # parents); such values are skipped in the boundary tests above/below.
    PARENT_EXCLUDE = []
    def testStringConversion (self):
        # Round-trip each boundary value through both the lexical (string)
        # and the value-space (parent instance) constructors.
        numbers = [ ]
        if self.MIN_IN_RANGE is not None:
            numbers.extend([self.MIN_IN_RANGE-1, self.MIN_IN_RANGE])
        if self.ZERO_IN_RANGE:
            numbers.append(0)
        if self.MAX_IN_RANGE is not None:
            numbers.extend([self.MAX_IN_RANGE, self.MAX_IN_RANGE+1])
        for n in numbers:
            s = '%d' % (n,)
            p = None
            if not (n in self.PARENT_EXCLUDE):
                p = self.PARENT_TYPE(n)
                self.assertEquals(n, p)
            if ((self.MIN_IN_RANGE is None) or (self.MIN_IN_RANGE <= n)) \
                    and ((self.MAX_IN_RANGE is None) or (n <= self.MAX_IN_RANGE)):
                # In range: construction succeeds and the literal round-trips.
                bs = self.THIS_TYPE(s)
                self.assertEquals(n, bs)
                self.assertEquals(s, bs.xsdLiteral())
                bp = self.THIS_TYPE(p)
                self.assertEquals(n, bp)
            else:
                # Out of range: both forms must be rejected.
                self.assertRaises(BadTypeValueError, self.THIS_TYPE, s)
                if p is not None:
                    self.assertRaises(BadTypeValueError, self.THIS_TYPE, p)
class Test_byte (unittest.TestCase, _TestIntegerType):
    # xsd:byte: signed 8-bit integer, derived from xsd:short.
    THIS_TYPE = xsd.byte
    PARENT_TYPE = xsd.short
    MIN_IN_RANGE = -128
    MAX_IN_RANGE = 127
class Test_unsignedByte (unittest.TestCase, _TestIntegerType):
    # xsd:unsignedByte: 0..255; -1 is also invalid for the unsigned parent.
    THIS_TYPE = xsd.unsignedByte
    PARENT_TYPE = xsd.unsignedShort
    PARENT_EXCLUDE = [ -1 ]
    MIN_IN_RANGE = 0
    MAX_IN_RANGE = 255
class Test_short (unittest.TestCase, _TestIntegerType):
    # xsd:short: signed 16-bit integer, derived from xsd:int.
    THIS_TYPE = xsd.short
    PARENT_TYPE = xsd.int
    MIN_IN_RANGE = -32768
    MAX_IN_RANGE = 32767
class Test_unsignedShort (unittest.TestCase, _TestIntegerType):
    # xsd:unsignedShort: 0..65535; -1 invalid for the unsigned parent too.
    THIS_TYPE = xsd.unsignedShort
    PARENT_TYPE = xsd.unsignedInt
    PARENT_EXCLUDE = [ -1 ]
    MIN_IN_RANGE = 0
    MAX_IN_RANGE = 65535
class Test_int (unittest.TestCase, _TestIntegerType):
    # xsd:int: signed 32-bit integer, derived from xsd:long.
    THIS_TYPE = xsd.int
    PARENT_TYPE = xsd.long
    MIN_IN_RANGE = -2147483648
    MAX_IN_RANGE = 2147483647
class Test_unsignedInt (unittest.TestCase, _TestIntegerType):
    # xsd:unsignedInt: 0..2^32-1; -1 invalid for the unsigned parent too.
    THIS_TYPE = xsd.unsignedInt
    PARENT_TYPE = xsd.unsignedLong
    PARENT_EXCLUDE = [ -1 ]
    MIN_IN_RANGE = 0
    MAX_IN_RANGE = 4294967295
class Test_long (unittest.TestCase, _TestIntegerType):
    # xsd:long: signed 64-bit integer, derived from the unbounded xsd:integer.
    THIS_TYPE = xsd.long
    PARENT_TYPE = xsd.integer
    MIN_IN_RANGE = -9223372036854775808
    MAX_IN_RANGE = 9223372036854775807
class Test_unsignedLong (unittest.TestCase, _TestIntegerType):
    # xsd:unsignedLong: 0..2^64-1; -1 invalid for nonNegativeInteger too.
    THIS_TYPE = xsd.unsignedLong
    PARENT_TYPE = xsd.nonNegativeInteger
    PARENT_EXCLUDE = [ -1 ]
    MIN_IN_RANGE = 0
    MAX_IN_RANGE = 18446744073709551615
class Test_negativeInteger (unittest.TestCase, _TestIntegerType):
    # xsd:negativeInteger: strictly negative, so zero is out of range.
    ZERO_IN_RANGE = False
    THIS_TYPE = xsd.negativeInteger
    PARENT_TYPE = xsd.nonPositiveInteger
    MAX_IN_RANGE = -1
class Test_nonPositiveInteger (unittest.TestCase, _TestIntegerType):
    # xsd:nonPositiveInteger: unbounded below, maximum is zero.
    THIS_TYPE = xsd.nonPositiveInteger
    PARENT_TYPE = xsd.integer
    MAX_IN_RANGE = 0
class Test_nonNegativeInteger (unittest.TestCase, _TestIntegerType):
    # xsd:nonNegativeInteger: minimum zero, unbounded above.
    THIS_TYPE = xsd.nonNegativeInteger
    PARENT_TYPE = xsd.integer
    MIN_IN_RANGE = 0
class Test_positiveInteger (unittest.TestCase, _TestIntegerType):
    # xsd:positiveInteger: strictly positive, so zero is out of range.
    THIS_TYPE = xsd.positiveInteger
    PARENT_TYPE = xsd.nonNegativeInteger
    MIN_IN_RANGE = 1
    ZERO_IN_RANGE = False
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "e500ff6840f923c3eb204ce94dfb29a6",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 91,
"avg_line_length": 34.241134751773046,
"alnum_prop": 0.6425020712510356,
"repo_name": "msherry/PyXB-1.1.4",
"id": "7985e9e22474dcaa94df8993b869259c99f7d103",
"size": "4828",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/datatypes/test-IntegerTypes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "6307"
},
{
"name": "Python",
"bytes": "1521054"
},
{
"name": "Shell",
"bytes": "23730"
}
],
"symlink_target": ""
} |
"""
Classes for reading/manipulating/writing exciting input files.
"""
import xml.etree.ElementTree as ET
import numpy as np
import scipy.constants as const
from monty.io import zopen
from monty.json import MSONable
from pymatgen.core.lattice import Lattice
from pymatgen.core.periodic_table import Element
from pymatgen.core.structure import Structure
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
from pymatgen.symmetry.bandstructure import HighSymmKpath
__author__ = "Christian Vorwerk"
__copyright__ = "Copyright 2016"
__version__ = "1.0"
__maintainer__ = "Christian Vorwerk"
__email__ = "vorwerk@physik.hu-berlin.de"
__status__ = "Development"
__date__ = "Nov 28, 2016"
class ExcitingInput(MSONable):
    """
    Object for representing the data stored in the structure part of the
    exciting input.
    .. attribute:: structure
    Associated Structure.
    .. attribute:: title
    Optional title string.
    .. attribute:: lockxyz
    Lockxyz attribute for each site if available. A Nx3 array of
    booleans.
    """

    def __init__(self, structure, title=None, lockxyz=None):
        """
        Args:
            structure (Structure): Structure object.
            title (str): Optional title for exciting input. Defaults to unit
                cell formula of structure. Defaults to None.
            lockxyz (Nx3 array): bool values for selective dynamics,
                where N is number of sites. Defaults to None.

        Raises:
            ValueError: if the structure has partial occupancies.
        """
        if structure.is_ordered:
            site_properties = {}
            if lockxyz:
                site_properties["selective_dynamics"] = lockxyz
            self.structure = structure.copy(site_properties=site_properties)
            self.title = structure.formula if title is None else title
        else:
            raise ValueError("Structure with partial occupancies cannot be converted into exciting input!")

    # define conversion factor between Bohr radius and Angstrom
    bohr2ang = const.value("Bohr radius") / const.value("Angstrom star")

    @property
    def lockxyz(self):
        """
        :return: Selective dynamics site properties.
        """
        return self.structure.site_properties.get("selective_dynamics")

    @lockxyz.setter
    def lockxyz(self, lockxyz):
        self.structure.add_site_property("selective_dynamics", lockxyz)

    @staticmethod
    def from_string(data):
        """
        Reads the exciting input from a string.

        Args:
            data (str): contents of an exciting ``input.xml`` document.

        Returns:
            ExcitingInput built from the <structure> element.

        Raises:
            ValueError: if a species file name is not a known element symbol.
        """
        root = ET.fromstring(data)
        speciesnode = root.find("structure").iter("species")
        elements = []
        positions = []
        vectors = []
        lockxyz = []
        # get title
        title_in = str(root.find("title").text)
        # Read elements and coordinates
        for nodes in speciesnode:
            # Species file names look like "Si.xml" or "Si_label.xml".
            symbol = nodes.get("speciesfile").split(".")[0]
            if len(symbol.split("_")) == 2:
                symbol = symbol.split("_")[0]
            if Element.is_valid_symbol(symbol):
                # Try to recognize the element symbol
                element = symbol
            else:
                raise ValueError("Unknown element!")
            for atom in nodes.iter("atom"):
                x, y, z = atom.get("coord").split()
                positions.append([float(x), float(y), float(z)])
                elements.append(element)
                # Obtain lockxyz for each atom; each flag is the literal
                # string "true"/"True" when the coordinate is locked.
                if atom.get("lockxyz") is not None:
                    lockxyz.append([flag in ("True", "true") for flag in atom.get("lockxyz").split()])
                else:
                    lockxyz.append([False, False, False])
        # check the atomic positions type.
        # BUGFIX: default `cartesian` to False so it is always defined, and
        # parse the attribute value instead of treating any non-empty string
        # (including "false") as true; also drop the leftover debug print.
        cartesian = False
        if "cartesian" in root.find("structure").attrib.keys():
            if root.find("structure").attrib["cartesian"].strip().lower() == "true":
                cartesian = True
                # exciting stores cartesian coordinates in Bohr; convert
                # them to Angstrom for pymatgen.
                for p in positions:
                    for j in range(3):
                        p[j] = p[j] * ExcitingInput.bohr2ang
        # get the scale attribute
        scale_in = root.find("structure").find("crystal").get("scale")
        if scale_in:
            scale = float(scale_in) * ExcitingInput.bohr2ang
        else:
            scale = ExcitingInput.bohr2ang
        # get the stretch attribute
        stretch_in = root.find("structure").find("crystal").get("stretch")
        if stretch_in:
            # BUGFIX: split the whitespace-separated attribute value;
            # iterating the raw string would iterate single characters and
            # crash on float(" ").
            stretch = np.array([float(a) for a in stretch_in.split()])
        else:
            stretch = np.array([1.0, 1.0, 1.0])
        # get basis vectors and scale them accordingly
        basisnode = root.find("structure").find("crystal").iter("basevect")
        for vect in basisnode:
            x, y, z = vect.text.split()
            vectors.append(
                [
                    float(x) * stretch[0] * scale,
                    float(y) * stretch[1] * scale,
                    float(z) * stretch[2] * scale,
                ]
            )
        # create lattice and structure object
        lattice_in = Lattice(vectors)
        structure_in = Structure(lattice_in, elements, positions, coords_are_cartesian=cartesian)
        return ExcitingInput(structure_in, title_in, lockxyz)

    @staticmethod
    def from_file(filename):
        """
        :param filename: Filename
        :return: ExcitingInput
        """
        with zopen(filename, "rt") as f:
            # The parser does not need line structure; strip newlines.
            data = f.read().replace("\n", "")
        return ExcitingInput.from_string(data)

    def write_etree(self, celltype, cartesian=False, bandstr=False, symprec=0.4, angle_tolerance=5, **kwargs):
        """
        Writes the exciting input parameters to an xml object.
        Args:
            celltype (str): Choice of unit cell. Can be either the unit cell
                from self.structure ("unchanged"), the conventional cell
                ("conventional"), or the primitive unit cell ("primitive").
            cartesian (bool): Whether the atomic positions are provided in
                Cartesian or unit-cell coordinates. Default is False.
            bandstr (bool): Whether the bandstructure path along the
                HighSymmKpath is included in the input file. Only supported if the
                celltype is set to "primitive". Default is False.
            symprec (float): Tolerance for the symmetry finding. Default is 0.4.
            angle_tolerance (float): Angle tolerance for the symmetry finding.
                Default is 5.
            **kwargs: Additional parameters for the input file.
        Returns:
            ET.Element containing the input XML structure
        """
        root = ET.Element("input")
        root.set(
            "{http://www.w3.org/2001/XMLSchema-instance}noNamespaceSchemaLocation",
            "http://xml.exciting-code.org/excitinginput.xsd",
        )
        title = ET.SubElement(root, "title")
        title.text = self.title
        if cartesian:
            structure = ET.SubElement(root, "structure", cartesian="true", speciespath="./")
        else:
            structure = ET.SubElement(root, "structure", speciespath="./")
        crystal = ET.SubElement(structure, "crystal")
        # set scale such that lattice vector can be given in Angstrom
        ang2bohr = const.value("Angstrom star") / const.value("Bohr radius")
        crystal.set("scale", str(ang2bohr))
        # determine which structure to use
        finder = SpacegroupAnalyzer(self.structure, symprec=symprec, angle_tolerance=angle_tolerance)
        if celltype == "primitive":
            new_struct = finder.get_primitive_standard_structure(international_monoclinic=False)
        elif celltype == "conventional":
            new_struct = finder.get_conventional_standard_structure(international_monoclinic=False)
        elif celltype == "unchanged":
            new_struct = self.structure
        else:
            raise ValueError("Type of unit cell not recognized!")
        # write lattice
        basis = new_struct.lattice.matrix
        for i in range(3):
            basevect = ET.SubElement(crystal, "basevect")
            basevect.text = "{:16.8f} {:16.8f} {:16.8f}".format(
                basis[i][0],
                basis[i][1],
                basis[i][2],
            )
        # write atomic positions for each species, ordered by electronegativity
        index = 0
        for i in sorted(new_struct.types_of_species, key=lambda el: el.X):
            species = ET.SubElement(structure, "species", speciesfile=i.symbol + ".xml")
            sites = new_struct.indices_from_symbol(i.symbol)
            for j in sites:
                coord = "{:16.8f} {:16.8f} {:16.8f}".format(
                    new_struct[j].frac_coords[0],
                    new_struct[j].frac_coords[1],
                    new_struct[j].frac_coords[2],
                )
                # obtain cartesian coords from fractional ones if needed
                if cartesian:
                    coord2 = []
                    for k in range(3):
                        # BUGFIX: cartesian component k is the sum over the
                        # three lattice vectors weighted by the corresponding
                        # fractional coordinate; the original multiplied all
                        # three basis terms by frac_coords[k].
                        inter = (
                            new_struct[j].frac_coords[0] * basis[0][k]
                            + new_struct[j].frac_coords[1] * basis[1][k]
                            + new_struct[j].frac_coords[2] * basis[2][k]
                        ) * ang2bohr
                        coord2.append(inter)
                    coord = f"{coord2[0]:16.8f} {coord2[1]:16.8f} {coord2[2]:16.8f}"
                # write atomic positions
                index = index + 1
                _ = ET.SubElement(species, "atom", coord=coord)
        # write bandstructure if needed
        if bandstr and celltype == "primitive":
            kpath = HighSymmKpath(new_struct, symprec=symprec, angle_tolerance=angle_tolerance)
            prop = ET.SubElement(root, "properties")
            bandstrct = ET.SubElement(prop, "bandstructure")
            for i in range(len(kpath.kpath["path"])):
                plot = ET.SubElement(bandstrct, "plot1d")
                path = ET.SubElement(plot, "path", steps="100")
                for j in range(len(kpath.kpath["path"][i])):
                    symbol = kpath.kpath["path"][i][j]
                    coords = kpath.kpath["kpoints"][symbol]
                    coord = f"{coords[0]:16.8f} {coords[1]:16.8f} {coords[2]:16.8f}"
                    if symbol == "\\Gamma":
                        symbol = "GAMMA"
                    _ = ET.SubElement(path, "point", coord=coord, label=symbol)
        elif bandstr and celltype != "primitive":
            raise ValueError(
                "Bandstructure is only implemented for the \
                            standard primitive unit cell!"
            )
        # write extra parameters from kwargs if provided
        self._dicttoxml(kwargs, root)
        return root

    def write_string(self, celltype, cartesian=False, bandstr=False, symprec=0.4, angle_tolerance=5, **kwargs):
        """
        Writes exciting input.xml as a string.
        Args:
            celltype (str): Choice of unit cell. Can be either the unit cell
                from self.structure ("unchanged"), the conventional cell
                ("conventional"), or the primitive unit cell ("primitive").
            cartesian (bool): Whether the atomic positions are provided in
                Cartesian or unit-cell coordinates. Default is False.
            bandstr (bool): Whether the bandstructure path along the
                HighSymmKpath is included in the input file. Only supported if the
                celltype is set to "primitive". Default is False.
            symprec (float): Tolerance for the symmetry finding. Default is 0.4.
            angle_tolerance (float): Angle tolerance for the symmetry finding.
                Default is 5.
            **kwargs: Additional parameters for the input file.
        Returns:
            String
        """
        try:
            root = self.write_etree(celltype, cartesian, bandstr, symprec, angle_tolerance, **kwargs)
            self._indent(root)
            # output should be a string not a bytes object
            string = ET.tostring(root).decode("UTF-8")
        except Exception as exc:
            # Chain the original error so the real cause is not hidden.
            raise ValueError("Incorrect celltype!") from exc
        return string

    def write_file(self, celltype, filename, cartesian=False, bandstr=False, symprec=0.4, angle_tolerance=5, **kwargs):
        """
        Writes exciting input file.
        Args:
            celltype (str): Choice of unit cell. Can be either the unit cell
                from self.structure ("unchanged"), the conventional cell
                ("conventional"), or the primitive unit cell ("primitive").
            filename (str): Filename for exciting input.
            cartesian (bool): Whether the atomic positions are provided in
                Cartesian or unit-cell coordinates. Default is False.
            bandstr (bool): Whether the bandstructure path along the
                HighSymmKpath is included in the input file. Only supported if the
                celltype is set to "primitive". Default is False.
            symprec (float): Tolerance for the symmetry finding. Default is 0.4.
            angle_tolerance (float): Angle tolerance for the symmetry finding.
                Default is 5.
            **kwargs: Additional parameters for the input file.
        """
        try:
            root = self.write_etree(celltype, cartesian, bandstr, symprec, angle_tolerance, **kwargs)
            self._indent(root)
            tree = ET.ElementTree(root)
            tree.write(filename)
        except Exception as exc:
            # Chain the original error so the real cause is not hidden.
            raise ValueError("Incorrect celltype!") from exc

    # Missing PrettyPrint option in the current version of xml.etree.cElementTree
    @staticmethod
    def _indent(elem, level=0):
        """
        Helper method to indent elements.
        :param elem:
        :param level:
        :return:
        """
        i = "\n" + level * "  "
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + "  "
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            for el in elem:
                ExcitingInput._indent(el, level + 1)
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i

    def _dicttoxml(self, paramdict_, element):
        # Recursively translate a nested dict into attributes/subelements:
        # "text()" -> element text, str -> attribute, dict -> subelement,
        # list -> repeated subelements.
        for key, value in paramdict_.items():
            if isinstance(value, str) and key == "text()":
                element.text = value
            elif isinstance(value, str):
                element.attrib[key] = value
            elif isinstance(value, list):
                for item in value:
                    self._dicttoxml(item, ET.SubElement(element, key))
            elif isinstance(value, dict):
                if element.findall(key) == []:
                    self._dicttoxml(value, ET.SubElement(element, key))
                else:
                    self._dicttoxml(value, element.findall(key)[0])
            else:
                print("cannot deal with", key, "=", value)
| {
"content_hash": "b62641fa2477c342990e1d8ea25ade7f",
"timestamp": "",
"source": "github",
"line_count": 389,
"max_line_length": 119,
"avg_line_length": 39.33161953727507,
"alnum_prop": 0.5626143790849674,
"repo_name": "vorwerkc/pymatgen",
"id": "b4774624f3825d95e7224ebad6238fe6d8993e1c",
"size": "15394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymatgen/io/exciting/inputs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "87"
},
{
"name": "CSS",
"bytes": "7572"
},
{
"name": "Cython",
"bytes": "38792"
},
{
"name": "HTML",
"bytes": "12642493"
},
{
"name": "Python",
"bytes": "8941675"
},
{
"name": "Roff",
"bytes": "1407429"
},
{
"name": "Shell",
"bytes": "12010"
}
],
"symlink_target": ""
} |
from __future__ import division
from telemetry.core import util
from telemetry.image_processing import histogram
from telemetry.image_processing import rgba_color
from telemetry.util import external_modules
util.AddDirToPythonPath(util.GetTelemetryDir(), 'third_party', 'png')
import png # pylint: disable=F0401
cv2 = external_modules.ImportOptionalModule('cv2')
np = external_modules.ImportRequiredModule('numpy')
def Channels(image):
  """Return the number of color channels of an (H, W, C) image array."""
  shape = image.shape
  return shape[2]
def Width(image):
  """Return the image width in pixels (second axis of an (H, W, C) array)."""
  shape = image.shape
  return shape[1]
def Height(image):
  """Return the image height in pixels (first axis of an (H, W, C) array)."""
  shape = image.shape
  return shape[0]
def Pixels(image):
  """Return the image as a flat row-major RGB bytearray (array is BGR)."""
  rgb = image[:, :, ::-1]  # Convert from bgr to rgb.
  return bytearray(np.uint8(rgb).flat)
def GetPixelColor(image, x, y):
  """Return the pixel at column x, row y as an RgbaColor (array is BGR)."""
  pixel = image[y][x]
  return rgba_color.RgbaColor(pixel[2], pixel[1], pixel[0])
def WritePngFile(image, path):
  """Write a BGR image array to |path| as PNG.

  Uses cv2 when available, otherwise the pure-python png writer.
  """
  if cv2 is not None:
    cv2.imwrite(path, image)
    return
  with open(path, "wb") as f:
    metadata = {
        'size': (Width(image), Height(image)),
        'alpha': False,
        'bitdepth': 8,
    }
    rgb = image[:, :, ::-1]  # png expects RGB ordering.
    png.Writer(**metadata).write_array(f, rgb.reshape(-1).tolist())
def FromRGBPixels(width, height, pixels, bpp):
  """Build a BGR image array from flat RGB (bpp=3) or RGBA (bpp=4) data."""
  arr = np.array(pixels, order='F', dtype=np.uint8)
  arr.resize((height, width, bpp))
  rgb = arr[:, :, :3] if bpp == 4 else arr  # Drop alpha when present.
  return rgb[:, :, ::-1]  # Convert from rgb to bgr.
def FromPngFile(path):
  """Load a PNG file into a BGR image array (cv2 if available, else pypng)."""
  if cv2 is None:
    with open(path, "rb") as f:
      return FromPng(f.read())
  img = cv2.imread(path, cv2.CV_LOAD_IMAGE_COLOR)
  if img is None:
    raise ValueError('Image at path {0} could not be read'.format(path))
  return img
def FromPng(png_data):
  """Decode in-memory PNG bytes into a BGR image array."""
  if cv2 is None:
    width, height, pixels, meta = png.Reader(bytes=png_data).read_flat()
    bpp = 4 if meta['alpha'] else 3
    return FromRGBPixels(width, height, pixels, bpp)
  file_bytes = np.asarray(bytearray(png_data), dtype=np.uint8)
  return cv2.imdecode(file_bytes, cv2.CV_LOAD_IMAGE_COLOR)
def _SimpleDiff(image1, image2):
  """Per-pixel absolute difference computed without signed overflow."""
  if cv2 is not None:
    return cv2.absdiff(image1, image2)
  high = np.maximum(image1, image2)
  low = np.minimum(image1, image2)
  return high - low
def AreEqual(image1, image2, tolerance, likely_equal):
  """Return True if the images match within |tolerance| per channel.

  likely_equal picks whole-array comparison (fast when equal) versus
  row-by-row comparison (fast when a difference exists).
  """
  if image1.shape != image2.shape:
    return False
  if not tolerance:
    if likely_equal:
      return (image1 == image2).all()
    for row in xrange(Height(image1)):
      if not (image1[row] == image2[row]).all():
        return False
    return True
  if likely_equal:
    return np.amax(_SimpleDiff(image1, image2)) <= tolerance
  for row in xrange(Height(image1)):
    if np.amax(_SimpleDiff(image1[row], image2[row])) > tolerance:
      return False
  return True
def Diff(image1, image2):
  """Return the per-pixel absolute difference of two BGR images.

  Images of differing sizes are padded with black to the union of their
  dimensions before diffing. Raises ValueError if the channel counts differ.
  """
  if image1.shape[2] != image2.shape[2]:
    raise ValueError('Cannot diff images of differing bit depth')
  self_image = image1
  other_image = image2
  if image1.shape[:2] != image2.shape[:2]:
    width = max(Width(image1), Width(image2))
    height = max(Height(image1), Height(image2))
    # BUGFIX: numpy arrays are indexed (row, col) == (height, width); the
    # original allocated (width, height, depth) canvases, which raised or
    # transposed whenever the padded canvas was not square.
    self_image = np.zeros((height, width, image1.shape[2]), np.uint8)
    other_image = np.zeros((height, width, image1.shape[2]), np.uint8)
    self_image[0:Height(image1), 0:Width(image1)] = image1
    other_image[0:Height(image2), 0:Width(image2)] = image2
  return _SimpleDiff(self_image, other_image)
def GetBoundingBox(image, color, tolerance):
  """Finds the minimum box surrounding all pixels matching |color| within
  |tolerance| per channel. Returns ((left, top, width, height), pixel_count),
  or (None, 0) when nothing matches."""
  if cv2 is not None:
    # cv2 path: threshold to a binary mask, then take the bounding rect of
    # all matching contours.
    color = np.array([color.b, color.g, color.r])
    img = cv2.inRange(image, np.subtract(color[0:3], tolerance),
                      np.add(color[0:3], tolerance))
    count = cv2.countNonZero(img)
    if count == 0:
      return None, 0
    contours, _ = cv2.findContours(img, cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE)
    contour = np.concatenate(contours)
    return cv2.boundingRect(contour), count
  else:
    # Pure-numpy fallback: build index arrays of matching pixels.
    if tolerance:
      color = np.array([color.b, color.g, color.r])
      colorm = color - tolerance
      colorp = color + tolerance
      b = image[:, :, 0]
      g = image[:, :, 1]
      r = image[:, :, 2]
      w = np.where(((b >= colorm[0]) & (b <= colorp[0]) &
                    (g >= colorm[1]) & (g <= colorp[1]) &
                    (r >= colorm[2]) & (r <= colorp[2])))
    else:
      # Exact match when no tolerance was requested.
      w = np.where((image[:, :, 0] == color.b) &
                   (image[:, :, 1] == color.g) &
                   (image[:, :, 2] == color.r))
    if len(w[0]) == 0:
      return None, 0
    # NOTE(review): np.where yields row-major order, so w[1][0]/w[1][-1] are
    # the columns of the first/last matching pixels, not necessarily the
    # leftmost/rightmost ones; this matches the cv2 bounding rect only for
    # axis-aligned rectangular regions — confirm that is the intended use.
    return (w[1][0], w[0][0], w[1][-1] - w[1][0] + 1, w[0][-1] - w[0][0] + 1), \
        len(w[0])
def Crop(image, left, top, width, height):
  """Return the sub-image at (left, top) of the given size.

  Raises ValueError if the requested region falls outside the image.
  """
  img_height, img_width = image.shape[:2]
  out_of_bounds = (left < 0 or top < 0 or
                   left + width > img_width or
                   top + height > img_height)
  if out_of_bounds:
    raise ValueError('Invalid dimensions')
  return image[top:top + height, left:left + width]
def GetColorHistogram(image, ignore_color, tolerance):
  """Computes a 256-bin per-channel histogram of |image|, excluding pixels
  within |tolerance| of |ignore_color| (when given). Returns a
  histogram.ColorHistogram in R, G, B channel order."""
  if cv2 is not None:
    mask = None
    if ignore_color is not None:
      # Mask is zero (excluded) where the pixel is inside the ignore range.
      color = np.array([ignore_color.b, ignore_color.g, ignore_color.r])
      mask = ~cv2.inRange(image, np.subtract(color, tolerance),
                          np.add(color, tolerance))
    flatten = np.ndarray.flatten
    hist_b = flatten(cv2.calcHist([image], [0], mask, [256], [0, 256]))
    hist_g = flatten(cv2.calcHist([image], [1], mask, [256], [0, 256]))
    hist_r = flatten(cv2.calcHist([image], [2], mask, [256], [0, 256]))
  else:
    # Pure-numpy fallback: flatten to an (N, 3) pixel list and filter it.
    filtered = image.reshape(-1, 3)
    if ignore_color is not None:
      color = np.array([ignore_color.b, ignore_color.g, ignore_color.r])
      colorm = np.array(color) - tolerance
      colorp = np.array(color) + tolerance
      # NOTE: despite its name, `in_range` is True for pixels OUTSIDE the
      # ignore range on at least one channel, i.e. the pixels to KEEP.
      in_range = ((filtered[:, 0] < colorm[0]) | (filtered[:, 0] > colorp[0]) |
                  (filtered[:, 1] < colorm[1]) | (filtered[:, 1] > colorp[1]) |
                  (filtered[:, 2] < colorm[2]) | (filtered[:, 2] > colorp[2]))
      filtered = np.compress(in_range, filtered, axis = 0)
    if len(filtered[:, 0]) == 0:
      # Every pixel was ignored: return all-zero histograms.
      return histogram.ColorHistogram(np.zeros((256)), np.zeros((256)),
                                      np.zeros((256)), ignore_color)
    hist_b = np.bincount(filtered[:, 0], minlength=256)
    hist_g = np.bincount(filtered[:, 1], minlength=256)
    hist_r = np.bincount(filtered[:, 2], minlength=256)
  return histogram.ColorHistogram(hist_r, hist_g, hist_b, ignore_color)
| {
"content_hash": "fabb22576dd077f5b38cea8817e57d07",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 80,
"avg_line_length": 35.48066298342541,
"alnum_prop": 0.6102460292743693,
"repo_name": "CTSRD-SOAAP/chromium-42.0.2311.135",
"id": "538909668d71cc485689351903c2755dea782bef",
"size": "6585",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tools/telemetry/telemetry/image_processing/image_util_numpy_impl.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "8402"
},
{
"name": "Assembly",
"bytes": "241154"
},
{
"name": "C",
"bytes": "12370053"
},
{
"name": "C++",
"bytes": "266788423"
},
{
"name": "CMake",
"bytes": "27829"
},
{
"name": "CSS",
"bytes": "813488"
},
{
"name": "Emacs Lisp",
"bytes": "2360"
},
{
"name": "Go",
"bytes": "13628"
},
{
"name": "Groff",
"bytes": "5283"
},
{
"name": "HTML",
"bytes": "20131029"
},
{
"name": "Java",
"bytes": "8495790"
},
{
"name": "JavaScript",
"bytes": "12980966"
},
{
"name": "LLVM",
"bytes": "1169"
},
{
"name": "Logos",
"bytes": "6893"
},
{
"name": "Lua",
"bytes": "16189"
},
{
"name": "Makefile",
"bytes": "208709"
},
{
"name": "Objective-C",
"bytes": "1509363"
},
{
"name": "Objective-C++",
"bytes": "7960581"
},
{
"name": "PLpgSQL",
"bytes": "215882"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "432373"
},
{
"name": "Python",
"bytes": "11147426"
},
{
"name": "Ragel in Ruby Host",
"bytes": "104923"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "1207731"
},
{
"name": "Standard ML",
"bytes": "4965"
},
{
"name": "VimL",
"bytes": "4075"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
} |
""" Package summary:
Modules:
database_setup - Set up restaurantmenu database.
lotsofmenus - Populate the database with dummy data.
"""
| {
"content_hash": "c4131ded35f8389d28c367137eeaabd2",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 54,
"avg_line_length": 28,
"alnum_prop": 0.7428571428571429,
"repo_name": "courtneypattison/second-response",
"id": "27156be631bfaf84455606d9a91586be99cac2dc",
"size": "141",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "secondresponse/database/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11171"
},
{
"name": "HTML",
"bytes": "8844"
},
{
"name": "JavaScript",
"bytes": "23824"
},
{
"name": "Python",
"bytes": "16745"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals, absolute_import
from django.shortcuts import render
# Create your views here.
| {
"content_hash": "dd7f8013d6fcdf6bfb20dd976b303f67",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 57,
"avg_line_length": 24.4,
"alnum_prop": 0.7786885245901639,
"repo_name": "kazaname/kurs_django",
"id": "9a0ff308b0e8541f01e9c2f117a033c6a7189595",
"size": "138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shelf/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7677"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals, division, absolute_import
import logging
import urllib
from jinja2 import TemplateSyntaxError
from flexget import plugin
from flexget.event import event
from flexget.task import Task
from flexget.utils.search import normalize_unicode
log = logging.getLogger('search_rss')
class SearchRSS(object):
    """A generic search plugin that can use rss based search feeds. Configure it like rss
    plugin, but include {{{search_term}}} in the url where the search term should go."""
    schema = {'$ref': '/schema/plugin/rss'}
    def search(self, task, entry, config=None):
        # Deferred import to avoid a module-load cycle with the template utils.
        from flexget.utils.template import environment
        # URL-encode each candidate search string; fall back to the entry title.
        search_strings = [urllib.quote(normalize_unicode(s).encode('utf-8'))
                          for s in entry.get('search_strings', [entry['title']])]
        rss_plugin = plugin.get_plugin_by_name('rss')
        entries = set()
        rss_config = rss_plugin.instance.build_config(config)
        try:
            # The configured url is a jinja template with a {{search_term}} slot.
            template = environment.from_string(rss_config['url'])
        except TemplateSyntaxError as e:
            raise plugin.PluginError('Invalid jinja template as rss url: %s' % e)
        rss_config['all_entries'] = True
        # Run the rss input once per search string, reusing the same config
        # object with the url re-rendered each time.
        for search_string in search_strings:
            rss_config['url'] = template.render({'search_term': search_string})
            # TODO: capture some other_fields to try to find seed/peer/content_size numbers?
            try:
                results = rss_plugin.phase_handlers['input'](task, rss_config)
            except plugin.PluginError as e:
                # Best effort: log and continue with the remaining strings.
                log.error('Error attempting to get rss for %s: %s', rss_config['url'], e)
            else:
                entries.update(results)
        return entries
@event('plugin.register')
def register_plugin():
    # Register under the name 'search_rss' in the 'search' group, plugin API v2.
    plugin.register(SearchRSS, 'search_rss', groups=['search'], api_ver=2)
| {
"content_hash": "4a4b7b727e5726c9d543230c3dadb57e",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 92,
"avg_line_length": 39.744680851063826,
"alnum_prop": 0.6504282655246253,
"repo_name": "Pretagonist/Flexget",
"id": "4ff8603f2ff4473b2ce60ee5deba8b87a1ee053e",
"size": "1868",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "flexget/plugins/search_rss.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6197"
},
{
"name": "HTML",
"bytes": "38747"
},
{
"name": "JavaScript",
"bytes": "64717"
},
{
"name": "Python",
"bytes": "2462608"
}
],
"symlink_target": ""
} |
from django.conf import settings
# Fallback values used for any key the Django settings module does not define.
default_settings = {
    'MAILCHIMP_CLIENT_ID': None,
    'MAILCHIMP_CLIENT_SECRET': None,
    'MAILCHIMP_AUTHORIZATION_URL': 'https://login.mailchimp.com/oauth2/authorize',
    'MAILCHIMP_ACCESS_TOKEN_URL': 'https://login.mailchimp.com/oauth2/token',
    'MAILCHIMP_METADATA_URL': 'https://login.mailchimp.com/oauth2/metadata',
    'MAILCHIMP_SUCCESS_REDIRECT_URL': '/',
}
class Settings(object):
    """Snapshot of app settings with per-key fallbacks.

    For every key in ``defaults``, copies ``getattr(app_settings, key)``
    onto this object, falling back to the default value when the attribute
    is missing.
    """
    def __init__(self, app_settings, defaults):
        # BUGFIX/compat: dict.iteritems() exists only on Python 2;
        # items() behaves identically there and also works on Python 3.
        for k, v in defaults.items():
            setattr(self, k, getattr(app_settings, k, v))
# Rebind the module-level name: merge Django's settings with the defaults
# above (deliberately shadows the django.conf import).
settings = Settings(settings, default_settings)
| {
"content_hash": "57100182688fc33709ce86bb94ff6ad8",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 82,
"avg_line_length": 33.26315789473684,
"alnum_prop": 0.6867088607594937,
"repo_name": "avelis/django-plantains",
"id": "8fde42536835529cd7f007ebfd12411299441a89",
"size": "657",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plantains/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7615"
}
],
"symlink_target": ""
} |
"""
Management command for loading all the known classifiers from the official
pypi, or from a file/url.
Note: the PyPI docs say not to add classifiers that are not used in submitted
packages. On the other hand, it can be useful to have a list of classifiers
to choose from if you have to modify package data. Use it if you need it.
"""
from __future__ import with_statement
import urllib
import os.path
from django.core.management.base import BaseCommand
from userpypi.models import Classifier
# Official PyPI endpoint that returns the classifier list as plain text.
CLASSIFIERS_URL = "http://pypi.python.org/pypi?%3Aaction=list_classifiers"
class Command(BaseCommand):
    # NOTE: Python 2 module (print statements, urllib.urlopen).
    help = """Load all classifiers from pypi. If any arguments are given,
    they will be used as paths or urls for classifiers instead of using the
    official pypi list url"""
    def handle(self, *args, **options):
        # No arguments: fall back to the official PyPI classifier list.
        args = args or [CLASSIFIERS_URL]
        cnt = 0
        for location in args:
            print "Loading %s" % location
            lines = self._get_lines(location)
            for name in lines:
                # Idempotent: existing classifiers are left untouched.
                c, created = Classifier.objects.get_or_create(name=name)
                if created:
                    c.save()
                    cnt += 1
        print "Added %s new classifiers from %s source(s)" % (cnt, len(args))
    def _get_lines(self, location):
        """Return a list of lines for a lication that can be a file or
        a url. If path/url doesn't exist, returns an empty list"""
        try: # This is dirty, but OK I think. both net and file ops raise IOE
            if location.startswith(("http://", "https://")):
                fp = urllib.urlopen(location)
                # Drop blank/whitespace-only lines.
                return [e.strip() for e in fp.read().split('\n')
                        if e and not e.isspace()]
            else:
                fp = open(location)
                return [e.strip() for e in fp.readlines()
                        if e and not e.isspace()]
        except IOError:
            # Best effort: report and continue with the other sources.
            print "Couldn't load %s" % location
            return []
| {
"content_hash": "bf3b634e8278e624d628cd0a9f8af030",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 77,
"avg_line_length": 37.43396226415094,
"alnum_prop": 0.6058467741935484,
"repo_name": "mattcaldwell/djangopypi",
"id": "1fc9c71163d14cee8577a174877c88991d8667d5",
"size": "1984",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "userpypi/management/commands/loadclassifiers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "150937"
}
],
"symlink_target": ""
} |
from pyasn1.type import namedtype
from pyasn1.type import tag
from pyasn1.type import univ
from pyasn1_modules.rfc2459 import AlgorithmIdentifier
# Object identifier arc for PKCS #1: 1.2.840.113549.1.1.
pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1')
# Algorithm identifiers defined under the PKCS #1 arc.
rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1')
md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2')
md4WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.3')
md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4')
sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5')
rsaOAEPEncryptionSET = univ.ObjectIdentifier('1.2.840.113549.1.1.6')
id_RSAES_OAEP = univ.ObjectIdentifier('1.2.840.113549.1.1.7')
id_mgf1 = univ.ObjectIdentifier('1.2.840.113549.1.1.8')
id_pSpecified = univ.ObjectIdentifier('1.2.840.113549.1.1.9')
# SHA-1 lives under a separate (OIW) arc, not PKCS #1.
id_sha1 = univ.ObjectIdentifier('1.3.14.3.2.26')
# Stand-in for the ASN.1 "MAX" upper bound used in size constraints.
MAX = float('inf')
class Version(univ.Integer):
    """Version ::= INTEGER — version field of RSAPrivateKey below."""
    pass
class RSAPrivateKey(univ.Sequence):
    """ASN.1 schema for an RSA private key.

    All components except the leading version are plain INTEGERs, so
    they are generated from the ordered field-name list below.
    """
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('version', Version()),
        *[namedtype.NamedType(field_name, univ.Integer())
          for field_name in ('modulus', 'publicExponent', 'privateExponent',
                             'prime1', 'prime2', 'exponent1', 'exponent2',
                             'coefficient')]
    )
class RSAPublicKey(univ.Sequence):
    """ASN.1 schema for an RSA public key: modulus and public exponent."""
    componentType = namedtype.NamedTypes(
        *[namedtype.NamedType(field_name, univ.Integer())
          for field_name in ('modulus', 'publicExponent')]
    )
# XXX defaults not set
class RSAES_OAEP_params(univ.Sequence):
    """ASN.1 schema for RSAES-OAEP-params.

    Each component is an AlgorithmIdentifier wrapped in a context-specific
    implicit tag whose id equals its declaration position (0, 1, 2).
    DEFAULT values are not modelled, matching the original definition.
    """
    componentType = namedtype.NamedTypes(
        *[namedtype.NamedType(component_name,
                              AlgorithmIdentifier().subtype(
                                  implicitTag=tag.Tag(
                                      tag.tagClassContext,
                                      tag.tagFormatConstructed,
                                      tag_id)))
          for tag_id, component_name in enumerate(
              ('hashFunc', 'maskGenFunc', 'pSourceFunc'))]
    )
| {
"content_hash": "db03f0edaf27d88928f1770328c2ad52",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 84,
"avg_line_length": 40.92857142857143,
"alnum_prop": 0.7185863874345549,
"repo_name": "catapult-project/catapult",
"id": "4e4113ffe528c19785afdc3d46dea812e676668c",
"size": "2617",
"binary": false,
"copies": "14",
"ref": "refs/heads/main",
"path": "third_party/gsutil/third_party/pyasn1-modules/pyasn1_modules/rfc2437.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1324"
},
{
"name": "C++",
"bytes": "46069"
},
{
"name": "CSS",
"bytes": "23376"
},
{
"name": "Dockerfile",
"bytes": "1541"
},
{
"name": "Go",
"bytes": "114396"
},
{
"name": "HTML",
"bytes": "12394298"
},
{
"name": "JavaScript",
"bytes": "1559584"
},
{
"name": "Makefile",
"bytes": "1774"
},
{
"name": "Python",
"bytes": "6778695"
},
{
"name": "Shell",
"bytes": "2288"
}
],
"symlink_target": ""
} |
"""Execute a volatility command on the client memory.
This module implements the volatility enabled client actions which enable
volatility to operate directly on the client.
"""
# Initialize the volatility plugins, so pylint: disable=unused-import
from volatility import addrspace
from volatility import obj
from volatility import plugins
from volatility import session
from volatility.plugins.addrspaces import standard
from volatility.ui import renderer
# pylint: enable=unused-import
from grr.client import actions
from grr.client import vfs
from grr.lib import rdfvalue
from grr.lib import utils
# pylint: disable=g-bad-name
class ProtobufRenderer(renderer.RendererBaseClass):
  """Volatility renderer that stores all the output in a protobuf.

  Table and free-form string output produced by volatility plugins is
  accumulated into a rdfvalue.VolatilityResult, split into sections
  whenever the output switches between table and string mode.
  """

  class Modes(object):
    """Markers for the kind of data the active section holds."""
    TABLE = 1
    STRING = 2

  def __init__(self, **kwargs):
    super(ProtobufRenderer, self).__init__(**kwargs)
    self.response = rdfvalue.VolatilityResult()
    self.active_section = None
    self.mode = None

  def InitSection(self, mode=None):
    """Ensure there is an active section of the requested mode.

    When the mode changes, the current section is flushed into the
    response and a fresh one is started.
    """
    if self.mode != mode and self.active_section:
      self.response.sections.Append(self.active_section)
      self.active_section = None
    if not self.active_section:
      self.active_section = rdfvalue.VolatilitySection()
    self.mode = mode

  def end(self):
    """Flush the last open section when rendering finishes."""
    self.response.sections.Append(self.active_section)

  def start(self, plugin_name=None, kwargs=None):
    """Record which plugin produced this output."""
    _ = kwargs
    if plugin_name:
      self.response.plugin = plugin_name

  def write(self, data):
    self.format(data)

  def format(self, formatstring, *data):
    """Append a format string and its arguments to a string section."""
    self.InitSection(self.Modes.STRING)
    active_list = self.active_section.formatted_value_list
    formatted_value = active_list.formatted_values.Append()
    formatted_value.formatstring = formatstring
    values = formatted_value.data
    for d in data:
      self.AddValue(values, d)

  def section(self):
    """Close the current section explicitly."""
    self.response.sections.Append(self.active_section)
    self.active_section = None

  def flush(self):
    pass

  def table_header(self, title_format_list=None, suppress_headers=False,
                   name=None):
    """Start a table section and record the column headers."""
    _ = suppress_headers, name
    self.InitSection(self.Modes.TABLE)
    for (print_name, name, format_hint) in title_format_list:
      self.active_section.table.headers.Append(print_name=print_name,
                                               name=name,
                                               format_hint=format_hint)

  def AddValue(self, row, value):
    """Encode a single cell value into the row, dispatching on its type."""
    response = row.values.Append()
    if isinstance(value, obj.BaseObject):
      response.type = value.obj_type
      response.name = value.obj_name
      response.offset = value.obj_offset
      response.vm = utils.SmartStr(value.obj_vm)
      try:
        response.value = value.__int__()
      except (AttributeError, ValueError):
        pass
      # Bug fix: string_value must be pre-initialized. If both __unicode__
      # and __str__ raised below, the original code hit a NameError on the
      # "if string_value" test instead of simply skipping the svalue.
      string_value = None
      try:
        string_value = value.__unicode__()
      except (AttributeError, ValueError):
        try:
          string_value = value.__str__()
        except (AttributeError, ValueError):
          pass
      if string_value:
        try:
          int_value = int(string_value)
          # If the string converts to an int but to a different one as the int
          # representation, we send it.
          if int_value != response.value:
            response.svalue = utils.SmartUnicode(string_value)
        except ValueError:
          # We also send if it doesn't convert back to an int.
          response.svalue = utils.SmartUnicode(string_value)
    # bool is checked before int because bool is a subclass of int.
    elif isinstance(value, bool):
      response.svalue = utils.SmartUnicode(str(value))
    elif isinstance(value, (int, long)):
      response.value = value
    elif isinstance(value, basestring):
      response.svalue = utils.SmartUnicode(value)
    elif isinstance(value, obj.NoneObject):
      response.type = value.__class__.__name__
      response.reason = value.reason
    else:
      response.svalue = utils.SmartUnicode(repr(value))

  def table_row(self, *args):
    """Outputs a single row of a table."""
    self.InitSection(self.Modes.TABLE)
    row = self.active_section.table.rows.Append()
    for value in args:
      self.AddValue(row, value)

  def GetResponse(self):
    """Return the accumulated VolatilityResult."""
    return self.response

  def RenderProgress(self, *args):
    self.session.progress(*args)
class UnicodeStringIO(object):
  """An in-memory buffer mimicking StringIO for unicode text.

  The accumulated text is kept in the public ``data`` attribute and is
  also exposed through getvalue() for StringIO compatibility.
  """

  def __init__(self):
    self.data = u""

  # Have to stick to an interface here so pylint: disable=g-bad-name
  def write(self, data):
    """Append data to the buffer, coercing it to unicode first."""
    self.data += utils.SmartUnicode(data)

  def getvalue(self):
    """Return everything written so far."""
    return self.data
class CachingFDAddressSpace(addrspace.CachingAddressSpaceMixIn,
                            standard.FDAddressSpace):
  """A Caching version of the address space.

  Mixes caching behaviour into the file-descriptor backed address space
  so repeated reads of the same regions are served from cache.
  """
class VolatilityAction(actions.ActionPlugin):
  """Runs a volatility command on live memory."""
  in_rdfvalue = rdfvalue.VolatilityRequest
  out_rdfvalue = rdfvalue.VolatilityResult
  def Run(self, request):
    """Run a volatility plugin and return the result.

    Args:
      request: A VolatilityRequest naming the device to open, session
        and plugin arguments, and optionally an explicit profile.
    """
    def Progress(message=None, **_):
      """Allow volatility to heartbeat us so we do not die."""
      _ = message
      self.Progress()
    # Create a session and run all the plugins with it.
    with vfs.VFSOpen(request.device) as fhandle:
      session_args = request.session.ToDict()
      vol_session = session.Session(**session_args)
      # Make the physical address space by wrapping our VFS handler.
      vol_session.physical_address_space = CachingFDAddressSpace(
          fhandle=fhandle)
      # Set the progress method so the nanny is heartbeat.
      vol_session.progress = Progress
      vol_session.renderer = "ProtobufRenderer"
      # Get the dtb from the driver if possible,
      # it significantly speeds up detection.
      try:
        vol_session.dtb = fhandle.cr3
      except AttributeError:
        pass
      # Get the kdbg from the driver if possible,
      # it significantly speeds up detection.
      try:
        vol_session.kdbg = fhandle.kdbg
      except AttributeError:
        pass
      # Which profile should be used?
      if request.profile:
        vol_session.profile = request.profile
      else:
        vol_session.plugins.guess_profile().update_session()
        if not vol_session.profile:
          raise RuntimeError("Unable to autodetect profile")
      # Try to load the kernel address space now.
      if not vol_session.kernel_address_space:
        vol_session.plugins.load_as().GetVirtualAddressSpace()
      # Get the keyword args to this plugin.
      vol_args = request.args.ToDict()
      for plugin, plugin_args in vol_args.items():
        error = ""
        # Heartbeat the client to ensure we keep our nanny happy.
        vol_session.progress(message="Running plugin %s" % plugin)
        ui_renderer = ProtobufRenderer(session=vol_session)
        if plugin_args is None:
          plugin_args = {}
        else:
          plugin_args = plugin_args.ToDict()
        # A failing plugin should not abort the remaining plugins; its
        # error is attached to the per-plugin response instead.
        try:
          vol_session.vol(plugin, renderer=ui_renderer, **plugin_args)
        except Exception as e:  # pylint: disable=broad-except
          error = str(e)
        response = ui_renderer.GetResponse()
        if error:
          response.error = error
        # Send it back to the server.
        self.SendReply(response)
| {
"content_hash": "9419697754be465fce07dd368561ac83",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 78,
"avg_line_length": 29.848979591836734,
"alnum_prop": 0.6578695473813756,
"repo_name": "simsong/grr-insider",
"id": "47753a6bdb2ca10f73fc3c13db25dc3c8e998009",
"size": "7386",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "client/client_actions/grr_volatility.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "227"
},
{
"name": "C++",
"bytes": "55149"
},
{
"name": "CSS",
"bytes": "36308"
},
{
"name": "JavaScript",
"bytes": "679269"
},
{
"name": "Python",
"bytes": "3553249"
},
{
"name": "Shell",
"bytes": "30813"
}
],
"symlink_target": ""
} |
"""
Python client for InfluxDB
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import itertools
import json
import socket
import requests
import requests.exceptions
from sys import version_info
from influxdb.line_protocol import make_lines, quote_ident, quote_literal
from influxdb.resultset import ResultSet
from .exceptions import InfluxDBClientError
from .exceptions import InfluxDBServerError
try:
xrange
except NameError:
xrange = range
if version_info[0] == 3:
from urllib.parse import urlparse
else:
from urlparse import urlparse
class InfluxDBClient(object):
    """The :class:`~.InfluxDBClient` object holds information necessary to
    connect to InfluxDB. Requests can be made to InfluxDB directly through
    the client.
    :param host: hostname to connect to InfluxDB, defaults to 'localhost'
    :type host: str
    :param port: port to connect to InfluxDB, defaults to 8086
    :type port: int
    :param username: user to connect, defaults to 'root'
    :type username: str
    :param password: password of the user, defaults to 'root'
    :type password: str
    :param database: database name to connect to, defaults to None
    :type database: str
    :param ssl: use https instead of http to connect to InfluxDB, defaults to
        False
    :type ssl: bool
    :param verify_ssl: verify SSL certificates for HTTPS requests, defaults to
        False
    :type verify_ssl: bool
    :param timeout: number of seconds Requests will wait for your client to
        establish a connection, defaults to None
    :type timeout: int
    :param use_udp: use UDP to connect to InfluxDB, defaults to False
    :type use_udp: bool
    :param udp_port: UDP port to connect to InfluxDB, defaults to 4444
    :type udp_port: int
    :param proxies: HTTP(S) proxy to use for Requests, defaults to {}
    :type proxies: dict
    """
    def __init__(self,
                 host='localhost',
                 port=8086,
                 username='root',
                 password='root',
                 database=None,
                 ssl=False,
                 verify_ssl=False,
                 timeout=None,
                 use_udp=False,
                 udp_port=4444,
                 proxies=None,
                 ):
        """Construct a new InfluxDBClient object."""
        # Double-underscore attributes are name-mangled; reads go through
        # the _host/_port/_baseurl properties defined below.
        self.__host = host
        self.__port = int(port)
        self._username = username
        self._password = password
        self._database = database
        self._timeout = timeout
        self._verify_ssl = verify_ssl
        self.use_udp = use_udp
        self.udp_port = udp_port
        self._session = requests.Session()
        # A UDP socket is only created when UDP writes were requested.
        if use_udp:
            self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self._scheme = "http"
        if ssl is True:
            self._scheme = "https"
        if proxies is None:
            self._proxies = {}
        else:
            self._proxies = proxies
        self.__baseurl = "{0}://{1}:{2}".format(
            self._scheme,
            self._host,
            self._port)
        # Default headers shared by all requests made through request().
        self._headers = {
            'Content-type': 'application/json',
            'Accept': 'text/plain'
        }
    @property
    def _baseurl(self):
        # The extra _get_* indirection presumably exists so subclasses can
        # override how these values are resolved — TODO confirm intent.
        return self._get_baseurl()
    def _get_baseurl(self):
        return self.__baseurl
    @property
    def _host(self):
        return self._get_host()
    def _get_host(self):
        return self.__host
    @property
    def _port(self):
        return self._get_port()
    def _get_port(self):
        return self.__port
    @classmethod
    def from_DSN(cls, dsn, **kwargs):
        """Return an instance of :class:`~.InfluxDBClient` from the provided
        data source name. Supported schemes are "influxdb", "https+influxdb"
        and "udp+influxdb". Parameters for the :class:`~.InfluxDBClient`
        constructor may also be passed to this method.
        :param dsn: data source name
        :type dsn: string
        :param kwargs: additional parameters for `InfluxDBClient`
        :type kwargs: dict
        :raises ValueError: if the provided DSN has any unexpected values
        :Example:
        ::
            >> cli = InfluxDBClient.from_DSN('influxdb://username:password@\
localhost:8086/databasename', timeout=5)
            >> type(cli)
            <class 'influxdb.client.InfluxDBClient'>
            >> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\
localhost:8086/databasename', timeout=5, udp_port=159)
            >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli))
            http://localhost:8086 - True 159
        .. note:: parameters provided in `**kwargs` may override dsn parameters
        .. note:: when using "udp+influxdb" the specified port (if any) will
            be used for the TCP connection; specify the UDP port with the
            additional `udp_port` parameter (cf. examples).
        """
        init_args = parse_dsn(dsn)
        # Only the first host:port pair from the DSN is used; clusters are
        # not supported by this constructor.
        host, port = init_args.pop('hosts')[0]
        init_args['host'] = host
        init_args['port'] = port
        # Explicit keyword arguments take precedence over DSN values.
        init_args.update(kwargs)
        return cls(**init_args)
def switch_database(self, database):
"""Change the client's database.
:param database: the name of the database to switch to
:type database: str
"""
self._database = database
def switch_user(self, username, password):
"""Change the client's username.
:param username: the username to switch to
:type username: str
:param password: the password for the username
:type password: str
"""
self._username = username
self._password = password
def request(self, url, method='GET', params=None, data=None,
expected_response_code=200, headers=None):
"""Make a HTTP request to the InfluxDB API.
:param url: the path of the HTTP request, e.g. write, query, etc.
:type url: str
:param method: the HTTP method for the request, defaults to GET
:type method: str
:param params: additional parameters for the request, defaults to None
:type params: dict
:param data: the data of the request, defaults to None
:type data: str
:param expected_response_code: the expected response code of
the request, defaults to 200
:type expected_response_code: int
:returns: the response from the request
:rtype: :class:`requests.Response`
:raises InfluxDBServerError: if the response code is any server error
code (5xx)
:raises InfluxDBClientError: if the response code is not the
same as `expected_response_code` and is not a server error code
"""
url = "{0}/{1}".format(self._baseurl, url)
if headers is None:
headers = self._headers
if params is None:
params = {}
if isinstance(data, (dict, list)):
data = json.dumps(data)
# Try to send the request a maximum of three times. (see #103)
# TODO (aviau): Make this configurable.
for i in range(0, 3):
try:
response = self._session.request(
method=method,
url=url,
auth=(self._username, self._password),
params=params,
data=data,
headers=headers,
proxies=self._proxies,
verify=self._verify_ssl,
timeout=self._timeout
)
break
except requests.exceptions.ConnectionError as e:
if i < 2:
continue
else:
raise e
if response.status_code >= 500 and response.status_code < 600:
raise InfluxDBServerError(response.content)
elif response.status_code == expected_response_code:
return response
else:
raise InfluxDBClientError(response.content, response.status_code)
def write(self, data, params=None, expected_response_code=204,
protocol='json'):
"""Write data to InfluxDB.
:param data: the data to be written
:type data: (if protocol is 'json') dict
(if protocol is 'line') sequence of line protocol strings
:param params: additional parameters for the request, defaults to None
:type params: dict
:param expected_response_code: the expected response code of the write
operation, defaults to 204
:type expected_response_code: int
:param protocol: protocol of input data, either 'json' or 'line'
:type protocol: str
:returns: True, if the write operation is successful
:rtype: bool
"""
headers = self._headers
headers['Content-type'] = 'application/octet-stream'
if params:
precision = params.get('precision')
else:
precision = None
if protocol == 'json':
data = make_lines(data, precision).encode('utf-8')
elif protocol == 'line':
data = ('\n'.join(data) + '\n').encode('utf-8')
self.request(
url="write",
method='POST',
params=params,
data=data,
expected_response_code=expected_response_code,
headers=headers
)
return True
    def query(self,
              query,
              params=None,
              epoch=None,
              expected_response_code=200,
              database=None,
              raise_errors=True):
        """Send a query to InfluxDB.
        :param query: the actual query string
        :type query: str
        :param params: additional parameters for the request, defaults to {}
        :type params: dict
        :param epoch: if given, forwarded to InfluxDB as the ``epoch``
            query parameter, defaults to None
        :type epoch: str
        :param expected_response_code: the expected status code of response,
            defaults to 200
        :type expected_response_code: int
        :param database: database to query, defaults to None
        :type database: str
        :param raise_errors: Whether or not to raise exceptions when InfluxDB
            returns errors, defaults to True
        :type raise_errors: bool
        :returns: the queried data
        :rtype: :class:`~.ResultSet`
        """
        if params is None:
            params = {}
        params['q'] = query
        params['db'] = database or self._database
        if epoch is not None:
            params['epoch'] = epoch
        response = self.request(
            url="query",
            method='GET',
            params=params,
            data=None,
            expected_response_code=expected_response_code
        )
        data = response.json()
        results = [
            ResultSet(result, raise_errors=raise_errors)
            for result
            in data.get('results', [])
        ]
        # TODO(aviau): Always return a list. (This would be a breaking change)
        if len(results) == 1:
            return results[0]
        else:
            return results
    def write_points(self,
                     points,
                     time_precision=None,
                     database=None,
                     retention_policy=None,
                     tags=None,
                     batch_size=None,
                     protocol='json'
                     ):
        """Write to multiple time series names.
        :param points: the list of points to be written in the database
        :type points: list of dictionaries, each dictionary represents a point
        :type data: (if protocol is 'json') list of dicts, where each dict
                    represents a point.
                    (if protocol is 'line') sequence of line protocol strings.
        :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None
        :type time_precision: str
        :param database: the database to write the points to. Defaults to
            the client's current database
        :type database: str
        :param tags: a set of key-value pairs associated with each point. Both
            keys and values must be strings. These are shared tags and will be
            merged with point-specific tags, defaults to None
        :type tags: dict
        :param retention_policy: the retention policy for the points. Defaults
            to None
        :type retention_policy: str
        :param batch_size: value to write the points in batches
            instead of all at one time. Useful for when doing data dumps from
            one database to another or when doing a massive write operation,
            defaults to None
        :type batch_size: int
        :param protocol: Protocol for writing data. Either 'line' or 'json'.
        :type protocol: str
        :returns: True, if the operation is successful
        :rtype: bool
        .. note:: if no retention policy is specified, the default retention
            policy for the database is used
        """
        if batch_size and batch_size > 0:
            for batch in self._batches(points, batch_size):
                self._write_points(points=batch,
                                   time_precision=time_precision,
                                   database=database,
                                   retention_policy=retention_policy,
                                   tags=tags, protocol=protocol)
            return True
        else:
            return self._write_points(points=points,
                                      time_precision=time_precision,
                                      database=database,
                                      retention_policy=retention_policy,
                                      tags=tags, protocol=protocol)
def _batches(self, iterable, size):
iterator = iter(iterable)
while True:
batch_iterator = itertools.islice(iterator, size)
yield itertools.chain([next(batch_iterator)], batch_iterator)
    def _write_points(self,
                      points,
                      time_precision,
                      database,
                      retention_policy,
                      tags,
                      protocol='json'):
        """Validate precision, then write points via HTTP or UDP.

        Returns True on success; raises ValueError for an invalid
        ``time_precision`` or for non-second precision over UDP.
        """
        if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]:
            raise ValueError(
                "Invalid time precision is given. "
                "(use 'n', 'u', 'ms', 's', 'm' or 'h')")
        if self.use_udp and time_precision and time_precision != 's':
            raise ValueError(
                "InfluxDB only supports seconds precision for udp writes"
            )
        if protocol == 'json':
            data = {
                'points': points
            }
            if tags is not None:
                data['tags'] = tags
        else:
            # Line protocol payloads are passed through as-is.
            data = points
        params = {
            'db': database or self._database
        }
        if time_precision is not None:
            params['precision'] = time_precision
        if retention_policy is not None:
            params['rp'] = retention_policy
        if self.use_udp:
            self.send_packet(data, protocol=protocol)
        else:
            self.write(
                data=data,
                params=params,
                expected_response_code=204,
                protocol=protocol
            )
        return True
    def get_list_database(self):
        """Get the list of databases in InfluxDB.
        :returns: all databases in InfluxDB
        :rtype: list of dictionaries
        :Example:
        ::
            >> dbs = client.get_list_database()
            >> dbs
            [{u'name': u'db1'}, {u'name': u'db2'}, {u'name': u'db3'}]
        """
        return list(self.query("SHOW DATABASES").get_points())
    def create_database(self, dbname):
        """Create a new database in InfluxDB.
        :param dbname: the name of the database to create
        :type dbname: str
        """
        # quote_ident escapes the name so identifiers with special
        # characters are handled safely.
        self.query("CREATE DATABASE {0}".format(quote_ident(dbname)))
    def drop_database(self, dbname):
        """Drop a database from InfluxDB.
        :param dbname: the name of the database to drop
        :type dbname: str
        """
        self.query("DROP DATABASE {0}".format(quote_ident(dbname)))
    def create_retention_policy(self, name, duration, replication,
                                database=None, default=False):
        """Create a retention policy for a database.
        :param name: the name of the new retention policy
        :type name: str
        :param duration: the duration of the new retention policy.
            Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported
            and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks,
            respectively. For infinite retention – meaning the data will
            never be deleted – use 'INF' for duration.
            The minimum retention period is 1 hour.
        :type duration: str
        :param replication: the replication of the retention policy
        :type replication: str
        :param database: the database for which the retention policy is
            created. Defaults to current client's database
        :type database: str
        :param default: whether or not to set the policy as default
        :type default: bool
        """
        # duration and replication are interpolated unescaped; they are
        # expected to be simple tokens like '1h' or '3'.
        query_string = \
            "CREATE RETENTION POLICY {0} ON {1} " \
            "DURATION {2} REPLICATION {3}".format(
                quote_ident(name), quote_ident(database or self._database),
                duration, replication)
        if default is True:
            query_string += " DEFAULT"
        self.query(query_string)
    def alter_retention_policy(self, name, database=None,
                               duration=None, replication=None, default=None):
        """Modify an existing retention policy for a database.
        :param name: the name of the retention policy to modify
        :type name: str
        :param database: the database for which the retention policy is
            modified. Defaults to current client's database
        :type database: str
        :param duration: the new duration of the existing retention policy.
            Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported
            and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks,
            respectively. For infinite retention – meaning the data will
            never be deleted – use 'INF' for duration.
            The minimum retention period is 1 hour.
        :type duration: str
        :param replication: the new replication of the existing
            retention policy
        :type replication: str
        :param default: whether or not to set the modified policy as default
        :type default: bool
        .. note:: at least one of duration, replication, or default flag
            should be set. Otherwise the operation will fail.
        """
        query_string = (
            "ALTER RETENTION POLICY {0} ON {1}"
        ).format(quote_ident(name), quote_ident(database or self._database))
        if duration:
            query_string += " DURATION {0}".format(duration)
        if replication:
            query_string += " REPLICATION {0}".format(replication)
        if default is True:
            query_string += " DEFAULT"
        self.query(query_string)
    def drop_retention_policy(self, name, database=None):
        """Drop an existing retention policy for a database.
        :param name: the name of the retention policy to drop
        :type name: str
        :param database: the database for which the retention policy is
            dropped. Defaults to current client's database
        :type database: str
        """
        query_string = (
            "DROP RETENTION POLICY {0} ON {1}"
        ).format(quote_ident(name), quote_ident(database or self._database))
        self.query(query_string)
    def get_list_retention_policies(self, database=None):
        """Get the list of retention policies for a database.
        :param database: the name of the database, defaults to the client's
            current database
        :type database: str
        :returns: all retention policies for the database
        :rtype: list of dictionaries
        :Example:
        ::
            >> ret_policies = client.get_list_retention_policies('my_db')
            >> ret_policies
            [{u'default': True,
              u'duration': u'0',
              u'name': u'default',
              u'replicaN': 1}]
        """
        if not (database or self._database):
            raise InfluxDBClientError(
                "get_list_retention_policies() requires a database as a "
                "parameter or the client to be using a database")
        rsp = self.query(
            "SHOW RETENTION POLICIES ON {0}".format(
                quote_ident(database or self._database))
        )
        return list(rsp.get_points())
    def get_list_users(self):
        """Get the list of all users in InfluxDB.
        :returns: all users in InfluxDB
        :rtype: list of dictionaries
        :Example:
        ::
            >> users = client.get_list_users()
            >> users
            [{u'admin': True, u'user': u'user1'},
             {u'admin': False, u'user': u'user2'},
             {u'admin': False, u'user': u'user3'}]
        """
        return list(self.query("SHOW USERS").get_points())
    def create_user(self, username, password, admin=False):
        """Create a new user in InfluxDB.
        :param username: the new username to create
        :type username: str
        :param password: the password for the new user
        :type password: str
        :param admin: whether the user should have cluster administration
            privileges or not
        :type admin: boolean
        """
        text = "CREATE USER {0} WITH PASSWORD {1}".format(
            quote_ident(username), quote_literal(password))
        if admin:
            text += ' WITH ALL PRIVILEGES'
        self.query(text)
    def drop_user(self, username):
        """Drop a user from InfluxDB.
        :param username: the username to drop
        :type username: str
        """
        text = "DROP USER {0}".format(quote_ident(username))
        self.query(text)
    def set_user_password(self, username, password):
        """Change the password of an existing user.
        :param username: the username whose password is being changed
        :type username: str
        :param password: the new password for the user
        :type password: str
        """
        text = "SET PASSWORD FOR {0} = {1}".format(
            quote_ident(username), quote_literal(password))
        self.query(text)
    def delete_series(self, database=None, measurement=None, tags=None):
        """Delete series from a database. Series can be filtered by
        measurement and tags.
        :param database: the database from which the series should be
            deleted, defaults to client's current database
        :type database: str
        :param measurement: Delete all series from a measurement
        :type measurement: str
        :param tags: Delete all series that match given tags
        :type tags: dict
        """
        database = database or self._database
        query_str = 'DROP SERIES'
        if measurement:
            query_str += ' FROM {0}'.format(quote_ident(measurement))
        if tags:
            # All tag filters are ANDed together.
            tag_eq_list = ["{0}={1}".format(quote_ident(k), quote_literal(v))
                           for k, v in tags.items()]
            query_str += ' WHERE ' + ' AND '.join(tag_eq_list)
        self.query(query_str, database=database)
    def grant_admin_privileges(self, username):
        """Grant cluster administration privileges to a user.
        :param username: the username to grant privileges to
        :type username: str
        .. note:: Only a cluster administrator can create/drop databases
            and manage users.
        """
        text = "GRANT ALL PRIVILEGES TO {0}".format(quote_ident(username))
        self.query(text)
    def revoke_admin_privileges(self, username):
        """Revoke cluster administration privileges from a user.
        :param username: the username to revoke privileges from
        :type username: str
        .. note:: Only a cluster administrator can create/drop databases
            and manage users.
        """
        text = "REVOKE ALL PRIVILEGES FROM {0}".format(quote_ident(username))
        self.query(text)
    def grant_privilege(self, privilege, database, username):
        """Grant a privilege on a database to a user.
        :param privilege: the privilege to grant, one of 'read', 'write'
            or 'all'. The string is case-insensitive
        :type privilege: str
        :param database: the database to grant the privilege on
        :type database: str
        :param username: the username to grant the privilege to
        :type username: str
        """
        # NOTE(review): `privilege` is interpolated without quoting, so it
        # must be one of the keyword values described above.
        text = "GRANT {0} ON {1} TO {2}".format(privilege,
                                                quote_ident(database),
                                                quote_ident(username))
        self.query(text)
    def revoke_privilege(self, privilege, database, username):
        """Revoke a privilege on a database from a user.
        :param privilege: the privilege to revoke, one of 'read', 'write'
            or 'all'. The string is case-insensitive
        :type privilege: str
        :param database: the database to revoke the privilege on
        :type database: str
        :param username: the username to revoke the privilege from
        :type username: str
        """
        text = "REVOKE {0} ON {1} FROM {2}".format(privilege,
                                                   quote_ident(database),
                                                   quote_ident(username))
        self.query(text)
    def get_list_privileges(self, username):
        """Get the list of all privileges granted to given user.
        :param username: the username to get privileges of
        :type username: str
        :returns: all privileges granted to given user
        :rtype: list of dictionaries
        :Example:
        ::
            >> privileges = client.get_list_privileges('user1')
            >> privileges
            [{u'privilege': u'WRITE', u'database': u'db1'},
             {u'privilege': u'ALL PRIVILEGES', u'database': u'db2'},
             {u'privilege': u'NO PRIVILEGES', u'database': u'db3'}]
        """
        text = "SHOW GRANTS FOR {0}".format(quote_ident(username))
        return list(self.query(text).get_points())
def send_packet(self, packet, protocol='json'):
"""Send an UDP packet.
:param packet: the packet to be sent
:type packet: (if protocol is 'json') dict
(if protocol is 'line') sequence of line protocol strings
:param protocol: protocol of input data, either 'json' or 'line'
:type protocol: str
"""
if protocol == 'json':
data = make_lines(packet).encode('utf-8')
elif protocol == 'line':
data = ('\n'.join(data) + '\n').encode('utf-8')
self.udp_socket.sendto(data, (self._host, self.udp_port))
def parse_dsn(dsn):
conn_params = urlparse(dsn)
init_args = {}
scheme_info = conn_params.scheme.split('+')
if len(scheme_info) == 1:
scheme = scheme_info[0]
modifier = None
else:
modifier, scheme = scheme_info
if scheme != 'influxdb':
raise ValueError('Unknown scheme "{0}".'.format(scheme))
if modifier:
if modifier == 'udp':
init_args['use_udp'] = True
elif modifier == 'https':
init_args['ssl'] = True
else:
raise ValueError('Unknown modifier "{0}".'.format(modifier))
netlocs = conn_params.netloc.split(',')
init_args['hosts'] = []
for netloc in netlocs:
parsed = _parse_netloc(netloc)
init_args['hosts'].append((parsed['host'], int(parsed['port'])))
init_args['username'] = parsed['username']
init_args['password'] = parsed['password']
if conn_params.path and len(conn_params.path) > 1:
init_args['database'] = conn_params.path[1:]
return init_args
def _parse_netloc(netloc):
info = urlparse("http://{0}".format(netloc))
return {'username': info.username or None,
'password': info.password or None,
'host': info.hostname or 'localhost',
'port': info.port or 8086}
| {
"content_hash": "9be8b21876252bf0b334ab0eb847202d",
"timestamp": "",
"source": "github",
"line_count": 829,
"max_line_length": 79,
"avg_line_length": 34.775633293124244,
"alnum_prop": 0.5671719449165771,
"repo_name": "Asimmetric/influxdb-python",
"id": "92310c78b9ef6566c52014db33c4a6209e9570e4",
"size": "28861",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "influxdb/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "176"
},
{
"name": "Python",
"bytes": "262034"
},
{
"name": "Shell",
"bytes": "190"
}
],
"symlink_target": ""
} |
import datetime
import decimal
from dateutil.relativedelta import relativedelta
from django.db import transaction
from django.db.models import Sum
from django.http import HttpResponse
from django.utils.translation import ugettext_lazy as _
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import exceptions, status
from rest_framework.decorators import action
from rest_framework.response import Response
from waldur_core.core import validators as core_validators
from waldur_core.core import views as core_views
from waldur_core.structure import filters as structure_filters
from waldur_core.structure import models as structure_models
from waldur_core.structure import permissions as structure_permissions
from waldur_mastermind.common.utils import quantize_price
from . import filters, log, models, serializers, tasks, utils
class InvoiceViewSet(core_views.ReadOnlyActionsViewSet):
    """Read-only invoice API with staff actions for notification, PDF
    download, manual payment registration, and cost statistics."""

    queryset = models.Invoice.objects.order_by('-year', '-month')
    serializer_class = serializers.InvoiceSerializer
    lookup_field = 'uuid'
    filter_backends = (
        structure_filters.GenericRoleFilter,
        structure_filters.CustomerAccountingStartDateFilter,
        DjangoFilterBackend,
    )
    filterset_class = filters.InvoiceFilter

    def _is_invoice_created(invoice):
        # Action validator: it is called with the invoice, not ``self``.
        # Notifications may only be sent for invoices in the CREATED state.
        if invoice.state != models.Invoice.States.CREATED:
            raise exceptions.ValidationError(
                _('Notification only for the created invoice can be sent.')
            )

    @action(detail=True, methods=['post'])
    def send_notification(self, request, uuid=None):
        """Schedule an asynchronous email notification for this invoice."""
        invoice = self.get_object()
        tasks.send_invoice_notification.delay(invoice.uuid.hex)

        return Response(
            {
                'detail': _(
                    'Invoice notification sending has been successfully scheduled.'
                )
            },
            status=status.HTTP_200_OK,
        )

    send_notification_permissions = [structure_permissions.is_staff]
    send_notification_validators = [_is_invoice_created]

    @action(detail=True)
    def pdf(self, request, uuid=None):
        """Render the invoice as a downloadable PDF attachment."""
        invoice = self.get_object()
        file = utils.create_invoice_pdf(invoice)
        file_response = HttpResponse(file, content_type='application/pdf')
        filename = invoice.get_filename()
        # BUG FIX: the template string previously contained no
        # "{filename}" placeholder, so .format() was a no-op and every
        # download was served under the same wrong literal filename.
        file_response[
            'Content-Disposition'
        ] = 'attachment; filename="{filename}"'.format(filename=filename)
        return file_response

    @transaction.atomic
    @action(detail=True, methods=['post'])
    def paid(self, request, uuid=None):
        """Mark the invoice as paid.

        If a request body is supplied, also create a Payment record (with
        optional proof document) against the customer's active payment
        profile. Raises a validation error when no active profile exists.
        """
        invoice = self.get_object()

        if request.data:
            serializer = serializers.PaidSerializer(data=request.data)
            serializer.is_valid(raise_exception=True)

            try:
                profile = models.PaymentProfile.objects.get(
                    is_active=True, organization=invoice.customer
                )
            except models.PaymentProfile.DoesNotExist:
                raise exceptions.ValidationError(
                    _('The active profile for this customer does not exist.')
                )

            payment = models.Payment.objects.create(
                date_of_payment=serializer.validated_data['date'],
                sum=invoice.total_current,
                profile=profile,
                invoice=invoice,
            )

            proof = serializer.validated_data.get('proof')
            if proof:
                payment.proof = proof
                payment.save()

            log.event_logger.invoice.info(
                # BUG FIX: removed a stray trailing double quote that made
                # this message inconsistent with the same event logged by
                # PaymentViewSet.link_to_invoice.
                'Payment for invoice ({month}/{year}) has been added.',
                event_type='payment_created',
                event_context={
                    'month': invoice.month,
                    'year': invoice.year,
                    'customer': invoice.customer,
                },
            )

        invoice.state = models.Invoice.States.PAID
        invoice.save(update_fields=['state'])
        return Response(status=status.HTTP_200_OK)

    paid_permissions = [structure_permissions.is_staff]
    paid_validators = [core_validators.StateValidator(models.Invoice.States.CREATED)]

    @action(detail=True)
    def stats(self, request, uuid=None):
        """Aggregate the invoice's item costs per marketplace offering."""
        invoice = self.get_object()
        offerings = {}

        for item in invoice.items.all():
            if not item.resource:
                continue
            resource = item.resource
            offering = resource.offering
            customer = offering.customer
            service_category_title = offering.category.title
            service_provider_name = customer.name
            service_provider_uuid = customer.serviceprovider.uuid.hex

            if offering.uuid.hex not in offerings.keys():
                offerings[offering.uuid.hex] = {
                    'offering_name': offering.name,
                    'aggregated_cost': item.total,
                    'service_category_title': service_category_title,
                    'service_provider_name': service_provider_name,
                    'service_provider_uuid': service_provider_uuid,
                }
            else:
                offerings[offering.uuid.hex]['aggregated_cost'] += item.total

        queryset = [dict(uuid=key, **details) for (key, details) in offerings.items()]
        for item in queryset:
            item['aggregated_cost'] = quantize_price(
                decimal.Decimal(item['aggregated_cost'])
            )
        page = self.paginate_queryset(queryset)
        return self.get_paginated_response(page)

    @action(detail=False)
    def growth(self, request):
        """Monthly revenue over the last 13 months for the top N customers.

        Staff/support only. ``customers_count`` (default 4, max 20) picks
        how many top customers are broken out; the rest are aggregated
        into ``other_periods``.
        """
        if not self.request.user.is_staff and not request.user.is_support:
            raise exceptions.PermissionDenied()

        customers = structure_models.Customer.objects.all()
        customers = structure_filters.AccountingStartDateFilter().filter_queryset(
            request, customers, self
        )

        customers_count = 4
        if 'customers_count' in request.query_params:
            try:
                customers_count = int(request.query_params['customers_count'])
            except ValueError:
                raise exceptions.ValidationError('customers_count is not a number')
            if customers_count > 20:
                raise exceptions.ValidationError(
                    'customers_count should not be greater than 20'
                )

        is_accounting_mode = request.query_params.get('accounting_mode') == 'accounting'

        today = datetime.date.today()
        current_month = today - relativedelta(months=12)

        # Top-N customers by total cost over the window; everyone else is
        # grouped as "other".
        majors = list(
            models.Invoice.objects.filter(
                customer__in=customers, created__gte=current_month
            )
            .values('customer_id')
            .annotate(total=Sum('current_cost'))
            .order_by('-total')
            .values_list('customer_id', flat=True)[:customers_count]
        )
        minors = customers.exclude(id__in=majors)

        customer_periods = {}
        total_periods = {}
        other_periods = {}

        for i in range(13):
            invoices = models.Invoice.objects.filter(
                year=current_month.year, month=current_month.month,
            )
            key = f'{current_month.year}-{current_month.month}'
            row = customer_periods[key] = {}
            subtotal = 0

            for invoice in invoices.filter(customer_id__in=majors):
                # BUG FIX: the old "cond and a or b" idiom fell through to
                # invoice.total whenever invoice.price was 0 in accounting
                # mode; a real conditional expression handles zero prices.
                value = invoice.price if is_accounting_mode else invoice.total
                subtotal += value
                row[invoice.customer.uuid.hex] = value

            other_periods[key] = sum(
                invoice.price if is_accounting_mode else invoice.total
                for invoice in invoices.filter(customer_id__in=minors)
            )
            total_periods[key] = subtotal + other_periods[key]
            current_month += relativedelta(months=1)

        result = {
            # Coerce dict views to lists so the payload is JSON-serializable.
            'periods': list(total_periods.keys()),
            'total_periods': list(total_periods.values()),
            'other_periods': list(other_periods.values()),
            'customer_periods': [
                {
                    'name': customer.name,
                    'periods': [
                        customer_periods[period].get(customer.uuid.hex, 0)
                        for period in total_periods.keys()
                    ],
                }
                for customer in structure_models.Customer.objects.filter(id__in=majors)
            ],
        }
        return Response(result, status=status.HTTP_200_OK)
class PaymentProfileViewSet(core_views.ActionsViewSet):
    """Manage payment profiles; every mutating action is staff-only."""

    lookup_field = 'uuid'
    queryset = models.PaymentProfile.objects.all().order_by('name')
    serializer_class = serializers.PaymentProfileSerializer
    filter_backends = (
        structure_filters.GenericRoleFilter,
        DjangoFilterBackend,
        filters.PaymentProfileFilterBackend,
    )
    filterset_class = filters.PaymentProfileFilter

    # One staff-only permission list shared by all mutating actions,
    # including the custom "enable" action below.
    create_permissions = (
        update_permissions
    ) = partial_update_permissions = destroy_permissions = enable_permissions = [
        structure_permissions.is_staff
    ]

    @action(detail=True, methods=['post'])
    def enable(self, request, uuid=None):
        """Mark the selected payment profile as active."""
        payment_profile = self.get_object()
        payment_profile.is_active = True
        payment_profile.save(update_fields=['is_active'])
        message = _('Payment profile has been enabled.')
        return Response({'detail': message}, status=status.HTTP_200_OK)
class PaymentViewSet(core_views.ActionsViewSet):
    """CRUD for payments plus staff-only invoice link/unlink actions."""
    lookup_field = 'uuid'
    filter_backends = (
        structure_filters.GenericRoleFilter,
        DjangoFilterBackend,
    )
    filterset_class = filters.PaymentFilter
    # All mutating actions, including the custom link/unlink actions, are
    # restricted to staff users via one shared permission list.
    create_permissions = (
        update_permissions
    ) = (
        partial_update_permissions
    ) = (
        destroy_permissions
    ) = link_to_invoice_permissions = unlink_from_invoice_permissions = [
        structure_permissions.is_staff
    ]
    queryset = models.Payment.objects.all().order_by('created')
    serializer_class = serializers.PaymentSerializer
    @action(detail=True, methods=['post'])
    def link_to_invoice(self, request, uuid=None):
        """Attach an invoice to this payment and log a payment_created event.

        Rejects the request when the invoice does not belong to the
        organization of the payment's profile.
        """
        payment = self.get_object()
        serializer = self.get_serializer_class()(data=request.data)
        serializer.is_valid(raise_exception=True)
        invoice = serializer.validated_data['invoice']
        if invoice.customer != payment.profile.organization:
            raise exceptions.ValidationError(
                _('The passed invoice does not belong to the selected customer.')
            )
        payment.invoice = invoice
        payment.save(update_fields=['invoice'])
        log.event_logger.invoice.info(
            'Payment for invoice ({month}/{year}) has been added.',
            event_type='payment_created',
            event_context={
                'month': invoice.month,
                'year': invoice.year,
                'customer': invoice.customer,
            },
        )
        return Response(
            {'detail': _('An invoice has been linked to payment.')},
            status=status.HTTP_200_OK,
        )
    def _link_to_invoice_exists(payment):
        # Action validator (called with the payment, not ``self``):
        # linking is only allowed when no invoice is attached yet.
        if payment.invoice:
            raise exceptions.ValidationError(_('Link to an invoice exists.'))
    link_to_invoice_validators = [_link_to_invoice_exists]
    link_to_invoice_serializer_class = serializers.LinkToInvoiceSerializer
    def _link_to_invoice_does_not_exist(payment):
        # Action validator: unlinking requires an attached invoice.
        if not payment.invoice:
            raise exceptions.ValidationError(_('Link to an invoice does not exist.'))
    @action(detail=True, methods=['post'])
    def unlink_from_invoice(self, request, uuid=None):
        """Detach the invoice from this payment and log payment_removed."""
        payment = self.get_object()
        invoice = payment.invoice
        payment.invoice = None
        payment.save(update_fields=['invoice'])
        log.event_logger.invoice.info(
            'Payment for invoice ({month}/{year}) has been removed.',
            event_type='payment_removed',
            event_context={
                'month': invoice.month,
                'year': invoice.year,
                'customer': invoice.customer,
            },
        )
        return Response(
            {'detail': _('An invoice has been unlinked from payment.')},
            status=status.HTTP_200_OK,
        )
    unlink_from_invoice_validators = [_link_to_invoice_does_not_exist]
    def perform_create(self, serializer):
        """Create the payment, then log a payment_added event."""
        super(PaymentViewSet, self).perform_create(serializer)
        payment = serializer.instance
        log.event_logger.payment.info(
            'Payment for {customer_name} in the amount of {amount} has been added.',
            event_type='payment_added',
            event_context={
                'amount': payment.sum,
                'customer': payment.profile.organization,
            },
        )
    def perform_destroy(self, instance):
        """Capture customer/amount before deletion, then log payment_removed."""
        customer = instance.profile.organization
        amount = instance.sum
        super(PaymentViewSet, self).perform_destroy(instance)
        log.event_logger.payment.info(
            'Payment for {customer_name} in the amount of {amount} has been removed.',
            event_type='payment_removed',
            event_context={'amount': amount, 'customer': customer,},
        )
| {
"content_hash": "8a0864abd5eddda88463996933c030ac",
"timestamp": "",
"source": "github",
"line_count": 368,
"max_line_length": 88,
"avg_line_length": 36.19836956521739,
"alnum_prop": 0.6019067637564748,
"repo_name": "opennode/nodeconductor-assembly-waldur",
"id": "01cc5c8244075721fcea84c6d488514f6d3a201b",
"size": "13321",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/waldur_mastermind/invoices/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1624"
},
{
"name": "Python",
"bytes": "412263"
},
{
"name": "Shell",
"bytes": "2031"
}
],
"symlink_target": ""
} |
from ConfigParser import SafeConfigParser
import os
from zope.interface import moduleProvides
from carapace import meta
from carapace.sdk import interfaces
# Declare (zope.interface) that this module itself provides IConfig, so
# consumers can treat the module object as a configuration component.
moduleProvides(interfaces.IConfig)
class Config(object):
    """Empty attribute container used to build nested config namespaces
    (e.g. ``main.config.datadir``, ``ssh.port``) via plain assignment."""
    pass
# Main: general application paths. ``datadir`` is the per-user dot
# directory (named after the library) holding the installed config file.
main = Config()
main.config = Config()
main.config.datadir = os.path.expanduser("~/.%s" % meta.library_name)
main.config.localfile = "config.ini"
main.config.installedfile = os.path.join(
    main.config.datadir, main.config.localfile)
# Internal SSH Server: defaults for the embedded shell server. These are
# overwritten from the INI file by Configurator.updateConfig().
ssh = Config()
ssh.servicename = meta.description
ssh.ip = "127.0.0.1"
ssh.port = 2222
ssh.pidfile = "twistd.pid"
ssh.username = "root"
ssh.keydir = os.path.join(main.config.datadir, "ssh")
ssh.privkey = "id_rsa"
ssh.pubkey = "id_rsa.pub"
ssh.localdir = "~/.ssh"
# "{{USER}}" and "{{HELP}}" below are template placeholders substituted
# elsewhere at runtime — do not change their spelling.
ssh.userdirtemplate = os.path.join(main.config.datadir, "users", "{{USER}}")
ssh.userauthkeys = os.path.join(ssh.userdirtemplate, "authorized_keys")
ssh.usesystemkeys = False
# ASCII-art login banner; backslashes are doubled to survive the string
# literal.
ssh.banner = """:
: Welcome to
:_________
:\_ ___ \_____ ____________ ___________ ____ ____
:/ \ \/\__ \\\\_ __ \__ \ \____ \__ \ _/ ___\/ __ \\
:\ \____/ __ \| | \// __ \| |_> > __ \\\\ \__\ ___/
: \______ (____ /__| (____ / __(____ /\___ >___ >
: \/ \/ \/|__| \/ \/ \/
:
: You have logged into a Carapace Shell Server.
: {{HELP}}
:
: Enjoy!
:
"""
class Configurator(object):
    """Read, write and synchronize the on-disk INI configuration with the
    in-memory ``main``/``ssh`` Config namespaces.
    """

    def __init__(self, main=None, ssh=None):
        self.main = main
        self.ssh = ssh

    def buildDefaults(self):
        """Return a SafeConfigParser populated with the current in-memory
        SSH settings (the shipped defaults on first run)."""
        config = SafeConfigParser()
        config.add_section("SSH")
        config.set("SSH", "servicename", self.ssh.servicename)
        config.set("SSH", "ip", str(self.ssh.ip))
        config.set("SSH", "port", str(self.ssh.port))
        config.set("SSH", "pidfile", self.ssh.pidfile)
        config.set("SSH", "username", self.ssh.username)
        config.set("SSH", "keydir", self.ssh.keydir)
        config.set("SSH", "privkey", self.ssh.privkey)
        config.set("SSH", "pubkey", self.ssh.pubkey)
        config.set("SSH", "localdir", self.ssh.localdir)
        config.set("SSH", "userdirtemplate", self.ssh.userdirtemplate)
        config.set("SSH", "userauthkeys", self.ssh.userauthkeys)
        config.set("SSH", "usesystemkeys", str(self.ssh.usesystemkeys))
        config.set("SSH", "banner", self.ssh.banner)
        return config

    def getConfigFile(self):
        """Return the config file path to use.

        A file in the current directory wins; otherwise fall back to the
        installed location, creating the data directory if needed.
        """
        if os.path.exists(self.main.config.localfile):
            return self.main.config.localfile
        if not os.path.exists(self.main.config.datadir):
            # datadir was already expanded at definition time; expanduser
            # here is a harmless no-op kept for robustness.
            os.mkdir(os.path.expanduser(self.main.config.datadir))
        return self.main.config.installedfile

    def writeDefaults(self):
        """Serialize the default configuration to disk."""
        config = self.buildDefaults()
        with open(self.getConfigFile(), "wb") as configFile:
            config.write(configFile)

    def getConfig(self):
        """Return the parsed config, or None after writing defaults when no
        file existed yet."""
        configFile = self.getConfigFile()
        if not os.path.exists(configFile):
            self.writeDefaults()
            return
        config = SafeConfigParser()
        config.read(configFile)
        return config

    def updateConfig(self):
        """
        If the configfile doesn't exist, this method will (indirectly) create
        it and exit.

        If it does exist, it will load the config values from the file (which
        may be different from those defined be default in this module), and
        update the in-memory config values with what it reads from the file.
        """
        config = self.getConfig()
        if not config:
            return
        self.ssh.servicename = config.get("SSH", "servicename")
        self.ssh.ip = config.get("SSH", "ip")
        self.ssh.port = int(config.get("SSH", "port"))
        self.ssh.pidfile = config.get("SSH", "pidfile")
        self.ssh.username = str(config.get("SSH", "username"))
        self.ssh.keydir = config.get("SSH", "keydir")
        self.ssh.privkey = config.get("SSH", "privkey")
        self.ssh.pubkey = config.get("SSH", "pubkey")
        self.ssh.localdir = config.get("SSH", "localdir")
        self.ssh.userdirtemplate = config.get("SSH", "userdirtemplate")
        self.ssh.userauthkeys = config.get("SSH", "userauthkeys")
        # SECURITY FIX: this previously ran eval() on the raw file value,
        # executing arbitrary Python embedded in the config file.
        # getboolean() safely parses true/false/yes/no/on/off/1/0,
        # including the "True"/"False" strings written by writeDefaults().
        self.ssh.usesystemkeys = config.getboolean("SSH", "usesystemkeys")
        self.ssh.banner = str(config.get("SSH", "banner"))
        return config
def configuratorFactory():
    """Build a Configurator wired to the module-level config namespaces."""
    return Configurator(main=main, ssh=ssh)
def updateConfig():
    """Convenience wrapper: build a configurator and sync config state."""
    configuratorFactory().updateConfig()
| {
"content_hash": "bbf6c31e1a6eade10d9f21cae5c543d5",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 77,
"avg_line_length": 33.13970588235294,
"alnum_prop": 0.5988462391834923,
"repo_name": "oubiwann/carapace",
"id": "f12045b0edeb221b46307f80ec6cfe1ab8d2d5eb",
"size": "4507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "carapace/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "40633"
}
],
"symlink_target": ""
} |
import torch
from torch.utils.mobile_optimizer import optimize_for_mobile
from torchvision.models.detection import (
fasterrcnn_mobilenet_v3_large_320_fpn,
FasterRCNN_MobileNet_V3_Large_320_FPN_Weights,
)
# Log the torch version used to produce the asset (helps reproduce builds).
print(torch.__version__)
# Pretrained Faster R-CNN with a MobileNetV3-Large 320 FPN backbone; the
# threshold/top-n overrides below reduce proposal counts for mobile speed.
model = fasterrcnn_mobilenet_v3_large_320_fpn(
    weights=FasterRCNN_MobileNet_V3_Large_320_FPN_Weights.DEFAULT,
    box_score_thresh=0.7,
    rpn_post_nms_top_n_test=100,
    rpn_score_thresh=0.4,
    rpn_pre_nms_top_n_test=150,
)
# Switch to inference mode BEFORE scripting so eval-time behavior is baked
# into the TorchScript graph.
model.eval()
script_model = torch.jit.script(model)
# Mobile-specific graph optimizations (operator fusion etc.).
opt_script_model = optimize_for_mobile(script_model)
# Saved into the Android test app's assets directory for on-device loading.
opt_script_model.save("app/src/main/assets/frcnn_mnetv3.pt")
| {
"content_hash": "140dfc17dd36985cbf4808e3ec6faec4",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 66,
"avg_line_length": 30.476190476190474,
"alnum_prop": 0.746875,
"repo_name": "pytorch/vision",
"id": "f99933e9a9ddf153c3085d5cd910f89f2919dea9",
"size": "640",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "android/test_app/make_assets.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "20242"
},
{
"name": "C",
"bytes": "930"
},
{
"name": "C++",
"bytes": "366825"
},
{
"name": "CMake",
"bytes": "18266"
},
{
"name": "Cuda",
"bytes": "90174"
},
{
"name": "Dockerfile",
"bytes": "1608"
},
{
"name": "Java",
"bytes": "21833"
},
{
"name": "Objective-C",
"bytes": "2715"
},
{
"name": "Objective-C++",
"bytes": "3284"
},
{
"name": "PowerShell",
"bytes": "2874"
},
{
"name": "Python",
"bytes": "3952070"
},
{
"name": "Ruby",
"bytes": "1086"
},
{
"name": "Shell",
"bytes": "35660"
}
],
"symlink_target": ""
} |
from collections import defaultdict
from datetime import timedelta
import json
import logging
import markus
from simple_pid import PID
from sqlalchemy.exc import OperationalError
from ichnaea import util
METRICS = markus.get_metrics()
LOGGER = logging.getLogger(__name__)
class ApiKeyLimits:
    """Emit an ``api.limit`` gauge per (API key, path) from today's Redis
    ``apilimit:*`` counters."""

    def __init__(self, task):
        self.task = task

    def __call__(self):
        redis = self.task.redis_client
        today = util.utcnow().strftime("%Y%m%d")
        raw_keys = redis.keys("apilimit:*:" + today)
        raw_values = redis.mget(raw_keys) if raw_keys else []
        # Key format is "apilimit:<api_key>:<path>:<yyyymmdd>".
        key_parts = (key.decode("utf-8").split(":")[1:3] for key in raw_keys)
        for (api_key, path), value in zip(key_parts, raw_values):
            tags = ["key:" + api_key, "path:" + path]
            METRICS.gauge("api.limit", value=int(value), tags=tags)
class ApiUsers:
    """Emit per-API-key unique-user gauges over 1-day and 7-day windows,
    pruning ``apiuser:*`` Redis keys that fall outside the window."""

    def __init__(self, task):
        self.task = task

    def __call__(self):
        redis = self.task.redis_client
        today = util.utcnow().date()
        days = {offset: (today - timedelta(days=offset)).strftime("%Y-%m-%d")
                for offset in range(0, 7)}

        metrics = defaultdict(list)
        for key in redis.scan_iter(match="apiuser:*", count=100):
            _, api_type, api_name, day = key.decode("ascii").split(":")
            if day not in days.values():
                # Drop keys older than the 7-day window.
                redis.delete(key)
                continue
            if day == days[0]:
                metrics[(api_type, api_name, "1d")].append(key)
            metrics[(api_type, api_name, "7d")].append(key)

        for (api_type, api_name, interval), keys in metrics.items():
            METRICS.gauge(
                "%s.user" % api_type,
                value=redis.pfcount(*keys),
                tags=["key:%s" % api_name, "interval:%s" % interval],
            )
class QueueSizeAndRateControl:
    """Generate gauge metrics for queue sizes, and tune sample rate.

    This covers the celery task queues and the data queues.

    There are dynamically created export queues, with names like
    "export_queue_internal", or maybe "queue_export_internal", which are no
    longer monitored. See ichnaea/models/config.py for queue generation.

    The station data queues represent the backlog, and the rate controller,
    if enabled, attempts to keep the backlog size near a target size by
    adjusting the global locate sample rate.

    Observation processing requires transactions, and the rate of new
    transactions can exceed the MySQL purge rate. If allowed, monitor
    this as well, and pause observation processing if it gets too high.
    Monitoring the transaction history length requires the MySQL user to have
    the PROCESS permission.
    """

    def __init__(self, task):
        self.task = task
        self.rate = 0
        self.rc_enabled = None
        self.rc_target = None
        self.rc_kp = None
        self.rc_ki = None
        self.rc_kd = None
        self.rc_state = None
        self.rc_controller = None
        self.trx_history_purging = None
        self.trx_history_min = None
        self.trx_history_max = None

    def __call__(self):
        """Load components, set the rate, and send metrics."""
        # Load the rate controller from Redis
        self.load_rate_controller()

        # Get all queue sizes, some of which are observation queues
        queue_sizes = self.get_queue_sizes()

        # Emit tagged metrics for all queues
        # The sum of certain queue sizes is the observation backlog
        backlog = self.emit_queue_metrics_and_get_backlog(queue_sizes)

        # Read the MySQL InnoDB transaction history length
        # Observation processing can cause history to grow faster than purged
        trx_history_length = self.query_transaction_history_length()

        # Use the rate controller to update the global rate
        # Set the rate to 0% while transaction history is too high
        # Otherwise, attempt to find a rate that maintains a steady backlog
        if self.rc_enabled:
            self.run_rate_controller(backlog, trx_history_length)

        # Emit the current (controlled or manual) global rate
        METRICS.gauge("rate_control.locate", self.rate)

    def load_rate_controller(self):
        """Load rate controller parameters from Redis-stored strings."""
        with self.task.redis_client.pipeline() as pipe:
            pipe.get("global_locate_sample_rate")
            pipe.get("rate_controller_enabled")
            pipe.get("rate_controller_target")
            pipe.get("rate_controller_kp")
            pipe.get("rate_controller_ki")
            pipe.get("rate_controller_kd")
            pipe.get("rate_controller_state")
            pipe.get("rate_controller_trx_purging")
            pipe.get("rate_controller_trx_min")
            pipe.get("rate_controller_trx_max")
            (
                rate,
                rc_enabled,
                rc_target,
                rc_kp,
                rc_ki,
                rc_kd,
                rc_state,
                rc_trx_purging,
                rc_trx_min,
                rc_trx_max,
            ) = pipe.execute()

        try:
            self.rate = float(rate)
        except (TypeError, ValueError):
            self.rate = 100.0

        def load_param(param_type, name, raw_value, range_check, default=None):
            """
            Load and validate a parameter

            Reset invalid parameters in Redis
            Returns (value, is_valid)
            """
            if raw_value is None and default is not None:
                self.task.redis_client.set(name, default)
                raw_value = default
            try:
                val = param_type(raw_value)
                if not range_check(val):
                    raise ValueError("out of range")
                return val, True
            except (TypeError, ValueError):
                log_fmt = "Redis key '%s' has invalid value %r, disabling rate control."
                LOGGER.warning(log_fmt, name, raw_value)
                self.task.redis_client.set(name, default or 0)
                return None, False

        # Validate rate_controller_enabled, exit early if disabled
        self.rc_enabled, valid = load_param(
            int, "rate_controller_enabled", rc_enabled, lambda x: x in (0, 1)
        )
        if not self.rc_enabled:
            self.task.redis_client.set("rate_controller_state", "{}")
            return

        # Validate simple PID parameters, exit if any are invalid
        valid = [True] * 7
        self.rc_target, valid[0] = load_param(
            int, "rate_controller_target", rc_target, lambda x: x >= 0
        )
        self.rc_kp, valid[1] = load_param(
            float, "rate_controller_kp", rc_kp, lambda x: x >= 0, 8
        )
        self.rc_ki, valid[2] = load_param(
            float, "rate_controller_ki", rc_ki, lambda x: x >= 0, 0
        )
        self.rc_kd, valid[3] = load_param(
            float, "rate_controller_kd", rc_kd, lambda x: x >= 0, 0
        )
        self.trx_history_purging, valid[4] = load_param(
            int, "rate_controller_trx_purging", rc_trx_purging, lambda x: x in {0, 1}, 0
        )
        self.trx_history_min, valid[5] = load_param(
            int, "rate_controller_trx_min", rc_trx_min, lambda x: x > 0, 1000
        )
        # BUG FIX: this previously wrote to valid[5] a second time, which
        # clobbered the trx_min validity flag and left valid[6] permanently
        # True, so an invalid rate_controller_trx_max could never disable
        # the controller.
        self.trx_history_max, valid[6] = load_param(
            int,
            "rate_controller_trx_max",
            rc_trx_max,
            lambda x: x > self.trx_history_min,
            1000000,
        )
        if not all(valid):
            self.task.redis_client.set("rate_controller_enabled", 0)
            self.task.redis_client.set("rate_controller_state", "{}")
            self.rc_enabled = False
            return

        # State is None if new, or a JSON-encoded string
        try:
            self.rc_state = json.loads(rc_state.decode("utf8"))
        except AttributeError:
            self.rc_state = {}

        self.rc_controller = PID(
            self.rc_kp,
            self.rc_ki,
            self.rc_kd,
            self.rc_target,
            sample_time=None,
            output_limits=(0, self.rc_target),
        )

        if self.rc_state.get("state") == "running":
            # Update controller with previous state
            try:
                p_term = self.rc_state["p_term"]
                i_term = self.rc_state["i_term"]
                d_term = self.rc_state["d_term"]
                last_input = self.rc_state["last_input"]
                last_output = self.rc_state["last_output"]
                last_time = self.rc_state["last_time"]
            except KeyError:
                # Skip loading state, start with fresh controller
                return
            self.rc_controller._proportional = p_term
            self.rc_controller._integral = i_term
            self.rc_controller._derivative = d_term
            self.rc_controller._last_input = last_input
            self.rc_controller._last_output = last_output
            self.rc_controller._last_time = last_time
            # Apply limits, which may clamp integral and last output
            self.rc_controller.output_limits = (0, self.rc_target)

    def get_queue_sizes(self):
        """Measure the observation queue sizes (redis llen)."""
        names = list(self.task.app.all_queues.keys())
        with self.task.redis_client.pipeline() as pipe:
            for name in names:
                pipe.llen(name)
            queue_lengths = pipe.execute()
        return {name: value for name, value in zip(names, queue_lengths)}

    def emit_queue_metrics_and_get_backlog(self, queue_sizes):
        """Emit metrics for queue sizes, and return the observation backlog."""
        backlog = 0
        for name, size in queue_sizes.items():
            tags_list = ["queue:" + name]
            for tag_name, tag_val in self.task.app.all_queues[name].items():
                tags_list.append(f"{tag_name}:{tag_val}")
                if tag_name == "data_type" and tag_val in ("bluetooth", "cell", "wifi"):
                    backlog += size
            METRICS.gauge("queue", size, tags=tags_list)
        return backlog

    def query_transaction_history_length(self):
        """Get the MySQL InnoDB transaction history length, if allowed."""
        sql = (
            "SELECT count FROM information_schema.innodb_metrics"
            " WHERE name = 'trx_rseg_history_len';"
        )
        length = None
        with self.task.db_session() as session:
            try:
                length = session.scalar(sql)
            except OperationalError as err:
                # Ignore 1227, 'Access denied; you need (at least one of) the PROCESS privilege(s) for this operation')"
                if err.orig.args[0] != 1227:
                    raise
            else:
                METRICS.gauge("trx_history.length", length)
        return length

    def run_rate_controller(self, backlog, trx_history_length):
        """Update the rate, monitor and store controller state."""
        trx_purging_mode = self.trx_history_purging_mode(trx_history_length)
        if trx_purging_mode:
            self.rate = 0.0
            rc_state = None
        else:
            self.update_rate_with_rate_controller(backlog)
            rc_state = self.freeze_controller_state()

        with self.task.redis_client.pipeline() as pipe:
            pipe.set("global_locate_sample_rate", self.rate)
            pipe.set(
                "rate_controller_trx_purging", 1 if self.trx_history_purging else 0
            )
            if rc_state is not None:
                pipe.set("rate_controller_state", rc_state)
            pipe.execute()

        if trx_history_length is not None:
            METRICS.gauge("trx_history.min", self.trx_history_min)
            METRICS.gauge("trx_history.max", self.trx_history_max)
            METRICS.gauge("trx_history.purging", 1 if self.trx_history_purging else 0)
        METRICS.gauge("rate_control.locate.target", self.rc_target)
        METRICS.gauge("rate_control.locate.kp", self.rc_kp)
        METRICS.gauge("rate_control.locate.ki", self.rc_ki)
        METRICS.gauge("rate_control.locate.kd", self.rc_kd)
        p_term, i_term, d_term = self.rc_controller.components
        METRICS.gauge("rate_control.locate.pterm", p_term)
        METRICS.gauge("rate_control.locate.iterm", i_term)
        METRICS.gauge("rate_control.locate.dterm", d_term)

    def trx_history_purging_mode(self, trx_history_length):
        """If transaction history is high, enter purging mode until back to minimum."""
        if (
            trx_history_length is None
            or self.trx_history_purging is None
            or self.trx_history_min is None
            or self.trx_history_max is None
        ):
            return None
        if trx_history_length > self.trx_history_max:
            self.trx_history_purging = True
            return True
        elif self.trx_history_purging:
            # Hysteresis: stay in purging mode until we drop below the
            # minimum, not merely below the maximum.
            if trx_history_length < self.trx_history_min:
                self.trx_history_purging = False
                return False
            return True
        return False

    def update_rate_with_rate_controller(self, backlog):
        """Generate a new sample rate."""
        assert self.rc_controller
        output = self.rc_controller(backlog)
        self.rate = 100.0 * max(0.0, min(1.0, output / self.rc_target))

    def freeze_controller_state(self):
        """Convert a PID controller to a JSON encoded string."""
        assert self.rc_controller
        p_term, i_term, d_term = self.rc_controller.components
        state = {
            "state": "running",
            "p_term": p_term,
            "i_term": i_term,
            "d_term": d_term,
            "last_output": self.rc_controller._last_output,
            "last_input": self.rc_controller._last_input,
            "last_time": self.rc_controller._last_time,
        }
        return json.dumps(state)
| {
"content_hash": "538cb56c88b062b5482f5226121fd223",
"timestamp": "",
"source": "github",
"line_count": 373,
"max_line_length": 120,
"avg_line_length": 37.84450402144772,
"alnum_prop": 0.5675828846698782,
"repo_name": "mozilla/ichnaea",
"id": "aed1fcf6ab77016dea4c4f27bea7296044a817f2",
"size": "14116",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "ichnaea/data/monitor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "34767"
},
{
"name": "Cython",
"bytes": "16678"
},
{
"name": "Dockerfile",
"bytes": "2819"
},
{
"name": "HTML",
"bytes": "32679"
},
{
"name": "JavaScript",
"bytes": "139102"
},
{
"name": "Makefile",
"bytes": "11673"
},
{
"name": "Mako",
"bytes": "432"
},
{
"name": "Python",
"bytes": "1007139"
},
{
"name": "Shell",
"bytes": "8899"
}
],
"symlink_target": ""
} |
from app import db
from flask import current_app
from flask.ext.login import UserMixin
from . import login_manager
from hashlib import md5
from boto.s3.connection import S3Connection
from boto.s3.key import Key
import random
import os
import requests
# Image categories recognized for series artwork.
IMAGE_TYPES = ['poster', 'series', 'fanart', 'season']
# Series status is stored in the DB as a one-letter code.
STATUSES = {'Continuing': 'c', 'Ended': 'e', 'On Hiatus': 'h', 'Other': 'o'}
# Add inverse mapping (code -> label) so lookups work in both directions.
STATUSES.update(dict((STATUSES[k], k) for k in STATUSES))
# Air day is stored as a two-letter code.
DAYS_OF_WEEK = {'Sunday': 'su',
                'Monday': 'mo',
                'Tuesday': 'tu',
                'Wednesday': 'we',
                'Thursday': 'th',
                'Friday': 'fr',
                'Saturday': 'sa'}
# Add inverse mapping (code -> label) so lookups work in both directions.
DAYS_OF_WEEK.update(dict((DAYS_OF_WEEK[k], k) for k in DAYS_OF_WEEK))
# Many-to-many join table: which images a user has associated with series.
user_images = db.Table('user_images',
                       db.Column('user_id', db.Integer,
                                 db.ForeignKey('users.id'), nullable=False),
                       db.Column('image_id', db.Integer,
                                 db.ForeignKey('images.id'), nullable=False))
# Many-to-many join table: a user's favorite series.
user_series = db.Table('user_series',
                       db.Column('user_id', db.Integer,
                                 db.ForeignKey('users.id'), nullable=False),
                       db.Column('series_id', db.Integer,
                                 db.ForeignKey('series.id'), nullable=False))
class Role(db.Model):
    """A user role; deleting a role cascades to its users."""

    __tablename__ = 'roles'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    users = db.relationship('User', backref='role', cascade="all,delete")

    def __repr__(self):
        return '<Role {0!r}>'.format(self.name)
class User(UserMixin, db.Model):
    """Application user; tracks favorite series and user-scoped images."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), index=True)
    email = db.Column(db.String(120), index=True, unique=True)
    role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))
    # Many-to-many: images this user has picked, via the user_images table.
    images = db.relationship('Image', secondary=user_images, lazy='dynamic')
    # Many-to-many: the user's favorite series, via the user_series table.
    favorites = db.relationship('Series', secondary=user_series,
                                lazy='dynamic')
    created_at = db.Column(db.DateTime)
    updated_at = db.Column(db.DateTime)
    last_seen = db.Column(db.DateTime)
    def save_images(self, series):
        """Attach all of *series*' images to this user and commit."""
        for image in series.images:
            self.images.append(image)
        db.session.merge(self)
        db.session.commit()
    def upcoming(self):
        """Return upcoming episodes for this user.

        NOTE(review): currently returns hard-coded placeholder data; this
        needs to be replaced with a real query over the user's favorites.
        """
        return [
            {
                'air_date': 'April 18, 2014',
                'series': {'name': 'Game of Thrones'},
                'name': 'Song of Ice and Fire',
            },
            {
                'air_date': 'September 11, 2014',
                'series': {'name': 'Breaking Bad'},
                'name': 'Felina',
            },
            {
                'air_date': 'December 7, 2014',
                'series': {'name': 'Star Trek: The Next Generation'},
                'name': 'Inner Light',
            },
        ]
    def avatar(self, size):
        """Gravatar URL for this user's email at the given pixel *size*."""
        return ('http://www.gravatar.com/avatar/' +
                md5(self.email).hexdigest() + '?d=mm&s=' + str(size))
    def __repr__(self):
        return '<User %r>' % (self.name)
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login hook: map a stored session id back to a User row."""
    numeric_id = int(user_id)
    return User.query.get(numeric_id)
class Series(db.Model):
    """A TV series together with its episodes and artwork."""
    __tablename__ = 'series'
    id = db.Column(db.Integer, primary_key=True)
    # Two-letter weekday codes, matching the DAYS_OF_WEEK mapping above.
    air_day = db.Column(db.Enum('su', 'mo', 'tu', 'we', 'th', 'fr', 'sa',
                                name='day_of_week'))
    air_time = db.Column(db.Time)
    first_aired = db.Column(db.Date)
    network = db.Column(db.String())
    overview = db.Column(db.String())
    rating = db.Column(db.Numeric(3, 1))
    rating_count = db.Column(db.Integer)
    runtime = db.Column(db.Integer)
    name = db.Column(db.String(), nullable=False)
    status = db.Column(db.Enum('c', 'e', 'h', 'o', name='status'))
    last_updated = db.Column(db.DateTime)
    episodes = db.relationship('Episode', backref='series', lazy='dynamic')
    images = db.relationship('Image', backref='series', lazy='dynamic')
    def __repr__(self):
        return '<Series {!r}>'.format(self.name)
    def __unicode__(self):
        return '<Series {!r}>'.format(self.name)
    def image(self, type, user):
        """Return one randomly chosen image of *type*, or "" if none."""
        candidates = self.all_images(type, user)
        if not candidates:
            return ""
        return random.choice(candidates)
    def all_images(self, type, user):
        """All images of *type*: the user's saved set when this series is
        one of their favorites, otherwise the series-level (non-episode)
        images."""
        if self in user.favorites.all():
            return user.images.filter_by(series=self, type=type).all()
        return self.images.filter_by(type=type, episode=None).all()
class Episode(db.Model):
    """A single episode of a Series."""
    __tablename__ = 'episodes'
    id = db.Column(db.Integer, primary_key=True)
    series_id = db.Column(db.Integer, db.ForeignKey('series.id'),
                          nullable=False)
    season = db.Column(db.Integer)
    episode_number = db.Column(db.Integer)
    name = db.Column(db.String())
    overview = db.Column(db.String())
    rating = db.Column(db.Numeric(3, 1))
    rating_count = db.Column(db.Integer)
    air_date = db.Column(db.Date)
    images = db.relationship('Image', backref='episode', lazy='dynamic')
    def __repr__(self):
        return '<Episode %r>' % (self.name)
    def __unicode__(self):
        return '<Episode %r>' % (self.name)
    def image(self, user):
        """Return a random image for this episode, or "" when it has none.

        *user* is unused here but kept for signature parity with
        Series.image.
        """
        # Materialize the query once; the original called .all() twice,
        # issuing two database queries for the same result.
        images = self.images.all()
        return random.choice(images) if images else ""
class Image(db.Model):
    """Artwork for a series or episode, mirrored into an S3 bucket."""
    __tablename__ = 'images'
    id = db.Column(db.Integer, primary_key=True)
    # Episode-level art carries an episode_id; series-level art leaves it NULL.
    episode_id = db.Column(db.Integer, db.ForeignKey('episodes.id'),
                           nullable=True)
    series_id = db.Column(db.Integer, db.ForeignKey('series.id'),
                          nullable=False)
    source = db.Column(db.String, nullable=False, unique=True)
    key = db.Column(db.String, nullable=False, unique=True)
    type = db.Column(db.Enum('poster', 'series', 'fanart', 'season',
                             name='image_types'), nullable=False)
    def _s3_key(self):
        """Build the boto Key for this image.

        Extracted because save() and get_url() previously duplicated the
        connection/bucket boilerplate verbatim.
        """
        conn = S3Connection(current_app.config['AWS_ACCESS_KEY'],
                            current_app.config['AWS_SECRET_KEY'])
        bucket = conn.get_bucket(current_app.config['AWS_BUCKET'],
                                 validate=False)
        return Key(bucket, self.key)
    def save(self):
        """Fetch the image from its source URL and store it in S3.

        No-op when the key already exists; a non-200 fetch is silently
        skipped (best-effort mirroring).
        """
        key = self._s3_key()
        if not key.exists():
            current_app.logger.debug("Saving image: %s" % self.source)
            r = requests.get(self.source)
            if r.status_code == 200:
                key.set_contents_from_string(r.content)
        else:
            current_app.logger.debug("Image: %s already saved." % self.key)
    def get_url(self):
        """Return a signed S3 URL for this image, valid for 600 seconds."""
        return self._s3_key().generate_url(600)
    def __repr__(self):
        return '<Image %r>' % (self.key)
    def __unicode__(self):
        return '<Image %r>' % (self.key)
| {
"content_hash": "c74dd3051ad2013a934aa43810263a96",
"timestamp": "",
"source": "github",
"line_count": 202,
"max_line_length": 78,
"avg_line_length": 35.39108910891089,
"alnum_prop": 0.5556021821233739,
"repo_name": "happyraul/tv",
"id": "1b6d6b06d579f21f44b6456787bf0a263f8704e0",
"size": "7149",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2259"
},
{
"name": "Python",
"bytes": "41865"
}
],
"symlink_target": ""
} |
"""
PXE Driver and supporting meta-classes.
"""
from ironic.drivers import base
from ironic.drivers.modules import ipminative
from ironic.drivers.modules import ipmitool
from ironic.drivers.modules import pxe
from ironic.drivers.modules import ssh
class PXEAndIPMIToolDriver(base.BaseDriver):
    """PXE + IPMITool driver.
    This driver implements the `core` functionality, combining
    :class:ironic.drivers.ipmi.IPMI for power on/off and reboot with
    :class:ironic.driver.pxe.PXE for image deployment. Implementations are in
    those respective classes; this class is merely the glue between them.
    """
    def __init__(self):
        # IPMI (via the ipmitool binary) provides power control; PXE
        # provides image deployment.
        self.power = ipmitool.IPMIPower()
        self.deploy = pxe.PXEDeploy()
        # Rescue reuses the PXE deploy implementation.
        self.rescue = self.deploy
        self.vendor = pxe.VendorPassthru()
class PXEAndSSHDriver(base.BaseDriver):
    """PXE + SSH driver.
    NOTE: This driver is meant only for testing environments.
    This driver implements the `core` functionality, combining
    :class:ironic.drivers.ssh.SSH for power on/off and reboot of virtual
    machines tunneled over SSH, with :class:ironic.driver.pxe.PXE for image
    deployment. Implementations are in those respective classes; this class is
    merely the glue between them.
    """
    def __init__(self):
        self.power = ssh.SSHPower()
        self.deploy = pxe.PXEDeploy()
        # Rescue reuses the PXE deploy implementation.
        self.rescue = self.deploy
        # No vendor passthru in the SSH/testing configuration.
        self.vendor = None
class PXEAndIPMINativeDriver(base.BaseDriver):
    """PXE + Native IPMI driver.
    This driver implements the `core` functionality, combining
    :class:ironic.drivers.modules.ipminative.NativeIPMIPower for power
    on/off and reboot with
    :class:ironic.driver.modules.pxe.PXE for image deployment.
    Implementations are in those respective classes;
    this class is merely the glue between them.
    """
    def __init__(self):
        # Pure-Python (native) IPMI for power control; PXE for deployment.
        self.power = ipminative.NativeIPMIPower()
        self.deploy = pxe.PXEDeploy()
        # Rescue reuses the PXE deploy implementation.
        self.rescue = self.deploy
        self.vendor = pxe.VendorPassthru()
| {
"content_hash": "4f76fa4e2894c3cafabc1150919e9372",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 78,
"avg_line_length": 32.79032258064516,
"alnum_prop": 0.7083128381701919,
"repo_name": "citrix-openstack-build/ironic",
"id": "d6e0237fc8e1cb4f8a7aa6ebfa4abe447eb47802",
"size": "2711",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ironic/drivers/pxe.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "19934"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "1142333"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import datetime
from dashboard.api import api_request_handler
from dashboard.common import datastore_hooks
from dashboard.common import namespaced_stored_object
from dashboard.common import utils
from dashboard.models import graph_data
class BadRequestError(Exception):
  """Error for malformed API requests.

  NOTE(review): the handler below raises
  api_request_handler.BadRequestError instead of this class; this local
  definition appears unused — confirm before removing.
  """
  pass
class TimeseriesHandler(api_request_handler.ApiRequestHandler):
  """API handler for getting timeseries data."""
  def _CheckUser(self):
    # Any logged-in user may call this endpoint; per-test privacy is
    # enforced in Post() via the internal_only assertion below.
    self._CheckIsLoggedIn()
  def Post(self, *args):
    """Returns timeseries data in response to API requests.
    Argument:
      test_path: Full path of test timeseries
    Outputs:
      JSON timeseries data for the test_path, see README.md.
    """
    # History window; defaults to the last 30 days.
    try:
      days = int(self.request.get('num_days', 30))
    except ValueError:
      raise api_request_handler.BadRequestError(
          'Invalid num_days parameter %s' % self.request.get('num_days'))
    if days <= 0:
      # NOTE(review): zero is rejected too, although the message only
      # mentions "negative".
      raise api_request_handler.BadRequestError(
          'num_days cannot be negative (%s)' % days)
    before = datetime.datetime.now() - datetime.timedelta(days=days)
    test_path = args[0]
    test_key = utils.TestKey(test_path)
    test = test_key.get()
    if not test:
      raise api_request_handler.BadRequestError(
          'Invalid test_path %s' % test_path)
    # Internal-only tests may only be served when datastore hooks permit
    # an unaltered (privileged) query for this request.
    assert(
        datastore_hooks.IsUnalteredQueryPermitted() or not test.internal_only)
    datastore_hooks.SetSinglePrivilegedRequest()
    q = graph_data.Row.query()
    q = q.filter(graph_data.Row.parent_test == utils.OldStyleTestKey(test_key))
    q = q.filter(graph_data.Row.timestamp > before)
    rows = q.fetch()
    if not rows:
      return []
    # Revision columns ('r_*' attributes on the first row) extend the header.
    revisions = [rev for rev in rows[0].to_dict() if rev.startswith('r_')]
    header = ['revision', 'value', 'timestamp'] + revisions
    timeseries = [header]
    # One output row per datastore Row, ordered by revision.
    for row in sorted(rows, key=lambda r: r.revision):
      timeseries.append([self._GetValue(row, a) for a in header])
    return {
        'timeseries': timeseries,
        'test_path': test_path,
        'revision_logs': namespaced_stored_object.Get('revision_info'),
        'improvement_direction': test.improvement_direction,
    }
  def _GetValue(self, row, attr):
    """Returns row.<attr>, serializing timestamps to ISO-8601 strings."""
    value = getattr(row, attr, None)
    if attr == 'timestamp':
      return value.isoformat()
    return value
| {
"content_hash": "10f66a49f94e811b04048966a0cb71f8",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 79,
"avg_line_length": 30.705128205128204,
"alnum_prop": 0.6755741127348643,
"repo_name": "endlessm/chromium-browser",
"id": "d03f6632f73b36f1cad956f4d1055af3bca6e06a",
"size": "2558",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/catapult/dashboard/dashboard/api/timeseries.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""Tests for du command."""
from __future__ import absolute_import
import gslib.tests.testcase as testcase
from gslib.tests.testcase.integration_testcase import SkipForS3
from gslib.tests.util import ObjectToURI as suri
from gslib.util import Retry
class TestDu(testcase.GsUtilIntegrationTestCase):
  """Integration tests for du command."""
  def _create_nested_subdir(self):
    """Creates a nested subdirectory for use by tests in this module."""
    bucket_uri = self.CreateBucket()
    obj_uris = []
    obj_uris.append(self.CreateObject(
        bucket_uri=bucket_uri, object_name='sub1材/five', contents='5five'))
    obj_uris.append(self.CreateObject(
        bucket_uri=bucket_uri, object_name='sub1材/four', contents='four'))
    obj_uris.append(self.CreateObject(
        bucket_uri=bucket_uri, object_name='sub1材/sub2/five', contents='5five'))
    obj_uris.append(self.CreateObject(
        bucket_uri=bucket_uri, object_name='sub1材/sub2/four', contents='four'))
    self.AssertNObjectsInBucket(bucket_uri, 4)
    return bucket_uri, obj_uris
  def test_object(self):
    """Tests du against a single object."""
    obj_uri = self.CreateObject(contents='foo')
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check():
      stdout = self.RunGsUtil(['du', suri(obj_uri)], return_stdout=True)
      self.assertEqual(stdout, '%-10s %s\n' % (3, suri(obj_uri)))
    _Check()
  def test_bucket(self):
    """Tests du against a bucket containing one object."""
    bucket_uri = self.CreateBucket()
    obj_uri = self.CreateObject(bucket_uri=bucket_uri, contents='foo')
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check():
      stdout = self.RunGsUtil(['du', suri(bucket_uri)], return_stdout=True)
      self.assertEqual(stdout, '%-10s %s\n' % (3, suri(obj_uri)))
    _Check()
  def test_subdirs(self):
    """Tests that subdirectory sizes are correctly calculated and listed."""
    bucket_uri, obj_uris = self._create_nested_subdir()
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check():
      stdout = self.RunGsUtil(['du', suri(bucket_uri)], return_stdout=True)
      self.assertSetEqual(set(stdout.splitlines()), set([
          '%-10s %s' % (5, suri(obj_uris[0])),
          '%-10s %s' % (4, suri(obj_uris[1])),
          '%-10s %s' % (5, suri(obj_uris[2])),
          '%-10s %s' % (4, suri(obj_uris[3])),
          '%-10s %s/sub1材/sub2/' % (9, suri(bucket_uri)),
          '%-10s %s/sub1材/' % (18, suri(bucket_uri)),
      ]))
    _Check()
  def test_multi_args(self):
    """Tests running du with multiple command line arguments."""
    bucket_uri = self.CreateBucket()
    obj_uri1 = self.CreateObject(bucket_uri=bucket_uri, contents='foo')
    obj_uri2 = self.CreateObject(bucket_uri=bucket_uri, contents='foo2')
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check():
      stdout = self.RunGsUtil(['du', suri(obj_uri1), suri(obj_uri2)],
                              return_stdout=True)
      self.assertSetEqual(set(stdout.splitlines()), set([
          '%-10s %s' % (3, suri(obj_uri1)),
          '%-10s %s' % (4, suri(obj_uri2)),
      ]))
    _Check()
  def test_total(self):
    """Tests total size listing via the -c flag."""
    bucket_uri = self.CreateBucket()
    obj_uri1 = self.CreateObject(bucket_uri=bucket_uri, contents='foo')
    obj_uri2 = self.CreateObject(bucket_uri=bucket_uri, contents='zebra')
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check():
      stdout = self.RunGsUtil(['du', '-c', suri(bucket_uri)],
                              return_stdout=True)
      self.assertSetEqual(set(stdout.splitlines()), set([
          '%-10s %s' % (3, suri(obj_uri1)),
          '%-10s %s' % (5, suri(obj_uri2)),
          '%-10s total' % 8,
      ]))
    _Check()
  def test_human_readable(self):
    """Tests human-readable size output via the -h flag."""
    obj_uri = self.CreateObject(contents='x' * 2048)
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check():
      stdout = self.RunGsUtil(['du', '-h', suri(obj_uri)], return_stdout=True)
      self.assertEqual(stdout, '%-10s %s\n' % ('2 KiB', suri(obj_uri)))
    _Check()
  def test_summary(self):
    """Tests summary listing with the -s flag."""
    bucket_uri1, _ = self._create_nested_subdir()
    bucket_uri2, _ = self._create_nested_subdir()
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check():
      stdout = self.RunGsUtil([
          'du', '-s', suri(bucket_uri1), suri(bucket_uri2)], return_stdout=True)
      self.assertSetEqual(set(stdout.splitlines()), set([
          '%-10s %s' % (18, suri(bucket_uri1)),
          '%-10s %s' % (18, suri(bucket_uri2)),
      ]))
    _Check()
  def test_subdir_summary(self):
    """Tests summary listing with the -s flag on a subdirectory."""
    bucket_uri1, _ = self._create_nested_subdir()
    bucket_uri2, _ = self._create_nested_subdir()
    subdir1 = suri(bucket_uri1, 'sub1材')
    subdir2 = suri(bucket_uri2, 'sub1材')
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check():
      stdout = self.RunGsUtil(
          ['du', '-s', subdir1, subdir2], return_stdout=True)
      self.assertSetEqual(set(stdout.splitlines()), set([
          '%-10s %s' % (18, subdir1),
          '%-10s %s' % (18, subdir2),
      ]))
    _Check()
  @SkipForS3('S3 lists versions in reverse order.')
  def test_versioned(self):
    """Tests listing all versions with the -a flag."""
    bucket_uri = self.CreateVersionedBucket()
    object_uri1 = self.CreateObject(
        bucket_uri=bucket_uri, object_name='foo', contents='foo')
    object_uri2 = self.CreateObject(
        bucket_uri=bucket_uri, object_name='foo', contents='foo2')
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check1():
      stdout = self.RunGsUtil(['du', suri(bucket_uri)], return_stdout=True)
      self.assertEqual(stdout, '%-10s %s\n' % (4, suri(object_uri2)))
    _Check1()
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check2():
      stdout = self.RunGsUtil(['du', '-a', suri(bucket_uri)],
                              return_stdout=True)
      self.assertSetEqual(set(stdout.splitlines()), set([
          '%-10s %s#%s' % (
              3, suri(object_uri1), object_uri1.generation),
          '%-10s %s#%s' % (
              4, suri(object_uri2), object_uri2.generation),
      ]))
    _Check2()
  def test_null_endings(self):
    """Tests outputting 0-endings with the -0 flag."""
    bucket_uri = self.CreateBucket()
    obj_uri1 = self.CreateObject(bucket_uri=bucket_uri, contents='foo')
    obj_uri2 = self.CreateObject(bucket_uri=bucket_uri, contents='zebra')
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check():
      stdout = self.RunGsUtil(['du', '-0c', suri(bucket_uri)],
                              return_stdout=True)
      self.assertSetEqual(set(stdout.split('\0')), set([
          '%-10s %s' % (3, suri(obj_uri1)),
          '%-10s %s' % (5, suri(obj_uri2)),
          '%-10s total' % 8,
          ''
      ]))
    _Check()
  def test_excludes(self):
    """Tests exclude pattern excluding certain file paths."""
    bucket_uri, obj_uris = self._create_nested_subdir()
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check():
      stdout = self.RunGsUtil([
          'du', '-e', '*sub2/five*', '-e', '*sub1材/four',
          suri(bucket_uri)], return_stdout=True)
      self.assertSetEqual(set(stdout.splitlines()), set([
          '%-10s %s' % (5, suri(obj_uris[0])),
          '%-10s %s' % (4, suri(obj_uris[3])),
          '%-10s %s/sub1材/sub2/' % (4, suri(bucket_uri)),
          '%-10s %s/sub1材/' % (9, suri(bucket_uri)),
      ]))
    _Check()
  def test_excludes_file(self):
    """Tests file exclusion with the -X flag."""
    bucket_uri, obj_uris = self._create_nested_subdir()
    fpath = self.CreateTempFile(contents='*sub2/five*\n*sub1材/four')
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check():
      stdout = self.RunGsUtil([
          'du', '-X', fpath, suri(bucket_uri)], return_stdout=True)
      self.assertSetEqual(set(stdout.splitlines()), set([
          '%-10s %s' % (5, suri(obj_uris[0])),
          '%-10s %s' % (4, suri(obj_uris[3])),
          '%-10s %s/sub1材/sub2/' % (4, suri(bucket_uri)),
          '%-10s %s/sub1材/' % (9, suri(bucket_uri)),
      ]))
    _Check()
| {
"content_hash": "b39008ad2ff40b5305f0a2ab636f0b1f",
"timestamp": "",
"source": "github",
"line_count": 223,
"max_line_length": 80,
"avg_line_length": 40.93273542600897,
"alnum_prop": 0.6090052585451359,
"repo_name": "Sorsly/subtle",
"id": "3a1436af46cc719c3af5b19597e64b419bcbe757",
"size": "9776",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "google-cloud-sdk/platform/gsutil/gslib/tests/test_du.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1581"
},
{
"name": "CSS",
"bytes": "226"
},
{
"name": "HTML",
"bytes": "4637"
},
{
"name": "JavaScript",
"bytes": "3037"
},
{
"name": "PHP",
"bytes": "4543"
},
{
"name": "Pascal",
"bytes": "31"
},
{
"name": "Python",
"bytes": "13243860"
},
{
"name": "Roff",
"bytes": "1050600"
},
{
"name": "Shell",
"bytes": "16136"
},
{
"name": "Smarty",
"bytes": "2484"
},
{
"name": "SourcePawn",
"bytes": "308"
}
],
"symlink_target": ""
} |
# Driver script: build the Ozone control model for this specific
# (RelativeDifference transform, LinearTrend, BestCycle, SVR) combination;
# testmod supplies the shared test harness.
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['RelativeDifference'] , ['LinearTrend'] , ['BestCycle'] , ['SVR'] );
"content_hash": "0a1fc776a8142b9dc59040e21dabdd83",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 90,
"avg_line_length": 40.75,
"alnum_prop": 0.7239263803680982,
"repo_name": "antoinecarme/pyaf",
"id": "91ff18e7be4200a00e80955c6578de538cf2d048",
"size": "163",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/model_control/detailed/transf_RelativeDifference/model_control_one_enabled_RelativeDifference_LinearTrend_BestCycle_SVR.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
} |
""" This example assigns creatives to placements and creates a unique ad for
each assignment. To get creatives, run GetCreatives.java example. To get
placements, run get_placement.py.
Tags: creative.assignCreativesToPlacements
"""
__author__ = 'api.jdilallo@gmail.com (Joseph DiLallo)'
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfaClient
# Placeholder IDs — replace before running.  The two lists are paired up
# by index in main().
CREATIVE_IDS = ['INSERT_FIRST_CREATIVE_ID', 'INSERT_SECOND_CREATIVE_ID']
PLACEMENT_IDS = ['INSERT_FIRST_PLACEMENT_ID', 'INSERT_SECOND_PLACEMENT_ID']
def main(client, creative_ids, placement_ids):
# Initialize appropriate service.
creative_service = client.GetCreativeService(
'https://advertisersapitest.doubleclick.net', 'v1.19')
# Create creative placement assignment structure.
creative_placement_assignments = []
for index in range(len(creative_ids)):
creative_placement_assignments.append({
'xsi_type': 'CreativePlacementAssignment',
'creativeId': creative_ids[index],
'placementId': placement_ids[index],
'placementIds': placement_ids
})
# Submit the request.
results = creative_service.AssignCreativesToPlacements(
creative_placement_assignments)
# Display results.
if results:
for assignment_result in results:
if assignment_result['errorMessage'] is None:
print ('Ad with name \'%s\' and ID \'%s\' was created.' %
(assignment_result['adName'], assignment_result['adId']))
else:
print ('Assignment unsuccessful for creative ID \'%s\' and placementID'
' \'%s\'. Error message says \'%s\'.'
% (assignment_result['creativeId'],
assignment_result['placementId'],
assignment_result['errorMessage']))
else:
print 'No ads were created.'
if __name__ == '__main__':
  # Initialize client object.
  client = DfaClient(path=os.path.join('..', '..', '..', '..'))
  # Run the example with the placeholder IDs defined above.
  main(client, CREATIVE_IDS, PLACEMENT_IDS)
| {
"content_hash": "910e0e24da643f8e482779848e1c0ffe",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 79,
"avg_line_length": 34.28333333333333,
"alnum_prop": 0.6660184735051046,
"repo_name": "donspaulding/adspygoogle",
"id": "df0d28b41a204f24562bfb10adb68aaf787a8e05",
"size": "2675",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/adspygoogle/dfa/v1_19/assign_creatives_to_placements.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3734067"
},
{
"name": "Shell",
"bytes": "603"
}
],
"symlink_target": ""
} |
from django.template import Context
from django.template.loader import get_template
from django import template
# Library instance that the filter functions below attach to via
# @register.filter.
register = template.Library()
@register.filter
def bootstrap(element):
    """Render a form element with Bootstrap markup.

    Dispatches on what *element* is: a BoundField renders field.html, a
    formset (anything exposing a ``management_form`` attribute) renders
    formset.html, and any other form renders form.html.
    """
    # NOTE: the original local variable was named ``template``, shadowing
    # the imported ``django.template`` module — avoided here.
    if element.__class__.__name__.lower() == 'boundfield':
        template_name, context_key = "bootstrapform/field.html", 'field'
    elif getattr(element, 'management_form', None):
        template_name, context_key = "bootstrapform/formset.html", 'formset'
    else:
        template_name, context_key = "bootstrapform/form.html", 'form'
    return get_template(template_name).render(Context({context_key: element}))
@register.filter
def is_checkbox(field):
    """Return True when the bound field's widget is a CheckboxInput."""
    widget_name = field.field.widget.__class__.__name__.lower()
    return widget_name == "checkboxinput"
@register.filter
def is_radio(field):
    """Return True when the bound field's widget is a RadioSelect."""
    widget_name = field.field.widget.__class__.__name__.lower()
    return widget_name == "radioselect"
| {
"content_hash": "9d63c77cda479c9c6b8f6614a80580d1",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 75,
"avg_line_length": 31.5,
"alnum_prop": 0.6567460317460317,
"repo_name": "cojito/django-bootstrap-form",
"id": "9f29b27a2a3889ca5df7f86bbe9541ebdab37037",
"size": "1008",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bootstrapform/templatetags/bootstrap.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "9655"
}
],
"symlink_target": ""
} |
from runner.koan import *
class AboutSets(Koan):
    """Koan exercises covering Python set behavior."""
    def test_sets_make_keep_lists_unique(self):
        highlanders = ['MacLeod', 'Ramirez', 'MacLeod', 'Matunas', 'MacLeod', 'Malcolm', 'MacLeod']
        there_can_only_be_only_one = set(highlanders)
        # Duplicates collapse: only one 'MacLeod' survives.
        self.assertEqual(set(['Malcolm', 'Matunas', 'MacLeod', 'Ramirez']), there_can_only_be_only_one)
    def test_sets_are_unordered(self):
        # Sets compare by membership only, so element order never matters.
        self.assertEqual(set(['1', '3', '2', '5', '4']), set('12345'))
    def test_convert_the_set_into_a_list_to_sort_it(self):
        # sorted() accepts any iterable and returns a sorted list.
        self.assertEqual(['1', '2', '3', '4', '5'], sorted(set('13245')))
    # ------------------------------------------------------------------
    def chars_in(self, a_set):
        # Helper: a set's characters as one sorted string, for stable
        # comparisons below.
        return ''.join(sorted(a_set))
    def test_set_have_arithmetic_operators(self):
        good_guy = set('macleod')
        bad_guy = set('mutunas')
        # Difference, union, intersection, and symmetric difference.
        self.assertEqual('cdelo', self.chars_in( good_guy - bad_guy) )
        self.assertEqual('acdelmnostu', self.chars_in( good_guy | bad_guy ))
        self.assertEqual('am', self.chars_in( good_guy & bad_guy ))
        self.assertEqual('cdelnostu', self.chars_in( good_guy ^ bad_guy ))
    # ------------------------------------------------------------------
    def test_we_can_query_set_membership(self):
        self.assertEqual(True, 127 in set([127, 0, 0, 1]) )
        self.assertEqual(True, 'cow' not in set('apocalypse now') )
    def test_we_can_compare_subsets(self):
        # <= and issubset() are equivalent spellings of the subset test.
        self.assertEqual(True, set('cake') <= set('cherry cake'))
        self.assertEqual(True, set('cake').issubset(set('cherry cake')) )
        self.assertEqual(False, set('cake') > set('pie'))
| {
"content_hash": "ff6d88b362e4ad93d3cc6294d297c5d1",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 103,
"avg_line_length": 40.642857142857146,
"alnum_prop": 0.5371997656707674,
"repo_name": "arjunasuresh3/Mypykoans",
"id": "dc2520df6df39ec59b239e6f787954d623e6c55f",
"size": "1754",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python 2/koans/about_sets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "378216"
},
{
"name": "Shell",
"bytes": "1525"
}
],
"symlink_target": ""
} |
from threading import Thread, current_thread, Lock
from time import sleep
# Per-thread "please print a progress line" flags, indexed by thread id;
# the print-and-clear in check_report_progress and the re-arm in the main
# loop are both done under progress_lock.
report_progress_now = []
progress_lock = Lock()
def check_report_progress(me, id):
    """Print a progress line for thread *me* if its flag at *id* is set.

    The flag check happens outside the lock (a stale read only delays the
    report); the print-and-clear happens under progress_lock so it cannot
    interleave with the main thread re-arming the flags.
    """
    # The original ``global`` declaration was unnecessary: neither name is
    # rebound here, only mutated in place.
    if report_progress_now[id]:
        # ``with`` releases the lock even if print() raises, unlike the
        # original bare acquire()/release() pair.
        with progress_lock:
            print("{} [{}] is making progress.".format(me.name, me.ident))
            report_progress_now[id] = False
def exception_spam(id):
    """Raise and immediately swallow exceptions in a tight loop, forever.

    The exceptions are never observed by the program itself — presumably
    this stresses an attached debugger's exception handling (confirm).
    """
    me = current_thread()
    while True:
        try:
            raise Exception()
        except Exception:
            pass
        check_report_progress(me, id)
def sleep_forever(id):
    """Idle worker: sleep in 10-second intervals forever, reporting
    progress between naps."""
    me = current_thread()
    while True:
        sleep(10)
        check_report_progress(me, id)
def busy_loop(id):
    """CPU-bound worker: spin on integer arithmetic forever, reporting
    progress every iteration."""
    me = current_thread()
    i = 0
    while True:
        # Wrap the counter so it never grows into a big int.
        i = (i % 100000000) + 1
        check_report_progress(me, id)
        # if i % 10000000 == 0: raise Exception()
if __name__ == '__main__':
    # Spawn num_threads workers running thread_fun; the main thread itself
    # runs main_fun with the last flag index.
    num_threads = 10
    thread_list = []
    thread_fun, main_fun = exception_spam, busy_loop
    for i in range(num_threads):
        thread_list.append(Thread(target=thread_fun,args=(i,)))
        report_progress_now.append(True)  # one progress flag per worker
    for t in thread_list:
        t.start()
    # Extra flag at index num_threads belongs to the main thread.
    report_progress_now.append(True)
    me, id = current_thread(), num_threads
    while True:
        try:
            main_fun(id)
        except KeyboardInterrupt:
            # Ctrl+C re-arms every flag so each thread prints one progress
            # line instead of the program terminating.
            progress_lock.acquire()
            for i, _ in enumerate(report_progress_now):
                report_progress_now[i] = True
            progress_lock.release()
| {
"content_hash": "f25de3804f59094fd762011a9358e722",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 70,
"avg_line_length": 25.80327868852459,
"alnum_prop": 0.5851334180432021,
"repo_name": "DEVSENSE/PTVS",
"id": "120972ddc64cefbb5d471c85afb9acf026fd2d43",
"size": "1574",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "Python/Tests/TestData/DebugAttach/Simple.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "109"
},
{
"name": "Batchfile",
"bytes": "3371"
},
{
"name": "C",
"bytes": "18009"
},
{
"name": "C#",
"bytes": "13972921"
},
{
"name": "C++",
"bytes": "189575"
},
{
"name": "CSS",
"bytes": "7024"
},
{
"name": "HTML",
"bytes": "36546"
},
{
"name": "JavaScript",
"bytes": "85712"
},
{
"name": "Objective-C",
"bytes": "4201"
},
{
"name": "PowerShell",
"bytes": "126643"
},
{
"name": "Python",
"bytes": "963885"
},
{
"name": "Smarty",
"bytes": "8156"
},
{
"name": "Tcl",
"bytes": "24968"
}
],
"symlink_target": ""
} |
import numpy
import scipy.stats
import matplotlib.pylab as pt
import statsmodels.api as sm
#import scipy.optimize.minimize
def narrow_sense_hsq(genotype_mat, phenotype_list):
    """Method-of-moments estimate of narrow-sense heritability (h^2).

    Regresses squared pairwise phenotype differences on pairwise genetic
    relatedness (a Haseman-Elston style regression) and converts the
    slope into an h^2 estimate.  Single point estimate, no bootstrapping;
    bootstrapping wrappers live elsewhere in this module.

    Args:
        genotype_mat: (n_segs, n_loci) array of 0/1 genotypes.
        phenotype_list: length-n_segs sequence of phenotype values.

    Returns:
        The h^2 estimate as a float.
    """
    n_loci = genotype_mat.shape[1]
    # Per-locus allele frequencies; drop nearly fixed loci (MAF < 10%),
    # whose 1/sqrt(p*q) standardization below would blow up.
    rm_allele_frequencies = numpy.mean(genotype_mat, axis=0)
    by_allele_frequencies = 1 - rm_allele_frequencies
    nearly_fixed = (rm_allele_frequencies < .1) | (by_allele_frequencies < .1)
    # Integer index array: numpy.delete rejects the float indices that the
    # original numpy.float-based arange produced on modern NumPy.
    fixed_loci = numpy.arange(n_loci)[nearly_fixed]
    genotype_mat = numpy.delete(genotype_mat, fixed_loci, axis=1)
    rm_allele_frequencies = numpy.delete(rm_allele_frequencies, fixed_loci)
    by_allele_frequencies = numpy.delete(by_allele_frequencies, fixed_loci)
    n_loci = float(n_loci - len(fixed_loci))
    # Standardize each locus to mean 0 / variance 1, then form the realized
    # relatedness matrix A = Z Z^T / n_loci.
    norm = numpy.sqrt(by_allele_frequencies * rm_allele_frequencies)
    freq_scaled_genotype_mat = (
        -1 * by_allele_frequencies / norm * genotype_mat
        + rm_allele_frequencies / norm * (1 - genotype_mat))
    relatedness_mat = numpy.dot(freq_scaled_genotype_mat,
                                freq_scaled_genotype_mat.T) / n_loci
    # Squared pairwise phenotype differences, vectorized via broadcasting
    # (replaces the original O(n_segs^2) Python double loop; identical
    # elementwise arithmetic).
    phenotypes = numpy.asarray(phenotype_list, dtype=float)
    phenotype_pairwise_difs = (phenotypes[:, None] - phenotypes[None, :]) ** 2
    # Exclude self-comparisons: a segregant's relatedness to itself is ~1.
    correct_inds = numpy.where(relatedness_mat < .9)
    relatedness_list = (relatedness_mat[correct_inds]
                        - numpy.mean(relatedness_mat[correct_inds]))
    phenotype_pairwise_difs_list = (
        phenotype_pairwise_difs[correct_inds]
        - numpy.mean(phenotype_pairwise_difs[correct_inds]))
    # Least-squares slope of centered d^2 on centered relatedness.
    betahat = (numpy.sum(relatedness_list * phenotype_pairwise_difs_list)
               / numpy.sum(relatedness_list * relatedness_list))
    # E[(y_i - y_j)^2] decreases by 2*A_ij*sigma_A^2, hence the -2, and we
    # normalize by the total phenotypic variance to get h^2.
    h2 = betahat / -2. / numpy.var(phenotypes)
    return h2
def narrow_sense_hsq_REML(genotype_mat, phenotype_list, helper_matrix, variance_comps_initial_guess):
    """Estimate narrow-sense heritability h^2 by REML, using Newton's method.

    Model: Y_i = sum_k Z_ik u_k + H_ij d_j + e_i, where Z is the genotype matrix
    scaled to mean 0 / variance 1 per locus, H is the helper matrix mapping
    populations to segregants, and u, d, e are independent normal random effects.
    The likelihood of KY is maximized, where K is an (n-1) x n matrix with KX = 0
    for the fixed effect X = vector of ones (the overall mean is projected out).
    Variance components are constrained to sum to 1, so Y is standardized first.

    genotype_mat: n_segs x n_loci matrix of 0/1 genotypes.
    phenotype_list: phenotypes per population (length n_pops).
    helper_matrix: n_pops x n_segs 0/1 matrix; entry (i, j) = 1 iff population i
        descends from segregant j.
    variance_comps_initial_guess: starting variance components (additive first);
        copied internally, never mutated.
    Returns the additive variance component, i.e. the h^2 estimate.
    Relies on the sibling helpers ll_derivative and ll_hessian.
    """
    n_loci = float(genotype_mat.shape[1])
    #Standardize phenotypes; fixing the mean biases variance estimates down slightly (~n/(n-1)), a very small correction
    centered_phenotypes = (numpy.array(phenotype_list) - numpy.mean(phenotype_list))/numpy.std(phenotype_list)
    expanded_genotype_mat = numpy.dot(helper_matrix, genotype_mat)
    n_pops = expanded_genotype_mat.shape[0]
    rm_allele_frequencies = numpy.mean(expanded_genotype_mat, axis=0)
    by_allele_frequencies = 1. - rm_allele_frequencies
    #Drop (nearly) fixed loci; flatnonzero yields the integer indices numpy.delete requires
    fixed_loci = numpy.flatnonzero((rm_allele_frequencies < .02) | (by_allele_frequencies < .02))
    genotype_mat = numpy.delete(genotype_mat, fixed_loci, axis=1)
    rm_allele_frequencies = numpy.delete(rm_allele_frequencies, fixed_loci)
    by_allele_frequencies = numpy.delete(by_allele_frequencies, fixed_loci)
    n_loci -= len(fixed_loci)
    expanded_genotype_mat = numpy.dot(helper_matrix, genotype_mat)
    #Scale the genotype matrix so that sigma_u^2 is the additive variance
    freq_scaled_genotype_mat = -1*by_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*expanded_genotype_mat + rm_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*(1 - expanded_genotype_mat)
    G = freq_scaled_genotype_mat
    ##The matrix K is (n-1) x n, such that KX = 0, where X are the fixed effects:
    K = numpy.identity(n_pops - 1) - numpy.ones((n_pops - 1, n_pops - 1))*1/float(n_pops)
    K = numpy.concatenate((K, -1*numpy.ones((n_pops - 1, 1))*1./float(n_pops)), axis=1)
    ##REML: Ky ~ N(0, KVK^T)
    Ky = numpy.dot(K, centered_phenotypes).reshape((n_pops - 1, 1))
    n_var_comps = len(variance_comps_initial_guess)
    V_mats = [G/numpy.sqrt(n_loci), helper_matrix]
    #Projected per-component covariances used for derivatives; the -KK^T term comes from
    #constraining the variance components to sum to 1
    V_list = []
    for i in range(n_var_comps):
        V_list.append(numpy.dot(numpy.dot(K, V_mats[i]), numpy.dot(V_mats[i].T, K.T)) - numpy.dot(K, K.T))
    converged = 0
    #Copy the initial guess so the caller's array/list is never mutated in place
    var_comps = numpy.array(variance_comps_initial_guess, dtype='float')
    n_steps = 0
    while (converged < .5 and n_steps < 100):
        V_tot = numpy.zeros((n_pops - 1, n_pops - 1), dtype='float')
        for i in range(n_var_comps):
            V_tot += numpy.dot(numpy.dot(K, V_mats[i]), numpy.dot(V_mats[i].T, K.T))*var_comps[i]
        V_tot += numpy.dot(K, K.T)*(1 - numpy.sum(var_comps))  ##variance components are fixed to sum to 1 during estimation
        P = numpy.linalg.inv(V_tot)
        ll_deriv = ll_derivative(P, V_list, Ky)
        ll_hess = ll_hessian(P, V_list, Ky)
        #Newton's method for the step direction
        if ll_hess.size < 1.5:  ##second derivative is scalar; optimizing over one variable
            var_step = ll_deriv/ll_hess[0]
        else:
            try:
                var_step = numpy.dot(numpy.linalg.inv(ll_hess), ll_deriv)
            except numpy.linalg.LinAlgError:  ##rare: if the Hessian is singular, reset the variance components to [.1,.5]
                print('Error: singular Hessian')
                var_step = numpy.array([.1, .5]) - var_comps
        v_comps_temp = var_comps + var_step
        if numpy.sum(v_comps_temp) > 1:
            #Shrink the step so the components stay inside the simplex
            var_step_amended = var_step/numpy.sum(v_comps_temp) - 10**(-4)
        #NOTE(review): when the sum exceeds 1 but no component goes negative, the else branch
        #below overwrites the amendment computed above — looks unintended; preserved as-is.
        if numpy.any(v_comps_temp < 0):
            ind = numpy.where(v_comps_temp < 0)
            var_step_amended = var_step
            for i in ind:
                #Pull any negative component back just above zero
                var_step_amended[i] = var_step[i] - v_comps_temp[i] + 10**(-4)
        else:
            var_step_amended = var_step
        var_comps += var_step_amended
        if max(abs(var_step)) < 10**(-4):
            converged = 1
        n_steps += 1
    #First component is the additive variance; with var(Y) fixed to 1, this is h^2
    h2_estimate = var_comps[0]
    return h2_estimate
def ll_derivative(P, V_list, Ky):
    """Partial derivatives of the REML log-likelihood w.r.t. each variance component.

    P: inverse variance-covariance matrix of the projected model,
        P = (K(sum_i V_i sigma_i^2)K^T)^-1.
    V_list: list of projected covariance matrices, V_list[i] = K V_i K^T.
    Ky: column vector (m x 1) of projected phenotypes.
    Returns a 1-D array with one partial derivative per variance component:
        dL/dsigma_i^2 = -0.5 tr(P V_i) + 0.5 (Ky)^T P V_i P (Ky).
    """
    ll_deriv = numpy.zeros((len(V_list),), dtype='float')
    for i in range(len(V_list)):
        quad_form = numpy.dot(numpy.dot(numpy.dot(numpy.dot(Ky.T, P), V_list[i]), P), Ky)
        #.item() extracts the scalar from the 1x1 quadratic form; assigning a 1x1 array
        #to an array element relies on deprecated ndim>0 -> scalar conversion in NumPy
        ll_deriv[i] = -.5*numpy.trace(numpy.dot(P, V_list[i])) + .5*quad_form.item()
    return ll_deriv
def ll_hessian(P, V_list, Ky):
    """Hessian-like matrix of the REML log-likelihood over the variance components.

    P: inverse variance-covariance matrix of the projected model.
    V_list: list of projected covariance matrices, V_list[i] = K V_i K^T.
    Ky: column vector (m x 1) of projected phenotypes.
    Returns a symmetric len(V_list) x len(V_list) array with entries
        0.5 (Ky)^T P V_i P V_j P (Ky).
    Note: only the quadratic-form term is used here; the trace term of the full
    second derivative is omitted (a previously commented-out variant included it).
    """
    n_comps = len(V_list)
    Hessian = numpy.zeros((n_comps, n_comps), dtype='float')
    for i in range(n_comps):
        for j in range(i + 1):  #fill the lower triangle, then mirror
            quad_form = numpy.dot(numpy.dot(numpy.dot(numpy.dot(numpy.dot(numpy.dot(Ky.T, P), V_list[i]), P), V_list[j]), P), Ky)
            #.item() extracts the scalar from the 1x1 result; assigning a 1x1 array to an
            #array element relies on deprecated ndim>0 -> scalar conversion in NumPy
            Hessian[i, j] = .5*quad_form.item()
            Hessian[j, i] = Hessian[i, j]
    return Hessian
def narrow_sense_hsq_ML(genotype_mat, phenotype_list, helper_matrix):
    """Estimate narrow-sense heritability h^2 by a grid search over the REML likelihood.

    Model: Y = Zu + e with u ~ N(0, sigma2_u), e ~ N(0, sigma2_e); Z is the genotype
    matrix scaled to mean 0 and variance 1 per locus.  The negative REML
    log-likelihood is evaluated on a (sigma2_u, sigma2_e) grid and the minimum taken;
    h^2 = sigma2_u / (sigma2_u + sigma2_e) at the grid minimum.

    NOTE(review): this calls negloglikelihood_REML with 6 arguments, but a later
    redefinition of that name takes 7 parameters and shadows the 6-argument version,
    so this call will raise TypeError — the later function likely needs renaming.
    NOTE(review): negloglikelihood_vec below is never filled in (it stays all zeros),
    so the h2_lb/h2_ub bounds printed at the end are not meaningful.
    Requires a module-level `pt` (presumably matplotlib.pyplot — confirm) for plotting.
    Returns the h^2 point estimate.
    """
    n_loci = float(genotype_mat.shape[1])
    #Standardize phenotypes; fixing the mean biases variance estimates down slightly (~n/(n-1))
    centered_phenotypes = (numpy.array(phenotype_list) - numpy.mean(phenotype_list))/numpy.std(phenotype_list)
    expanded_genotype_mat = numpy.dot(helper_matrix, genotype_mat)
    n_pops = expanded_genotype_mat.shape[0]
    rm_allele_frequencies = numpy.mean(expanded_genotype_mat, axis=0)
    by_allele_frequencies = 1 - rm_allele_frequencies
    #Drop (nearly) fixed loci; flatnonzero yields the integer indices numpy.delete requires
    fixed_loci = numpy.flatnonzero((rm_allele_frequencies < .1) | (by_allele_frequencies < .1))
    genotype_mat = numpy.delete(genotype_mat, fixed_loci, axis=1)
    rm_allele_frequencies = numpy.delete(rm_allele_frequencies, fixed_loci)
    by_allele_frequencies = numpy.delete(by_allele_frequencies, fixed_loci)
    n_loci -= len(fixed_loci)
    expanded_genotype_mat = numpy.dot(helper_matrix, genotype_mat)
    #Scale the genotype matrix so that sigma_u^2 is the additive variance
    freq_scaled_genotype_mat = -1*by_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*expanded_genotype_mat + rm_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*(1 - expanded_genotype_mat)
    G = freq_scaled_genotype_mat
    relatedness_mat = numpy.dot(G, G.T)/float(n_loci)
    ##The objective is low-dimensional, so sweep a grid of parameters and find the minimum by hand
    step = .01
    n_steps_u = int(.9/step)
    n_steps_e = int(.49/step)
    sigma2_u_vec = numpy.arange(.1, 1, step)*numpy.var(centered_phenotypes)
    sigma2_e_vec = numpy.arange(.01, .5, step)*numpy.var(centered_phenotypes)
    negloglikelihood_array = numpy.zeros((n_steps_u, n_steps_e), dtype='float')
    negloglikelihood_vec = numpy.zeros((n_steps_u,))  #NOTE(review): never filled in; see docstring
    ##The matrix K is (n-1) x n, such that KX = 0, where X are the fixed effects:
    K = numpy.identity(n_pops - 1) - numpy.ones((n_pops - 1, n_pops - 1))*1/float(n_pops)
    K = numpy.concatenate((K, -1*numpy.ones((n_pops - 1, 1))*1./float(n_pops)), axis=1)
    for i in range(n_steps_u):
        for j in range(n_steps_e):
            #negloglikelihood_REML returns -1 * log-likelihood
            negloglikelihood_array[i, j] = negloglikelihood_REML(sigma2_u_vec[i], sigma2_e_vec[j], freq_scaled_genotype_mat, centered_phenotypes, K, relatedness_mat)
    #Diagnostic contour plot of the likelihood surface
    CS = pt.contour(numpy.log(negloglikelihood_array), 20)
    pt.clabel(CS, inline=1, fontsize=10)
    pt.show()
    min_index = numpy.nanargmin(negloglikelihood_array)
    min_index_e = min_index % n_steps_e
    #Integer floor division: numpy.floor returns a float, which cannot be used as an index
    min_index_u = int(min_index // n_steps_e)
    sigma2_u_estimate = sigma2_u_vec[min_index_u]
    sigma2_e_estimate = sigma2_e_vec[min_index_e]
    #Normalize the two components rather than constraining them to sum to one in the model
    h2_estimate = sigma2_u_estimate/(sigma2_u_estimate + sigma2_e_estimate)
    ll_min = negloglikelihood_vec[min_index_u]
    #~2 log-likelihood units around the minimum (NOTE(review): vacuous while the vec is all zeros)
    h2_lb = numpy.min(sigma2_u_vec[negloglikelihood_vec - ll_min < numpy.log(10)])
    h2_ub = numpy.max(sigma2_u_vec[negloglikelihood_vec - ll_min < numpy.log(10)])
    print(h2_estimate, h2_lb, h2_ub)
    return h2_estimate  #, h2_lb, h2_ub
def negloglikelihood_random_effects_model(sigma2_mu, sigma2_e, freq_scaled_genotype_mat, phenotype_list):
    """Negative log-likelihood (up to an additive constant) of Y = Zu + e.

    Here u ~ N(0, sigma2_mu) and e ~ N(0, sigma2_e), so Var(Y) = ZZ^T sigma2_mu + I sigma2_e.
    freq_scaled_genotype_mat: Z, an n_pops x n_loci centered/scaled genotype matrix.
    phenotype_list: observed phenotypes (numpy array of length n_pops).
    Returns 0.5*(log det V + y^T V^-1 y), dropping terms constant in the variance components.
    """
    n_pops, n_loci = freq_scaled_genotype_mat.shape
    Z = freq_scaled_genotype_mat
    V = numpy.dot(Z, Z.T)*sigma2_mu + numpy.identity(n_pops)*sigma2_e
    V_inverse = numpy.linalg.inv(V)
    V_determinant = numpy.linalg.det(V)
    if V_determinant == 0:
        #Guard against log(0): a (numerically) singular V contributes nothing via the determinant term
        V_determinant = 1
    quadratic_term = numpy.dot(numpy.dot(phenotype_list.T, V_inverse), phenotype_list)
    return .5*(numpy.log(V_determinant) + quadratic_term)
def negloglikelihood_REML(sigma2_mu, sigma2_e, freq_scaled_genotype_mat, phenotype_list, K, relatedness_mat):
    """Negative REML log-likelihood (up to an additive constant) of Y = beta + Zu + e.

    REML evaluates the likelihood of KY, where K is an (n-r) x n full-rank matrix
    with KX = 0 and X is the fixed-effect design (here a column of ones, fixing
    only the overall mean).  Var(Y) = relatedness_mat * sigma2_mu + I * sigma2_e.

    freq_scaled_genotype_mat: accepted for signature compatibility but unused; the
        genetic covariance enters through relatedness_mat.
    phenotype_list: numpy array of length n_pops (reshaped to a column internally).
    NOTE(review): a later function in this file reuses this name with a different
    signature and shadows this definition at import time.
    Returns a 1x1 array with 0.5*(log det(KVK^T) + (Ky)^T (KVK^T)^-1 (Ky)).
    """
    n_pops = relatedness_mat.shape[0]
    V = relatedness_mat*sigma2_mu + numpy.identity(n_pops)*sigma2_e
    #Optional conditioning constant; c = 1 leaves K unscaled
    c = 1.
    K_scaled = K/c
    projected_V = numpy.dot(numpy.dot(K_scaled, V), K_scaled.T)
    projected_V_inv = numpy.linalg.inv(projected_V)*(c**2)
    #log det(KVK^T), with the scaling by c undone analytically
    log_det = 2*(n_pops - 1)*numpy.log(c) + numpy.log(numpy.linalg.det(projected_V))
    y_column = phenotype_list.reshape((n_pops, 1))
    Ky = numpy.dot(K, y_column)
    quadratic_term = numpy.dot(numpy.dot(Ky.T, projected_V_inv), Ky)
    return .5*(log_det + quadratic_term)
def negloglikelihood_REML(sigma2_mu, sigma2_e1, sigma2_e2, phenotype_list, K, relatedness_mat, error_structure):
    """Negative REML log-likelihood with a two-part error covariance.

    Same REML construction as the likelihood of KY with KX = 0 (X = column of
    ones), but here Var(Y) = relatedness_mat*sigma2_mu + error_structure*sigma2_e1
    + I*sigma2_e2, splitting the error into a structured part (e.g. replicate
    structure) and an independent part.

    NOTE(review): this REDEFINES negloglikelihood_REML and shadows the earlier
    6-argument version; callers passing 6 arguments (e.g. narrow_sense_hsq_ML)
    will raise TypeError.  This function likely needs a distinct name.
    phenotype_list: numpy array of length n_pops (reshaped to a column internally).
    Returns a 1x1 array with 0.5*(log det(KVK^T) + (Ky)^T (KVK^T)^-1 (Ky)).
    """
    n_pops = relatedness_mat.shape[0]
    V = relatedness_mat*sigma2_mu + error_structure*sigma2_e1 + numpy.identity(n_pops)*sigma2_e2
    #Optional conditioning constant; c = 1 leaves K unscaled
    c = 1.
    K_scaled = K/c
    projected_V = numpy.dot(numpy.dot(K_scaled, V), K_scaled.T)
    projected_V_inv = numpy.linalg.inv(projected_V)*(c**2)
    #log det(KVK^T), with the scaling by c undone analytically
    log_det = 2*(n_pops - 1)*numpy.log(c) + numpy.log(numpy.linalg.det(projected_V))
    y_column = phenotype_list.reshape((n_pops, 1))
    Ky = numpy.dot(K, y_column)
    quadratic_term = numpy.dot(numpy.dot(Ky.T, projected_V_inv), Ky)
    return .5*(log_det + quadratic_term)
def negloglikelihood_REML2(sigma2_mu, sigma2_e, phenotype_list, relatedness_mat):
    """Negative REML log-likelihood via the projection matrix P (alternative form).

    Uses P = V^-1 - V^-1 X (X^T V^-1 X)^-1 X^T V^-1 with X a column of ones and
    V = relatedness_mat*sigma2_mu + I*sigma2_e, then returns
    -(0.5 log det(P) - 0.5 y^T P y).
    NOTE(review): P annihilates X by construction, so det(P) is (numerically)
    zero and the log-determinant term is degenerate — confirm intended usage.
    """
    n_pops = relatedness_mat.shape[0]
    X = numpy.ones((n_pops, 1), dtype='float')
    V = relatedness_mat*sigma2_mu + numpy.identity(n_pops)*sigma2_e
    V_inv = numpy.linalg.inv(V)
    XtVinvX_inv = numpy.linalg.inv(numpy.dot(numpy.dot(X.T, V_inv), X))
    fixed_effect_correction = numpy.dot(numpy.dot(numpy.dot(numpy.dot(V_inv, X), XtVinvX_inv), X.T), V_inv)
    P = V_inv - fixed_effect_correction
    quadratic_term = numpy.dot(numpy.dot(phenotype_list.T, P), phenotype_list)
    log_likelihood = .5*numpy.log(numpy.linalg.det(P)) - .5*quadratic_term
    return -1*log_likelihood
def narrow_sense_hsq_window(genotype_mat, phenotype_list, window):
    """Estimate narrow-sense heritability h^2 by regressing squared phenotype
    differences on pairwise relatedness (Haseman-Elston style), single iteration.

    genotype_mat: n_segs x n_loci matrix of 0/1 genotypes.
    phenotype_list: phenotypes, length n_segs.
    window: only pairs with |relatedness| < window enter the regression.  This is
        intended to drop self-comparisons (diagonal relatedness ~1), though
        diagonal entries can fall inside the window for unbalanced allele
        frequencies.
    Returns the h^2 point estimate.
    """
    n_loci = float(genotype_mat.shape[1])
    n_segs = len(phenotype_list)
    rm_allele_frequencies = numpy.mean(genotype_mat, axis=0)
    by_allele_frequencies = 1 - rm_allele_frequencies
    #Drop (nearly) fixed loci; flatnonzero yields the integer indices numpy.delete requires
    fixed_loci = numpy.flatnonzero((rm_allele_frequencies < .1) | (by_allele_frequencies < .1))
    genotype_mat = numpy.delete(genotype_mat, fixed_loci, axis=1)
    rm_allele_frequencies = numpy.delete(rm_allele_frequencies, fixed_loci)
    by_allele_frequencies = numpy.delete(by_allele_frequencies, fixed_loci)
    n_loci -= len(fixed_loci)
    #Scale genotypes to mean 0, variance 1 per locus
    freq_scaled_genotype_mat = -1*by_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*genotype_mat + rm_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*(1 - genotype_mat)
    relatedness_mat = numpy.dot(freq_scaled_genotype_mat, freq_scaled_genotype_mat.T)/float(n_loci)
    phenotype_pairwise_difs = numpy.ones((n_segs, n_segs), dtype='float')
    correct_inds = numpy.where(numpy.abs(relatedness_mat) < window)
    #Center the predictor so the regression runs through the origin
    relatedness_list = relatedness_mat[correct_inds] - numpy.mean(relatedness_mat[correct_inds])
    for i in range(n_segs):
        for j in range(n_segs):
            phenotype_pairwise_difs[i, j] = (phenotype_list[i] - phenotype_list[j])**2
    #Center the response as well
    phenotype_pairwise_difs_list = phenotype_pairwise_difs[correct_inds] - numpy.mean(phenotype_pairwise_difs[correct_inds])
    #Least-squares slope: (x.x)^-1 (x.y)
    betahat = numpy.sum(relatedness_list*phenotype_pairwise_difs_list)/numpy.sum(relatedness_list*relatedness_list)
    #E[(y_i - y_j)^2] = const - 2*relatedness*sigma_A^2, hence the -2 below
    h2 = betahat/-2./numpy.var(phenotype_list)
    return h2
def narrow_sense_hsq_bootstrap(genotype_mat, phenotype_list):
    """Haseman-Elston style narrow-sense heritability with a bootstrap interquartile range.

    Regresses squared pairwise phenotype differences on pairwise relatedness for
    the point estimate, then bootstraps over segregants (1000 resamples).

    genotype_mat: n_segs x n_loci matrix of 0/1 genotypes.
    phenotype_list: numpy array of phenotypes, length n_segs (fancy-indexed in the bootstrap).
    Returns (h2, [25th, 75th percentile of the bootstrap h2 distribution]).
    """
    n_loci = float(genotype_mat.shape[1])
    n_segs = len(phenotype_list)
    rm_allele_frequencies = numpy.mean(genotype_mat, axis=0)
    by_allele_frequencies = 1 - rm_allele_frequencies
    #Drop (nearly) fixed loci; flatnonzero yields the integer indices numpy.delete requires
    fixed_loci = numpy.flatnonzero((rm_allele_frequencies < .1) | (by_allele_frequencies < .1))
    genotype_mat = numpy.delete(genotype_mat, fixed_loci, axis=1)
    rm_allele_frequencies = numpy.delete(rm_allele_frequencies, fixed_loci)
    by_allele_frequencies = numpy.delete(by_allele_frequencies, fixed_loci)
    n_loci -= len(fixed_loci)
    #Scale genotypes to mean 0, variance 1 per locus
    freq_scaled_genotype_mat = -1*by_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*genotype_mat + rm_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*(1 - genotype_mat)
    relatedness_mat = numpy.dot(freq_scaled_genotype_mat, freq_scaled_genotype_mat.T)/float(n_loci)
    phenotype_pairwise_difs = numpy.ones((n_segs, n_segs), dtype='float')
    #Keep pairs with relatedness < .9, intended to drop self-comparisons (diagonal ~1)
    correct_inds = numpy.where(relatedness_mat < .9)
    relatedness_list = relatedness_mat[correct_inds] - numpy.mean(relatedness_mat[correct_inds])
    for i in range(n_segs):
        for j in range(n_segs):
            phenotype_pairwise_difs[i, j] = (phenotype_list[i] - phenotype_list[j])**2
    #Center the response so the regression runs through the origin
    phenotype_pairwise_difs_list = phenotype_pairwise_difs[correct_inds] - numpy.mean(phenotype_pairwise_difs[correct_inds])
    #Least-squares slope: (x.x)^-1 (x.y)
    betahat = numpy.sum(relatedness_list*phenotype_pairwise_difs_list)/numpy.sum(relatedness_list*relatedness_list)
    #E[(y_i - y_j)^2] = const - 2*relatedness*sigma_A^2, hence the -2 below
    h2 = betahat/-2./numpy.var(phenotype_list)
    ##Bootstrap over segregants for an uncertainty interval
    h2_bs = []
    for k in range(1000):
        bootstrap_inds = numpy.random.choice(numpy.arange(n_segs), size=n_segs)
        phenotype_pairwise_difs_bs = phenotype_pairwise_difs[bootstrap_inds, :][:, bootstrap_inds]
        relatedness_bs = relatedness_mat[bootstrap_inds, :][:, bootstrap_inds]
        phenotype_list_bs = phenotype_list[bootstrap_inds]
        correct_inds_bs = numpy.where(relatedness_bs < .9)
        #Re-center within the bootstrap sample
        phenotype_pairwise_difs_list_bs = phenotype_pairwise_difs_bs[correct_inds_bs] - numpy.mean(phenotype_pairwise_difs_bs[correct_inds_bs])
        relatedness_list_bs = relatedness_bs[correct_inds_bs] - numpy.mean(relatedness_bs[correct_inds_bs])
        betahat_bs = numpy.sum(relatedness_list_bs*phenotype_pairwise_difs_list_bs)/numpy.sum(relatedness_list_bs*relatedness_list_bs)
        h2_bs.append(betahat_bs/-2./numpy.var(phenotype_list_bs))
    #Interquartile range of the bootstrap distribution (NOT a 95% CI; see the _ninetyfive variant)
    return h2, [numpy.percentile(h2_bs, 25), numpy.percentile(h2_bs, 75)]
def narrow_sense_hsq_bootstrap_ninetyfive(genotype_mat, phenotype_list):
    """Haseman-Elston style narrow-sense heritability with a bootstrap 95% interval.

    Identical to narrow_sense_hsq_bootstrap except the interval reported is the
    2.5th-97.5th percentile range of the bootstrap distribution.

    genotype_mat: n_segs x n_loci matrix of 0/1 genotypes.
    phenotype_list: numpy array of phenotypes, length n_segs (fancy-indexed in the bootstrap).
    Returns (h2, [2.5th, 97.5th percentile of the bootstrap h2 distribution]).
    """
    n_loci = float(genotype_mat.shape[1])
    n_segs = len(phenotype_list)
    rm_allele_frequencies = numpy.mean(genotype_mat, axis=0)
    by_allele_frequencies = 1 - rm_allele_frequencies
    #Drop (nearly) fixed loci; flatnonzero yields the integer indices numpy.delete requires
    fixed_loci = numpy.flatnonzero((rm_allele_frequencies < .1) | (by_allele_frequencies < .1))
    genotype_mat = numpy.delete(genotype_mat, fixed_loci, axis=1)
    rm_allele_frequencies = numpy.delete(rm_allele_frequencies, fixed_loci)
    by_allele_frequencies = numpy.delete(by_allele_frequencies, fixed_loci)
    n_loci -= len(fixed_loci)
    #Scale genotypes to mean 0, variance 1 per locus
    freq_scaled_genotype_mat = -1*by_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*genotype_mat + rm_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*(1 - genotype_mat)
    relatedness_mat = numpy.dot(freq_scaled_genotype_mat, freq_scaled_genotype_mat.T)/float(n_loci)
    phenotype_pairwise_difs = numpy.ones((n_segs, n_segs), dtype='float')
    #Keep pairs with relatedness < .9, intended to drop self-comparisons (diagonal ~1)
    correct_inds = numpy.where(relatedness_mat < .9)
    relatedness_list = relatedness_mat[correct_inds] - numpy.mean(relatedness_mat[correct_inds])
    for i in range(n_segs):
        for j in range(n_segs):
            phenotype_pairwise_difs[i, j] = (phenotype_list[i] - phenotype_list[j])**2
    #Center the response so the regression runs through the origin
    phenotype_pairwise_difs_list = phenotype_pairwise_difs[correct_inds] - numpy.mean(phenotype_pairwise_difs[correct_inds])
    #Least-squares slope: (x.x)^-1 (x.y)
    betahat = numpy.sum(relatedness_list*phenotype_pairwise_difs_list)/numpy.sum(relatedness_list*relatedness_list)
    #E[(y_i - y_j)^2] = const - 2*relatedness*sigma_A^2, hence the -2 below
    h2 = betahat/-2./numpy.var(phenotype_list)
    ##Bootstrap over segregants for the confidence interval
    h2_bs = []
    for k in range(1000):
        bootstrap_inds = numpy.random.choice(numpy.arange(n_segs), size=n_segs)
        phenotype_pairwise_difs_bs = phenotype_pairwise_difs[bootstrap_inds, :][:, bootstrap_inds]
        relatedness_bs = relatedness_mat[bootstrap_inds, :][:, bootstrap_inds]
        phenotype_list_bs = phenotype_list[bootstrap_inds]
        correct_inds_bs = numpy.where(relatedness_bs < .9)
        #Re-center within the bootstrap sample
        phenotype_pairwise_difs_list_bs = phenotype_pairwise_difs_bs[correct_inds_bs] - numpy.mean(phenotype_pairwise_difs_bs[correct_inds_bs])
        relatedness_list_bs = relatedness_bs[correct_inds_bs] - numpy.mean(relatedness_bs[correct_inds_bs])
        betahat_bs = numpy.sum(relatedness_list_bs*phenotype_pairwise_difs_list_bs)/numpy.sum(relatedness_list_bs*relatedness_list_bs)
        h2_bs.append(betahat_bs/-2./numpy.var(phenotype_list_bs))
    return h2, [numpy.percentile(h2_bs, 2.5), numpy.percentile(h2_bs, 97.5)]
def narrow_sense_hsq_replicates(genotype_mat, phenotype_list, helper_matrix):
    """Haseman-Elston style narrow-sense heritability with replicate populations.

    genotype_mat: n_segs x n_loci matrix of 0/1 genotypes.
    phenotype_list: phenotypes per population, length n_pops.
    helper_matrix: n_pops x n_segs 0/1 matrix mapping populations to their founder segregant.
    Returns the h^2 point estimate.
    """
    n_loci = float(genotype_mat.shape[1])
    n_pops = len(phenotype_list)
    expanded_genotype_mat = numpy.dot(helper_matrix, genotype_mat)
    rm_allele_frequencies = numpy.mean(expanded_genotype_mat, axis=0)
    by_allele_frequencies = 1 - rm_allele_frequencies
    #Drop (nearly) fixed loci; flatnonzero yields the integer indices numpy.delete requires
    fixed_loci = numpy.flatnonzero((rm_allele_frequencies < .1) | (by_allele_frequencies < .1))
    expanded_genotype_mat = numpy.delete(expanded_genotype_mat, fixed_loci, axis=1)
    rm_allele_frequencies = numpy.delete(rm_allele_frequencies, fixed_loci)
    by_allele_frequencies = numpy.delete(by_allele_frequencies, fixed_loci)
    n_loci -= len(fixed_loci)
    #Scale genotypes to mean 0, variance 1 per locus
    freq_scaled_genotype_mat = -1*by_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*expanded_genotype_mat + rm_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*(1 - expanded_genotype_mat)
    relatedness_mat = numpy.dot(freq_scaled_genotype_mat, freq_scaled_genotype_mat.T)/float(n_loci)
    #Keep pairs with relatedness < .9, intended to drop self/replicate comparisons (~1)
    correct_inds = numpy.where(relatedness_mat < .9)
    phenotype_pairwise_difs = numpy.ones((n_pops, n_pops), dtype='float')
    relatedness_list = relatedness_mat[correct_inds] - numpy.mean(relatedness_mat[correct_inds], dtype='float')
    for i in range(n_pops):
        for j in range(n_pops):
            phenotype_pairwise_difs[i, j] = (phenotype_list[i] - phenotype_list[j])**2
    #Center the response so the regression runs through the origin
    phenotype_pairwise_difs_list = phenotype_pairwise_difs[correct_inds] - numpy.mean(phenotype_pairwise_difs[correct_inds])
    #Least-squares slope; E[(y_i - y_j)^2] = const - 2*relatedness*sigma_A^2, hence the -2 below
    betahat = numpy.sum(relatedness_list*phenotype_pairwise_difs_list)/numpy.sum(relatedness_list*relatedness_list)
    h2 = betahat/-2./numpy.var(phenotype_list)
    return h2
def narrow_sense_hsq_replicates_window(genotype_mat, phenotype_list, helper_matrix, window):
    """Haseman-Elston style narrow-sense heritability with replicates and a relatedness window.

    genotype_mat: n_segs x n_loci matrix of 0/1 genotypes.
    phenotype_list: phenotypes per population, length n_pops.
    helper_matrix: n_pops x n_segs 0/1 matrix mapping populations to their founder segregant.
    window: only pairs with |relatedness| < window enter the regression (intended to
        drop self/replicate comparisons, whose relatedness is ~1).
    Returns the h^2 point estimate.
    """
    n_loci = float(genotype_mat.shape[1])
    n_pops = len(phenotype_list)
    expanded_genotype_mat = numpy.dot(helper_matrix, genotype_mat)
    rm_allele_frequencies = numpy.mean(expanded_genotype_mat, axis=0)
    by_allele_frequencies = 1 - rm_allele_frequencies
    #Drop (nearly) fixed loci; flatnonzero yields the integer indices numpy.delete requires
    fixed_loci = numpy.flatnonzero((rm_allele_frequencies < .1) | (by_allele_frequencies < .1))
    expanded_genotype_mat = numpy.delete(expanded_genotype_mat, fixed_loci, axis=1)
    rm_allele_frequencies = numpy.delete(rm_allele_frequencies, fixed_loci)
    by_allele_frequencies = numpy.delete(by_allele_frequencies, fixed_loci)
    n_loci -= len(fixed_loci)
    #Scale genotypes to mean 0, variance 1 per locus
    freq_scaled_genotype_mat = -1*by_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*expanded_genotype_mat + rm_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*(1 - expanded_genotype_mat)
    relatedness_mat = numpy.dot(freq_scaled_genotype_mat, freq_scaled_genotype_mat.T)/float(n_loci)
    correct_inds = numpy.where(numpy.abs(relatedness_mat) < window)
    phenotype_pairwise_difs = numpy.ones((n_pops, n_pops), dtype='float')
    relatedness_list = relatedness_mat[correct_inds] - numpy.mean(relatedness_mat[correct_inds], dtype='float')
    for i in range(n_pops):
        for j in range(n_pops):
            phenotype_pairwise_difs[i, j] = (phenotype_list[i] - phenotype_list[j])**2
    #Center the response so the regression runs through the origin
    phenotype_pairwise_difs_list = phenotype_pairwise_difs[correct_inds] - numpy.mean(phenotype_pairwise_difs[correct_inds])
    #Least-squares slope; E[(y_i - y_j)^2] = const - 2*relatedness*sigma_A^2, hence the -2 below
    betahat = numpy.sum(relatedness_list*phenotype_pairwise_difs_list)/numpy.sum(relatedness_list*relatedness_list)
    h2 = betahat/-2./numpy.var(phenotype_list)
    return h2
def narrow_sense_hsq_replicates_bootstrap(genotype_mat, phenotype_list, helper_matrix):
    """Haseman-Elston style narrow-sense heritability with replicates plus a bootstrap IQR.

    The bootstrap resamples SEGREGANTS (not populations) and expands each draw to
    all of its replicate populations via helper_matrix, preserving the replicate
    structure within resamples.

    genotype_mat: n_segs x n_loci matrix of 0/1 genotypes.
    phenotype_list: numpy array of phenotypes per population, length n_pops
        (fancy-indexed in the bootstrap).
    helper_matrix: n_pops x n_segs 0/1 matrix mapping populations to their founder segregant.
    Returns (h2, [25th, 75th percentile of the bootstrap h2 distribution]).
    """
    n_loci = float(genotype_mat.shape[1])
    n_segs = int(genotype_mat.shape[0])
    n_pops = len(phenotype_list)
    expanded_genotype_mat = numpy.dot(helper_matrix, genotype_mat)
    rm_allele_frequencies = numpy.mean(expanded_genotype_mat, axis=0)
    by_allele_frequencies = 1 - rm_allele_frequencies
    #Drop (nearly) fixed loci; flatnonzero yields the integer indices numpy.delete requires
    fixed_loci = numpy.flatnonzero((rm_allele_frequencies < .1) | (by_allele_frequencies < .1))
    expanded_genotype_mat = numpy.delete(expanded_genotype_mat, fixed_loci, axis=1)
    rm_allele_frequencies = numpy.delete(rm_allele_frequencies, fixed_loci)
    by_allele_frequencies = numpy.delete(by_allele_frequencies, fixed_loci)
    n_loci -= len(fixed_loci)
    #Scale genotypes to mean 0, variance 1 per locus
    freq_scaled_genotype_mat = -1*by_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*expanded_genotype_mat + rm_allele_frequencies/numpy.sqrt(by_allele_frequencies*rm_allele_frequencies)*(1 - expanded_genotype_mat)
    relatedness_mat = numpy.dot(freq_scaled_genotype_mat, freq_scaled_genotype_mat.T)/float(n_loci)
    #Keep pairs with relatedness < .9, intended to drop self/replicate comparisons (~1)
    correct_inds = numpy.where(relatedness_mat < .9)
    phenotype_pairwise_difs = numpy.ones((n_pops, n_pops), dtype='float')
    relatedness_list = relatedness_mat[correct_inds] - numpy.mean(relatedness_mat[correct_inds], dtype='float')
    for i in range(n_pops):
        for j in range(n_pops):
            phenotype_pairwise_difs[i, j] = (phenotype_list[i] - phenotype_list[j])**2
    #Center the response so the regression runs through the origin
    phenotype_pairwise_difs_list = phenotype_pairwise_difs[correct_inds] - numpy.mean(phenotype_pairwise_difs[correct_inds])
    #Least-squares slope; E[(y_i - y_j)^2] = const - 2*relatedness*sigma_A^2, hence the -2 below
    betahat = numpy.sum(relatedness_list*phenotype_pairwise_difs_list)/numpy.sum(relatedness_list*relatedness_list)
    h2 = betahat/-2./numpy.var(phenotype_list)
    ##Bootstrap over segregants for an uncertainty interval
    h2_bs = []
    for k in range(1000):
        bootstrap_inds = numpy.random.choice(numpy.arange(n_segs), size=n_segs)
        #Expand each sampled segregant to all of its replicate populations
        expanded_indices = []
        for bootstrap_ind in bootstrap_inds:
            expanded_indices.extend(numpy.where(helper_matrix[:, bootstrap_ind] == 1)[0])
        phenotype_pairwise_difs_bs = phenotype_pairwise_difs[expanded_indices, :][:, expanded_indices]
        relatedness_bs = relatedness_mat[expanded_indices, :][:, expanded_indices]
        phenotype_list_bs = phenotype_list[expanded_indices]
        correct_inds_bs = numpy.where(relatedness_bs < .9)
        #Re-center within the bootstrap sample
        phenotype_pairwise_difs_list_bs = (phenotype_pairwise_difs_bs[correct_inds_bs] - numpy.mean(phenotype_pairwise_difs_bs[correct_inds_bs])).flatten()
        relatedness_list_bs = (relatedness_bs[correct_inds_bs] - numpy.mean(relatedness_bs[correct_inds_bs], dtype='float')).flatten()
        betahat_bs = numpy.sum(relatedness_list_bs*phenotype_pairwise_difs_list_bs)/numpy.sum(relatedness_list_bs*relatedness_list_bs)
        h2_bs.append(betahat_bs/-2./numpy.var(phenotype_list_bs))
    #Interquartile range of the bootstrap distribution (NOT a 95% CI)
    return h2, [numpy.percentile(h2_bs, 25), numpy.percentile(h2_bs, 75)]
def narrow_sense_hsq_replicates_bootstrap_ninetyfive(genotype_mat, phenotype_list, helper_matrix):
    """Estimate narrow-sense heritability (h^2) with a 95% bootstrap CI.

    Regresses squared pairwise phenotype differences on pairwise genetic
    relatedness (excluding near-identical pairs), using the relation
    E[(y_i - y_j)^2] ~ -2 * h^2 * var(y) * relatedness, then bootstraps over
    segregants (columns of ``helper_matrix``) to get a confidence interval.

    Parameters
    ----------
    genotype_mat : (n_segs, n_loci) array of 0/1 genotypes, one row per segregant.
    phenotype_list : (n_pops,) numpy array of phenotypes, one per population.
    helper_matrix : (n_pops, n_segs) 0/1 array mapping each population to the
        segregant it descends from (replicates share a genotype row).

    Returns
    -------
    tuple : (h2, [lower, upper]) where the bounds are the 2.5th and 97.5th
        percentiles of 1000 bootstrap replicates of h2.
    """
    # numpy.float / numpy.int were removed in NumPy 1.20+/1.24; use builtins.
    n_loci = float(genotype_mat.shape[1])
    n_segs = int(genotype_mat.shape[0])
    n_pops = len(phenotype_list)
    # Expand genotypes to one row per population.
    expanded_genotype_mat = numpy.dot(helper_matrix, genotype_mat)
    rm_allele_frequencies = numpy.mean(expanded_genotype_mat, axis=0)
    by_allele_frequencies = 1 - rm_allele_frequencies
    # Drop nearly-fixed loci (minor allele frequency < .1): they carry almost
    # no relatedness information and destabilize the frequency scaling below.
    # Integer indices are required by numpy.delete on modern NumPy, so do not
    # build them with numpy.arange(float).
    locus_indices = numpy.arange(int(n_loci))
    fixed_loci = locus_indices[(rm_allele_frequencies < .1) + (by_allele_frequencies < .1)]
    expanded_genotype_mat = numpy.delete(expanded_genotype_mat, fixed_loci, axis=1)
    rm_allele_frequencies = numpy.delete(rm_allele_frequencies, fixed_loci)
    by_allele_frequencies = numpy.delete(by_allele_frequencies, fixed_loci)
    n_loci -= len(fixed_loci)
    # Standardize genotypes by allele frequency so the cross-product below
    # yields a genetic relatedness matrix.
    freq_norm = numpy.sqrt(by_allele_frequencies * rm_allele_frequencies)
    freq_scaled_genotype_mat = (-1 * by_allele_frequencies / freq_norm * expanded_genotype_mat
                                + rm_allele_frequencies / freq_norm * (1 - expanded_genotype_mat))
    relatedness_mat = numpy.dot(freq_scaled_genotype_mat, freq_scaled_genotype_mat.T) / n_loci
    # Exclude (near-)identical pairs, e.g. replicates of the same segregant.
    correct_inds = numpy.where(relatedness_mat < .9)
    phenotype_pairwise_difs = numpy.ones((n_pops, n_pops), dtype='float')
    relatedness_list = relatedness_mat[correct_inds] - numpy.mean(relatedness_mat[correct_inds], dtype='float')
    for i in range(n_pops):
        for j in range(n_pops):
            phenotype_pairwise_difs[i, j] = (phenotype_list[i] - phenotype_list[j])**2
    # Center the response and fit the slope by least squares: (x.x)^-1 (x.y).
    phenotype_pairwise_difs_list = phenotype_pairwise_difs[correct_inds] - numpy.mean(phenotype_pairwise_difs[correct_inds])
    betahat = 1. / numpy.sum(relatedness_list * relatedness_list) * numpy.sum(relatedness_list * phenotype_pairwise_difs_list)
    h2 = betahat / -2. / numpy.var(phenotype_list)
    # Bootstrap over segregants (with replacement) for the CI.
    h2_bs = []
    for k in range(1000):
        bootstrap_inds = numpy.random.choice(numpy.arange(n_segs), size=n_segs)
        expanded_indices = []
        for bootstrap_ind in bootstrap_inds:
            expanded_indices.extend(numpy.where(helper_matrix[:, bootstrap_ind] == 1)[0])
        phenotype_pairwise_difs_bs = phenotype_pairwise_difs[expanded_indices, :][:, expanded_indices]
        relatedness_bs = relatedness_mat[expanded_indices, :][:, expanded_indices]
        phenotype_list_bs = phenotype_list[expanded_indices]
        correct_inds_bs = numpy.where(relatedness_bs < .9)
        # Re-center both variables within the resample.
        phenotype_pairwise_difs_list_bs = (phenotype_pairwise_difs_bs[correct_inds_bs] - numpy.mean(phenotype_pairwise_difs_bs[correct_inds_bs])).flatten()
        relatedness_list_bs = (relatedness_bs[correct_inds_bs] - numpy.mean(relatedness_bs[correct_inds_bs], dtype='float')).flatten()
        betahat = 1. / numpy.sum(relatedness_list_bs * relatedness_list_bs) * numpy.sum(relatedness_list_bs * phenotype_pairwise_difs_list_bs)
        h2_bs.append(betahat / -2. / numpy.var(phenotype_list_bs))
    return h2, [numpy.percentile(h2_bs, 2.5), numpy.percentile(h2_bs, 97.5)]
def rsq(init_fit_vector, delta_fit_vector, helper_matrix):
    """Squared Pearson correlation between per-population initial fitnesses
    (``helper_matrix`` expands one value per segregant to its populations)
    and the observed fitness changes."""
    predicted = numpy.dot(helper_matrix, init_fit_vector)
    r = scipy.stats.pearsonr(predicted, delta_fit_vector)[0]
    return r ** 2
def rsq_with_bootstrap(init_fit_vector, delta_fit_vector, helper_matrix):
    """R^2 between expanded initial fitnesses and fitness changes, plus a
    bootstrap interquartile (25th/75th percentile) interval obtained by
    resampling segregants (columns of ``helper_matrix``) with replacement."""
    point_estimate = scipy.stats.pearsonr(
        numpy.dot(helper_matrix, init_fit_vector), delta_fit_vector)[0] ** 2
    n_segs = len(init_fit_vector)
    resampled_rsqs = []
    for _ in range(1000):
        seg_choice = numpy.random.choice(numpy.arange(n_segs), size=n_segs)
        # Map each resampled segregant back to its population rows.
        pop_rows = []
        for seg in seg_choice:
            pop_rows.extend(numpy.where(helper_matrix[:, seg] == 1)[0])
        init_bs = init_fit_vector[seg_choice]
        delta_bs = delta_fit_vector[pop_rows]
        helper_bs = helper_matrix[pop_rows, :][:, seg_choice]
        r_bs = scipy.stats.pearsonr(numpy.dot(helper_bs, init_bs), delta_bs)[0]
        resampled_rsqs.append(r_bs ** 2)
    return point_estimate, [numpy.percentile(resampled_rsqs, 25),
                            numpy.percentile(resampled_rsqs, 75)]
def rsq_linear_model(predictor_mat, delta_fit_vector, helper_matrix):
    """R^2 of an ordinary-least-squares fit of ``delta_fit_vector`` on the
    per-population predictors ``helper_matrix . predictor_mat``.

    Raises numpy.linalg.LinAlgError if the normal-equations matrix is
    singular (same failure mode as the previous explicit-inverse version).
    """
    design = numpy.dot(helper_matrix, predictor_mat)
    # Solve the normal equations (X^T X) beta = X^T y directly: numerically
    # more stable and cheaper than forming the explicit inverse.
    beta = numpy.linalg.solve(numpy.dot(design.T, design),
                              numpy.dot(design.T, delta_fit_vector))
    return scipy.stats.pearsonr(numpy.dot(design, beta), delta_fit_vector)[0] ** 2
def rsq_with_bootstrap_linear_model(predictor_mat, delta_fit_vector, helper_matrix):
    """R^2 of an OLS linear model plus a bootstrap interquartile (25th/75th
    percentile) interval from 100 resamples of segregants."""
    design = numpy.dot(helper_matrix, predictor_mat)
    gram_inv = numpy.linalg.inv(numpy.dot(design.T, design))
    beta = numpy.dot(gram_inv, numpy.dot(design.T, delta_fit_vector))
    point_estimate = scipy.stats.pearsonr(
        numpy.dot(design, beta), delta_fit_vector)[0] ** 2
    n_segs = len(helper_matrix[0, :])
    resampled_rsqs = []
    for _ in range(100):
        seg_choice = numpy.random.choice(numpy.arange(n_segs), size=n_segs)
        # Expand the resampled segregants to their population rows.
        pop_rows = []
        for seg in seg_choice:
            pop_rows.extend(numpy.where(helper_matrix[:, seg] == 1)[0])
        design_bs = design[pop_rows, :]
        response_bs = delta_fit_vector[pop_rows]
        gram_inv_bs = numpy.linalg.inv(numpy.dot(design_bs.T, design_bs))
        beta_bs = numpy.dot(gram_inv_bs, numpy.dot(design_bs.T, response_bs))
        resampled_rsqs.append(scipy.stats.pearsonr(
            numpy.dot(design_bs, beta_bs), response_bs)[0] ** 2)
    return point_estimate, [numpy.percentile(resampled_rsqs, 25),
                            numpy.percentile(resampled_rsqs, 75)]
def rsq_with_bootstrap_linear_model_ninetyfive(predictor_mat, delta_fit_vector, helper_matrix):
    """R^2 of an OLS linear model plus a 95% bootstrap confidence interval
    (2.5th/97.5th percentiles of 1000 resamples of segregants)."""
    design = numpy.dot(helper_matrix, predictor_mat)
    gram_inv = numpy.linalg.inv(numpy.dot(design.T, design))
    beta = numpy.dot(gram_inv, numpy.dot(design.T, delta_fit_vector))
    point_estimate = scipy.stats.pearsonr(
        numpy.dot(design, beta), delta_fit_vector)[0] ** 2
    n_segs = len(helper_matrix[0, :])
    resampled_rsqs = []
    for _ in range(1000):
        seg_choice = numpy.random.choice(numpy.arange(n_segs), size=n_segs)
        # Expand the resampled segregants to their population rows.
        pop_rows = []
        for seg in seg_choice:
            pop_rows.extend(numpy.where(helper_matrix[:, seg] == 1)[0])
        design_bs = design[pop_rows, :]
        response_bs = delta_fit_vector[pop_rows]
        gram_inv_bs = numpy.linalg.inv(numpy.dot(design_bs.T, design_bs))
        beta_bs = numpy.dot(gram_inv_bs, numpy.dot(design_bs.T, response_bs))
        resampled_rsqs.append(scipy.stats.pearsonr(
            numpy.dot(design_bs, beta_bs), response_bs)[0] ** 2)
    return point_estimate, [numpy.percentile(resampled_rsqs, 2.5),
                            numpy.percentile(resampled_rsqs, 97.5)]
def broad_sense_Hsq(phenotypes, pops_per_seg, non_heritable_variances, helper_matrix):
    """Broad-sense heritability: one minus the population-weighted mean
    measurement-noise variance divided by the total phenotypic variance.
    ``helper_matrix`` is unused; kept for signature parity with the
    bootstrap variants."""
    weighted_noise = numpy.nansum(pops_per_seg * non_heritable_variances) / numpy.sum(pops_per_seg)
    return 1 - weighted_noise / numpy.var(phenotypes)
def broad_sense_Hsq_means(phenotypes, pops_per_seg, non_heritable_variances, helper_matrix):
    """Broad-sense heritability for per-segregant mean phenotypes: the
    (unweighted) summed noise variances are divided by the total population
    count and compared to the total phenotypic variance. ``helper_matrix``
    is unused; kept for signature parity."""
    mean_noise = numpy.nansum(non_heritable_variances) / numpy.sum(pops_per_seg)
    return 1 - mean_noise / numpy.var(phenotypes)
def broad_sense_Hsq_with_bootstrap(phenotypes, pops_per_seg, non_heritable_variances, helper_matrix):
    """Broad-sense heritability plus a bootstrap interquartile (25th/75th
    percentile) interval from 1000 resamples of segregants (columns of
    ``helper_matrix``)."""
    noise = 1. / numpy.sum(pops_per_seg) * numpy.nansum(pops_per_seg * non_heritable_variances)
    point_estimate = 1 - noise / numpy.var(phenotypes)
    n_segs = len(pops_per_seg)
    resampled = []
    for _ in range(1000):
        seg_choice = numpy.random.choice(numpy.arange(n_segs), size=n_segs)
        # Expand the resampled segregants to their population rows.
        pop_rows = []
        for seg in seg_choice:
            pop_rows.extend(numpy.where(helper_matrix[:, seg] == 1)[0])
        phen_bs = phenotypes[pop_rows]
        nhv_bs = non_heritable_variances[seg_choice]
        pops_bs = pops_per_seg[seg_choice]
        noise_bs = 1. / numpy.sum(pops_bs) * numpy.nansum(pops_bs * nhv_bs)
        resampled.append(1 - noise_bs / numpy.var(phen_bs))
    return point_estimate, [numpy.percentile(resampled, 25),
                            numpy.percentile(resampled, 75)]
def broad_sense_Hsq_with_bootstrap_ninetyfive(phenotypes, pops_per_seg, non_heritable_variances, helper_matrix):
    """Broad-sense heritability plus a 95% bootstrap confidence interval
    (2.5th/97.5th percentiles of 1000 resamples of segregants)."""
    noise = 1. / numpy.sum(pops_per_seg) * numpy.nansum(pops_per_seg * non_heritable_variances)
    point_estimate = 1 - noise / numpy.var(phenotypes)
    n_segs = len(pops_per_seg)
    resampled = []
    for _ in range(1000):
        seg_choice = numpy.random.choice(numpy.arange(n_segs), size=n_segs)
        # Expand the resampled segregants to their population rows.
        pop_rows = []
        for seg in seg_choice:
            pop_rows.extend(numpy.where(helper_matrix[:, seg] == 1)[0])
        phen_bs = phenotypes[pop_rows]
        nhv_bs = non_heritable_variances[seg_choice]
        pops_bs = pops_per_seg[seg_choice]
        noise_bs = 1. / numpy.sum(pops_bs) * numpy.nansum(pops_bs * nhv_bs)
        resampled.append(1 - noise_bs / numpy.var(phen_bs))
    return point_estimate, [numpy.percentile(resampled, 2.5),
                            numpy.percentile(resampled, 97.5)]
def broad_sense_Hsq_init_fits(init_fits, std_errs):
    """Broad-sense heritability of initial fitnesses, with a bootstrap
    interquartile (25th/75th percentile) interval from 1000 resamples.

    NOTE(review): ``num_segs`` counts non-NaN entries of ``std_errs``, and
    the bootstrap resamples only indices [0, num_segs) — this is correct
    only if any NaN entries sit at the end of the arrays. Confirm against
    callers before relying on it.
    """
    num_segs = numpy.sum(1 - numpy.isnan(std_errs))
    point_estimate = 1 - 1. / num_segs * numpy.nansum(std_errs**2) / numpy.var(init_fits)
    resampled = []
    for _ in range(1000):
        choice = numpy.random.choice(numpy.arange(num_segs), size=num_segs)
        resampled.append(
            1 - 1. / num_segs * numpy.nansum(std_errs[choice]**2) / numpy.var(init_fits[choice]))
    return point_estimate, [numpy.percentile(resampled, 25),
                            numpy.percentile(resampled, 75)]
"content_hash": "c346c35793c1c29a97edae476dc13aac",
"timestamp": "",
"source": "github",
"line_count": 998,
"max_line_length": 453,
"avg_line_length": 48.313627254509015,
"alnum_prop": 0.7259058008586183,
"repo_name": "erjerison/adaptability",
"id": "a78a4ed73df068218179e8cc937327763513b91d",
"size": "48217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "github_submission/calculate_narrow_sense_heritability.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Matlab",
"bytes": "72001"
},
{
"name": "Python",
"bytes": "311493"
}
],
"symlink_target": ""
} |
import os
from rbuild_test import rbuildhelp
from testutils import mock
from rbuild import errors
class CheckoutTest(rbuildhelp.RbuildHelper):
    """Unit tests for the rbuild Checkout plugin ('rbuild checkout')."""
    def _getHandle(self):
        # Build an rbuild handle whose product store is fully mocked, with
        # the Checkout plugin's commands registered and initialized.
        productStore = mock.MockObject(stableReturnValues=True)
        productStore.getRbuildConfigPath._mock.setReturn(
            self.workDir + '/rbuildrc')
        handle = self.getRbuildHandle(productStore=productStore)
        handle.Checkout.registerCommands()
        handle.Checkout.initialize()
        return handle
    def testUpdateCommandParsing(self):
        """Command-line flag combinations must dispatch to the matching
        Checkout method, and conflicting/extra arguments must raise."""
        handle = self._getHandle()
        cmd = handle.Commands.getCommandClass('checkout')()
        handle.productStore.update._mock.setDefaultReturn(None)
        mock.mockMethod(handle.Checkout.checkoutPackageDefault)
        mock.mockMethod(handle.Checkout.checkoutPackage)
        mock.mockMethod(handle.Checkout.derivePackage)
        mock.mockMethod(handle.Checkout.shadowPackage)
        mock.mockMethod(handle.Checkout.newPackage)
        cmd.runCommand(handle, {}, ['rbuild', 'checkout', 'foo'])
        handle.Checkout.checkoutPackageDefault._mock.assertCalled('foo',
            template=None,
            factory=None)
        cmd.runCommand(handle, {'template': 'default'},
            ['rbuild', 'checkout', 'foo'])
        handle.Checkout.checkoutPackageDefault._mock.assertCalled('foo',
            template='default',
            factory=None)
        cmd.runCommand(handle, {'template': 'default', 'factory': 'test'},
            ['rbuild', 'checkout', 'foo'])
        handle.Checkout.checkoutPackageDefault._mock.assertCalled('foo',
            template='default',
            factory='test')
        cmd.runCommand(handle, dict(derive=True), ['rbuild', 'checkout', 'foo'])
        handle.Checkout.derivePackage._mock.assertCalled('foo')
        cmd.runCommand(handle, dict(new=True), ['rbuild', 'checkout', 'foo'])
        handle.Checkout.newPackage._mock.assertCalled('foo', template=None,
            factory=None)
        cmd.runCommand(handle, dict(new=True, template='default'),
            ['rbuild', 'checkout', 'foo'])
        handle.Checkout.newPackage._mock.assertCalled('foo', template='default',
            factory=None)
        cmd.runCommand(handle, dict(shadow=True), ['rbuild', 'checkout', 'foo'])
        handle.Checkout.shadowPackage._mock.assertCalled('foo')
        # unknown arguments
        self.assertRaises(errors.ParseError,
            cmd.runCommand, handle, {}, ['rbuild', 'checkout', 'foo', 'bar'])
        # two flags set
        self.assertRaises(errors.ParseError,
            cmd.runCommand, handle, dict(shadow=True, new=True),
            ['rbuild', 'checkout', 'foo'])
        self.assertRaises(errors.ParseError,
            cmd.runCommand, handle, dict(shadow=True, derive=True),
            ['rbuild', 'checkout', 'foo'])
    def testCheckoutDefaultPackage(self):
        """checkoutPackageDefault: existing packages are checked out, unknown
        ones become new packages, and an upstream-provided name errors out
        asking for an explicit --shadow/--derive/--new choice."""
        handle = self._getHandle()
        mock.mockMethod(handle.Checkout._getUpstreamPackage)
        mock.mock(handle.Checkout, '_relPath')
        mock.mockMethod(handle.Checkout.checkoutPackage)
        mock.mockMethod(handle.Checkout.newPackage)
        # RBLD-122: avoid traceback here, make sure it's a PluginError
        # (to which we attach useful information...)
        # APPENG-2757: Changed to MissingProductStoreError
        handle.productStore = None
        self.assertRaises(errors.MissingProductStoreError,
            handle.Checkout.checkoutPackageDefault, 'asdf')
        # APPENG-2757: now mock out the product store
        handle.productStore = mock.MockObject()
        mock.mockMethod(handle.Checkout._getExistingPackage)
        handle.Checkout._getExistingPackage._mock.setDefaultReturn(
            self.makeTroveTuple('foo:source'))
        handle.Checkout._relPath._mock.setDefaultReturn('./foo')
        handle.Checkout.checkoutPackageDefault('foo')
        handle.Checkout._getExistingPackage._mock.assertCalled('foo')
        handle.Checkout.checkoutPackage._mock.assertCalled('foo')
        handle.Checkout.checkoutPackageDefault('foo:source')
        handle.Checkout._getExistingPackage._mock.assertCalled('foo:source')
        handle.Checkout.checkoutPackage._mock.assertCalled('foo:source')
        handle.Checkout._getExistingPackage._mock.setDefaultReturn(None)
        handle.Checkout._getUpstreamPackage._mock.setDefaultReturn(None)
        handle.Checkout.checkoutPackageDefault('foo')
        handle.Checkout._getExistingPackage._mock.assertCalled('foo')
        handle.Checkout._getUpstreamPackage._mock.assertCalled('foo')
        handle.Checkout.newPackage._mock.assertCalled('foo', template=None,
            factory=None)
        handle.Checkout.checkoutPackageDefault('foo', template='default')
        handle.Checkout._getExistingPackage._mock.assertCalled('foo')
        handle.Checkout._getUpstreamPackage._mock.assertCalled('foo')
        handle.Checkout.newPackage._mock.assertCalled('foo', template='default',
            factory=None)
        handle.Checkout.checkoutPackageDefault('foo', factory='thefact')
        handle.Checkout._getExistingPackage._mock.assertCalled('foo')
        handle.Checkout._getUpstreamPackage._mock.assertCalled('foo')
        handle.Checkout.newPackage._mock.assertCalled('foo', template=None,
            factory='thefact')
        handle.Checkout._getUpstreamPackage._mock.setDefaultReturn(
            self.makeTroveTuple('foo'))
        err = self.assertRaises(errors.PluginError,
            handle.Checkout.checkoutPackageDefault, 'foo')
        expectedError = '\n'.join((
            'The upstream source provides a version of this package.',
            'Please specify:',
            '  --shadow to shadow this package',
            '  --derive to derive from it',
            '  --new to replace it with a new version'))
        assert str(err) == expectedError
    def testCheckoutPackage(self):
        """checkoutPackage checks out from the active stage label into the
        product store's checkout directory."""
        handle = self._getHandle()
        handle.productStore.getActiveStageLabel._mock.setReturn(
            'conary.rpath.com@rpl:1')
        handle.productStore.getCheckoutDirectory._mock.setReturn(
            '/path/to/foo', 'foo')
        mock.mockMethod(handle.facade.conary.checkout)
        handle.Checkout.checkoutPackage('foo')
        handle.facade.conary.checkout._mock.assertCalled('foo',
            'conary.rpath.com@rpl:1',
            targetDir='/path/to/foo')
    def testDerivePackage(self):
        """derivePackage derives from the upstream binary, and errors when
        no upstream binary exists."""
        handle = self._getHandle()
        from rbuild_plugins.checkout import derive
        checkout = handle.Checkout
        mock.mockMethod(checkout._getUpstreamPackage)
        mock.mock(derive, 'derive')
        fooTrove = self.makeTroveTuple('foo')
        checkout._getUpstreamPackage._mock.setDefaultReturn(fooTrove)
        checkout.derivePackage('foo')
        derive.derive._mock.assertCalled(handle, fooTrove)
        checkout._getUpstreamPackage._mock.setDefaultReturn(None)
        err = self.assertRaises(errors.PluginError,
            checkout.derivePackage, 'foo')
        self.assertEquals(str(err), 'cannot derive foo: no upstream binary')
    def testShadowPackage(self):
        """shadowPackage shadows the upstream binary's source onto the active
        stage label and checks it out; no upstream binary is an error."""
        handle = self._getHandle()
        checkout = handle.Checkout
        mock.mockMethod(checkout._getUpstreamPackage)
        mock.mockMethod(handle.facade.conary.shadowSourceForBinary)
        mock.mockMethod(handle.facade.conary.checkout)
        fooTrove = self.makeTroveTuple('foo')
        checkout._getUpstreamPackage._mock.setDefaultReturn(fooTrove)
        handle.productStore.getActiveStageLabel._mock.setDefaultReturn(
            'localhost@rpl:1')
        handle.productStore.getCheckoutDirectory._mock.setReturn(
            '/path/to/foo', 'foo')
        checkout.shadowPackage('foo')
        handle.facade.conary.shadowSourceForBinary._mock.assertCalled(
            fooTrove[0],
            fooTrove[1],
            fooTrove[2],
            'localhost@rpl:1')
        handle.facade.conary.checkout._mock.assertCalled('foo',
            'localhost@rpl:1',
            targetDir='/path/to/foo')
        checkout._getUpstreamPackage._mock.setDefaultReturn(None)
        err = self.assertRaises(errors.PluginError,
            checkout.shadowPackage, 'foo')
        self.assertEquals(str(err), 'cannot shadow foo: no upstream binary')
    def testShadowRemotePackage(self):
        """shadowPackage with name=label syntax shadows from a remote label,
        and errors when the source does not exist on that label."""
        handle = self._getHandle()
        checkout = handle.Checkout
        mock.mockMethod(checkout._getRemotePackage)
        mock.mockMethod(handle.facade.conary.shadowSourceForBinary)
        mock.mockMethod(handle.facade.conary.checkout)
        fooTrove = self.makeTroveTuple('foo', 'foo.rpath.org@foo:1')
        checkout._getRemotePackage._mock.setDefaultReturn(fooTrove)
        handle.productStore.getActiveStageLabel._mock.setDefaultReturn(
            'localhost@rpl:1')
        handle.productStore.getCheckoutDirectory._mock.setReturn(
            '/path/to/foo', 'foo')
        checkout.shadowPackage('foo=foo.rpath.org@foo:1')
        handle.facade.conary.shadowSourceForBinary._mock.assertCalled(
            fooTrove[0],
            fooTrove[1],
            fooTrove[2],
            'localhost@rpl:1')
        handle.facade.conary.checkout._mock.assertCalled('foo',
            'localhost@rpl:1',
            targetDir='/path/to/foo')
        checkout._getRemotePackage._mock.setDefaultReturn(None)
        err = self.assertRaises(errors.PluginError,
            checkout.shadowPackage, 'foo=foo.rpath.org@foo:1')
        self.assertEquals(str(err), '%s:source does not exist on label %s.' % \
            ('foo', 'foo.rpath.org@foo:1'))
    def testNewPackage(self):
        """newPackage creates a fresh source (inferring factory='factory' for
        factory-* names), refuses names already in the product, and prompts
        before replacing upstream/detached packages."""
        handle = self._getHandle()
        checkout = handle.Checkout
        mock.mockMethod(handle.facade.conary.createNewPackage)
        handle.productStore.getActiveStageLabel._mock.setDefaultReturn(
            'foo.rpath.org@rpl:1')
        handle.productStore.getCheckoutDirectory._mock.setReturn(
            '/path/to/foo', 'foo')
        handle.productStore.getCheckoutDirectory._mock.setReturn(
            '/path/to/factory-foo',
            'factory-foo')
        mock.mockMethod(handle.Checkout._getExistingPackage)
        handle.Checkout._getExistingPackage._mock.setDefaultReturn(None)
        mock.mockMethod(handle.Checkout._getUpstreamPackage)
        handle.Checkout._getUpstreamPackage._mock.setDefaultReturn(None)
        mock.mock(handle.Checkout, '_relPath', './foo')
        checkout.newPackage('foo')
        handle.facade.conary.createNewPackage._mock.assertCalled('foo',
            'foo.rpath.org@rpl:1',
            targetDir='/path/to/foo',
            template=None,
            factory=None)
        checkout.newPackage('foo', template = 'default')
        handle.facade.conary.createNewPackage._mock.assertCalled('foo',
            'foo.rpath.org@rpl:1',
            targetDir='/path/to/foo',
            template='default',
            factory=None)
        checkout.newPackage('foo', factory = 'thefact')
        handle.facade.conary.createNewPackage._mock.assertCalled('foo',
            'foo.rpath.org@rpl:1',
            targetDir='/path/to/foo',
            template=None,
            factory='thefact')
        checkout.newPackage('factory-foo') # do NOT provide "factory ="
        handle.facade.conary.createNewPackage._mock.assertCalled('factory-foo',
            'foo.rpath.org@rpl:1',
            targetDir='/path/to/factory-foo',
            template=None,
            factory='factory')
        # change _getExistingPackage to return a package, createNewPackage
        # should not be called.
        handle.Checkout._getExistingPackage._mock.setDefaultReturn(
            self.makeTroveTuple('foo:source'))
        err = self.assertRaises(errors.PluginError,
            checkout.newPackage, 'foo')
        handle.facade.conary.createNewPackage._mock.assertNotCalled()
        self.assertEquals(str(err),
            '\n'.join(('This package already exists in the product.',
            'Use "rbuild checkout foo" to checkout the existing package '
            'to modify its files, or give the new package a different name.')))
        handle.Checkout._getExistingPackage._mock.setDefaultReturn(None)
        handle.Checkout._getUpstreamPackage._mock.setDefaultReturn(
            self.makeTroveTuple('foo:source'))
        # User confirmation gates replacing an upstream-provided package.
        mock.mock(handle, 'ui')
        handle.ui.getYn._mock.setDefaultReturn(True)
        checkout.newPackage('foo')
        handle.facade.conary.createNewPackage._mock.assertCalled('foo',
            'foo.rpath.org@rpl:1',
            targetDir='/path/to/foo',
            template=None,
            factory=None)
        handle.ui.getYn._mock.setDefaultReturn(False)
        checkout.newPackage('foo')
        handle.facade.conary.createNewPackage._mock.assertNotCalled()
        # Existing package on a shadow: confirmation gates detaching it.
        troveTup = self.makeTroveTuple('foobar:source', '/foo.rpath.org@foo:1//2//3')
        handle.ui.getYn._mock.setDefaultReturn(True)
        handle.Checkout._getExistingPackage._mock.setDefaultReturn(
            troveTup)
        mock.mockMethod(handle.facade.conary.detachPackage)
        checkout.newPackage('foobar')
        handle.facade.conary.detachPackage._mock.assertCalled(troveTup,
            '/' + 'foo.rpath.org@rpl:1', None)
        handle.ui.getYn._mock.setDefaultReturn(False)
        checkout.newPackage('foobar')
        handle.facade.conary.detachPackage._mock.assertNotCalled()
    def testGetUpstreamPackage(self):
        """_getUpstreamPackage returns the trove found in the search paths,
        or None when the search finds nothing."""
        handle = self._getHandle()
        checkout = handle.Checkout
        conaryFacade = handle.facade.conary
        fooTrove = self.makeTroveTuple('foo')
        mock.mockMethod(conaryFacade._findPackageInSearchPaths)
        conaryFacade._findPackageInSearchPaths._mock.setDefaultReturn([fooTrove])
        self.assertEquals(checkout._getUpstreamPackage('foo'), fooTrove)
        conaryFacade._findPackageInSearchPaths._mock.setDefaultReturn(None)
        self.assertEquals(checkout._getUpstreamPackage('foo'), None)
    def testGetRemotePackage(self):
        """_getRemotePackage looks up name (with or without :source) on the
        given label, returning the trove or None."""
        handle = self._getHandle()
        checkout = handle.Checkout
        conaryFacade = handle.facade.conary
        fooTrove = self.makeTroveTuple('foo', 'foo.rpath.org@foo:1')
        mock.mockMethod(conaryFacade._findTrove)
        conaryFacade._findTrove._mock.setDefaultReturn(fooTrove)
        self.assertEquals(checkout._getRemotePackage('foo',
            'foo.rpath.org@foo:1'), fooTrove)
        self.assertEquals(checkout._getRemotePackage('foo:source',
            'foo.rpath.org@foo:1'), fooTrove)
        conaryFacade._findTrove._mock.setDefaultReturn(None)
        self.assertEquals(checkout._getRemotePackage('foo',
            'foo.rpath.org@foo:1'), None)
    def testGetExistingPackage(self):
        """_getExistingPackage finds name:source on the active stage label
        (allowMissing so absence is not an error)."""
        handle = self._getHandle()
        checkout = handle.Checkout
        fooSource = self.makeTroveTuple('foo:source')
        handle.productStore.getActiveStageLabel._mock.setDefaultReturn(
            'foo.rpath.org@rpl:1')
        mock.mockMethod(handle.facade.conary._findTrove)
        handle.facade.conary._findTrove._mock.setDefaultReturn(fooSource)
        result = checkout._getExistingPackage('foo')
        self.assertEquals(result, fooSource)
        handle.facade.conary._findTrove._mock.assertCalled('foo:source',
            'foo.rpath.org@rpl:1',
            allowMissing=True)
        handle.facade.conary._findTrove._mock.setDefaultReturn(None)
    def test_relPath(self):
        """_relPath computes a relative path between two locations (the last
        case resolving a relative target against the cwd)."""
        handle = self._getHandle()
        checkout = handle.Checkout
        self.assertEquals(checkout._relPath('/foo/bar', '/foo/bar'),
            '.')
        self.assertEquals(checkout._relPath('/foo/bar', '/foo/bar/baz'),
            './baz')
        self.assertEquals(checkout._relPath('/foo/bar/baz', '/foo/bar'),
            '..')
        self.assertEquals(checkout._relPath('/foo/bar', '/foo/baz'),
            '../baz')
        self.assertEquals(checkout._relPath('/1/2/3/bar', '/1/2/4/baz'),
            '../../4/baz')
        os.chdir(self.workDir)
        self.assertEquals(checkout._relPath(self.workDir+'/bar', 'baz'),
            '../baz')
    def testCommand(self):
        """The full command line round-trips through argument parsing into
        CheckoutCommand.runCommand with the expected argument dict."""
        self.getRbuildHandle()
        self.checkRbuild('checkout foo --new --shadow --derive '
            '--template=default --factory=thefact',
            'rbuild_plugins.checkout.CheckoutCommand.runCommand',
            [None, None, {'derive' : True, 'new' : True,
            'shadow' : True, 'template' : 'default',
            'factory' : 'thefact'},
            ['rbuild', 'checkout', 'foo']])
| {
"content_hash": "dce2775f64098d1ecc177a7c8bf87e63",
"timestamp": "",
"source": "github",
"line_count": 377,
"max_line_length": 85,
"avg_line_length": 51.61538461538461,
"alnum_prop": 0.5470476386248009,
"repo_name": "sassoftware/rbuild",
"id": "34e7bca5736a25e14f31e4f6be363a45c58a425c",
"size": "20065",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rbuild_test/unit_test/pluginstest/checkouttest/checkouttest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Cucumber",
"bytes": "355"
},
{
"name": "Makefile",
"bytes": "17287"
},
{
"name": "Python",
"bytes": "997556"
},
{
"name": "Shell",
"bytes": "4530"
}
],
"symlink_target": ""
} |
"""Tests for swift.obj.server"""
import six.moves.cPickle as pickle
import datetime
import json
import errno
import operator
import os
import mock
import six
from six import StringIO
import unittest
import math
import random
from shutil import rmtree
from time import gmtime, strftime, time, struct_time
from tempfile import mkdtemp
from hashlib import md5
import itertools
import tempfile
from collections import defaultdict
from contextlib import contextmanager
from eventlet import sleep, spawn, wsgi, listen, Timeout, tpool, greenthread
from eventlet.green import httplib
from nose import SkipTest
from swift import __version__ as swift_version
from swift.common.http import is_success
from test.unit import FakeLogger, debug_logger, mocked_http_conn, \
make_timestamp_iter
from test.unit import connect_tcp, readuntil2crlfs, patch_policies
from swift.obj import server as object_server
from swift.obj import diskfile
from swift.common import utils, bufferedhttp
from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \
NullLogger, storage_directory, public, replication
from swift.common import constraints
from swift.common.swob import Request, HeaderKeyDict, WsgiBytesIO
from swift.common.splice import splice
from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy,
POLICIES, EC_POLICY)
from swift.common.exceptions import DiskFileDeviceUnavailable
def mock_time(*args, **kwargs):
    """Stand-in for time(): accepts any arguments and always returns the
    fixed timestamp 5000.0 so tests are deterministic."""
    del args, kwargs  # accepted only for signature compatibility
    return 5000.0
# Policies installed by @patch_policies on TestObjectController below:
# a default replication policy plus a 10+4 erasure-coded policy
# (jerasure Reed-Solomon Vandermonde).
test_policies = [
    StoragePolicy(0, name='zero', is_default=True),
    ECStoragePolicy(1, name='one', ec_type='jerasure_rs_vand',
                    ec_ndata=10, ec_nparity=4),
]
@contextmanager
def fake_spawn():
    """
    Spawn and capture the result so we can later wait on it. This means we can
    test code executing in a greenthread but still wait() on the result to
    ensure that the method has completed.
    """
    greenlets = []

    def _inner_fake_spawn(func, *a, **kw):
        # Record each greenthread so the finally-block can wait on them all.
        gt = greenthread.spawn(func, *a, **kw)
        greenlets.append(gt)
        return gt

    # Patch via mock.patch only. The previous code also assigned
    # object_server.spawn = _inner_fake_spawn *before* patching, which made
    # mock.patch snapshot the fake and "restore" it on exit, leaving the
    # module permanently patched after the context manager finished.
    with mock.patch('swift.obj.server.spawn', _inner_fake_spawn):
        try:
            yield
        finally:
            for gt in greenlets:
                gt.wait()
@patch_policies(test_policies)
class TestObjectController(unittest.TestCase):
"""Test swift.obj.server.ObjectController"""
    def setUp(self):
        """Set up for testing swift.object.server.ObjectController"""
        # Hash-path config is set module-wide rather than via conf.
        utils.HASH_PATH_SUFFIX = 'endcap'
        utils.HASH_PATH_PREFIX = 'startcap'
        self.tmpdir = mkdtemp()
        self.testdir = os.path.join(self.tmpdir,
                                    'tmp_test_object_server_ObjectController')
        mkdirs(os.path.join(self.testdir, 'sda1'))
        self.conf = {'devices': self.testdir, 'mount_check': 'false',
                     'container_update_timeout': 0.0}
        self.object_controller = object_server.ObjectController(
            self.conf, logger=debug_logger())
        # Sync after every byte so fsync-related code paths get exercised.
        self.object_controller.bytes_per_sync = 1
        # Make tpool.execute run synchronously for deterministic tests;
        # the original is restored in tearDown.
        self._orig_tpool_exc = tpool.execute
        tpool.execute = lambda f, *args, **kwargs: f(*args, **kwargs)
        self.df_mgr = diskfile.DiskFileManager(self.conf,
                                               self.object_controller.logger)
        self.logger = debug_logger('test-object-controller')
    def tearDown(self):
        """Tear down for testing swift.object.server.ObjectController"""
        # Remove the scratch tree, then restore the real tpool.execute that
        # setUp replaced with a synchronous shim.
        rmtree(self.tmpdir)
        tpool.execute = self._orig_tpool_exc
    def _stage_tmp_dir(self, policy):
        # Create the policy-specific tmp dir under sda1 so PUTs have the
        # on-disk staging area the object server expects.
        mkdirs(os.path.join(self.testdir, 'sda1',
                            diskfile.get_tmp_dir(policy)))
    def check_all_api_methods(self, obj_name='o', alt_res=None):
        # Run PUT/GET/POST/HEAD/DELETE in order against a single object and
        # verify each response status (and the body for successful GETs).
        # alt_res, if given, overrides the expected status for every method.
        path = '/sda1/p/a/c/%s' % obj_name
        body = 'SPECIAL_STRING'
        op_table = {
            "PUT": (body, alt_res or 201, ''),  # create one
            "GET": ('', alt_res or 200, body),  # check it
            "POST": ('', alt_res or 202, ''),   # update it
            "HEAD": ('', alt_res or 200, ''),   # head it
            "DELETE": ('', alt_res or 204, '')  # delete it
        }
        for method in ["PUT", "GET", "POST", "HEAD", "DELETE"]:
            in_body, res, out_body = op_table[method]
            timestamp = normalize_timestamp(time())
            req = Request.blank(
                path, environ={'REQUEST_METHOD': method},
                headers={'X-Timestamp': timestamp,
                         'Content-Type': 'application/x-test'})
            req.body = in_body
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, res)
            if out_body and (200 <= res < 300):
                self.assertEqual(resp.body, out_body)
def test_REQUEST_SPECIAL_CHARS(self):
obj = 'special昆%20/%'
self.check_all_api_methods(obj)
def test_device_unavailable(self):
def raise_disk_unavail(*args, **kwargs):
raise DiskFileDeviceUnavailable()
self.object_controller.get_diskfile = raise_disk_unavail
self.check_all_api_methods(alt_res=507)
def test_allowed_headers(self):
dah = ['content-disposition', 'content-encoding', 'x-delete-at',
'x-object-manifest', 'x-static-large-object']
conf = {'devices': self.testdir, 'mount_check': 'false',
'allowed_headers': ','.join(['content-type'] + dah)}
self.object_controller = object_server.ObjectController(
conf, logger=debug_logger())
self.assertEqual(self.object_controller.allowed_headers, set(dah))
def test_POST_update_meta(self):
    # Test swift.obj.server.ObjectController.POST
    """POST replaces user metadata wholesale and honors allowed_headers.

    Verifies that a POST drops all prior X-Object-Meta-* values, that only
    headers in allowed_headers are persisted, and that an empty metadata
    value is stored as an empty string.
    """
    original_headers = self.object_controller.allowed_headers
    test_headers = 'content-encoding foo bar'.split()
    self.object_controller.allowed_headers = set(test_headers)
    # PUT an object with initial metadata
    timestamp = normalize_timestamp(time())
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': timestamp,
                                 'Content-Type': 'application/x-test',
                                 'Foo': 'fooheader',
                                 'Baz': 'bazheader',
                                 'X-Object-Meta-1': 'One',
                                 'X-Object-Meta-Two': 'Two'})
    req.body = 'VERIFY'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # POST new metadata; old meta items must be fully replaced
    timestamp = normalize_timestamp(time())
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Timestamp': timestamp,
                                 'X-Object-Meta-3': 'Three',
                                 'X-Object-Meta-4': 'Four',
                                 'Content-Encoding': 'gzip',
                                 'Foo': 'fooheader',
                                 'Bar': 'barheader',
                                 'Content-Type': 'application/x-test'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    req = Request.blank('/sda1/p/a/c/o')
    resp = req.get_response(self.object_controller)
    self.assertTrue("X-Object-Meta-1" not in resp.headers and
                    "X-Object-Meta-Two" not in resp.headers and
                    "X-Object-Meta-3" in resp.headers and
                    "X-Object-Meta-4" in resp.headers and
                    "Foo" in resp.headers and
                    "Bar" in resp.headers and
                    "Baz" not in resp.headers and
                    "Content-Encoding" in resp.headers)
    self.assertEqual(resp.headers['Content-Type'], 'application/x-test')
    # HEAD must show the same view as GET
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'HEAD'})
    resp = req.get_response(self.object_controller)
    self.assertTrue("X-Object-Meta-1" not in resp.headers and
                    "X-Object-Meta-Two" not in resp.headers and
                    "X-Object-Meta-3" in resp.headers and
                    "X-Object-Meta-4" in resp.headers and
                    "Foo" in resp.headers and
                    "Bar" in resp.headers and
                    "Baz" not in resp.headers and
                    "Content-Encoding" in resp.headers)
    self.assertEqual(resp.headers['Content-Type'], 'application/x-test')
    # a POST with no metadata wipes everything previously POSTed
    timestamp = normalize_timestamp(time())
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Timestamp': timestamp,
                                 'Content-Type': 'application/x-test'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    req = Request.blank('/sda1/p/a/c/o')
    resp = req.get_response(self.object_controller)
    self.assertTrue("X-Object-Meta-3" not in resp.headers and
                    "X-Object-Meta-4" not in resp.headers and
                    "Foo" not in resp.headers and
                    "Bar" not in resp.headers and
                    "Content-Encoding" not in resp.headers)
    self.assertEqual(resp.headers['Content-Type'], 'application/x-test')
    # test defaults
    self.object_controller.allowed_headers = original_headers
    timestamp = normalize_timestamp(time())
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': timestamp,
                                 'Content-Type': 'application/x-test',
                                 'Foo': 'fooheader',
                                 'X-Object-Meta-1': 'One',
                                 'X-Object-Manifest': 'c/bar',
                                 'Content-Encoding': 'gzip',
                                 'Content-Disposition': 'bar',
                                 'X-Static-Large-Object': 'True',
                                 })
    req.body = 'VERIFY'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    req = Request.blank('/sda1/p/a/c/o')
    resp = req.get_response(self.object_controller)
    self.assertTrue("X-Object-Meta-1" in resp.headers and
                    "Foo" not in resp.headers and
                    "Content-Encoding" in resp.headers and
                    "X-Object-Manifest" in resp.headers and
                    "Content-Disposition" in resp.headers and
                    "X-Static-Large-Object" in resp.headers)
    self.assertEqual(resp.headers['Content-Type'], 'application/x-test')
    # POST under default allowed_headers replaces the allowed set too,
    # but X-Static-Large-Object persists (sysmeta-like behavior)
    timestamp = normalize_timestamp(time())
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Timestamp': timestamp,
                                 'X-Object-Meta-3': 'Three',
                                 'Foo': 'fooheader',
                                 'Content-Type': 'application/x-test'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    req = Request.blank('/sda1/p/a/c/o')
    resp = req.get_response(self.object_controller)
    self.assertTrue("X-Object-Meta-1" not in resp.headers and
                    "Foo" not in resp.headers and
                    "Content-Encoding" not in resp.headers and
                    "X-Object-Manifest" not in resp.headers and
                    "Content-Disposition" not in resp.headers and
                    "X-Object-Meta-3" in resp.headers and
                    "X-Static-Large-Object" in resp.headers)
    self.assertEqual(resp.headers['Content-Type'], 'application/x-test')
    # Test for empty metadata
    timestamp = normalize_timestamp(time())
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Timestamp': timestamp,
                                 'Content-Type': 'application/x-test',
                                 'X-Object-Meta-3': ''})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    req = Request.blank('/sda1/p/a/c/o')
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.headers["x-object-meta-3"], '')
def test_POST_old_timestamp(self):
    """POSTs at or before the stored timestamp are rejected with 409.

    The 409 response carries X-Backend-Timestamp so the caller can see
    which timestamp won.
    """
    ts = time()
    orig_timestamp = utils.Timestamp(ts).internal
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': orig_timestamp,
                                 'Content-Type': 'application/x-test',
                                 'X-Object-Meta-1': 'One',
                                 'X-Object-Meta-Two': 'Two'})
    req.body = 'VERIFY'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # Same timestamp should result in 409
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Timestamp': orig_timestamp,
                                 'X-Object-Meta-3': 'Three',
                                 'X-Object-Meta-4': 'Four',
                                 'Content-Encoding': 'gzip',
                                 'Content-Type': 'application/x-test'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 409)
    self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
    # Earlier timestamp should result in 409
    timestamp = normalize_timestamp(ts - 1)
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Timestamp': timestamp,
                                 'X-Object-Meta-5': 'Five',
                                 'X-Object-Meta-6': 'Six',
                                 'Content-Encoding': 'gzip',
                                 'Content-Type': 'application/x-test'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 409)
    self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
def test_POST_conflicts_with_later_POST(self):
    """A POST older than an already-applied POST gets 409 and writes
    no .meta file; the newer POST's .meta file remains on disk."""
    ts_iter = make_timestamp_iter()
    t_put = next(ts_iter).internal
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': t_put,
                                 'Content-Length': 0,
                                 'Content-Type': 'plain/text'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    t_post1 = next(ts_iter).internal
    t_post2 = next(ts_iter).internal
    # apply the LATER post first...
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Timestamp': t_post2})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    # ...then the earlier one, which must conflict
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Timestamp': t_post1})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 409)
    obj_dir = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(0), 'p',
                          hash_path('a', 'c', 'o')))
    # winner's .meta exists, loser's was never written
    ts_file = os.path.join(obj_dir, t_post2 + '.meta')
    self.assertTrue(os.path.isfile(ts_file))
    meta_file = os.path.join(obj_dir, t_post1 + '.meta')
    self.assertFalse(os.path.isfile(meta_file))
def test_POST_not_exist(self):
    """POST to an object that was never PUT must 404."""
    post_headers = {'X-Timestamp': normalize_timestamp(time()),
                    'X-Object-Meta-1': 'One',
                    'X-Object-Meta-2': 'Two',
                    'Content-Type': 'text/plain'}
    req = Request.blank('/sda1/p/a/c/fail',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers=post_headers)
    resp = req.get_response(self.object_controller)
    self.assertEqual(404, resp.status_int)
def test_POST_invalid_path(self):
    """POST to a container path (no object component) must 400."""
    post_headers = {'X-Timestamp': normalize_timestamp(time()),
                    'X-Object-Meta-1': 'One',
                    'X-Object-Meta-2': 'Two',
                    'Content-Type': 'text/plain'}
    req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
                        headers=post_headers)
    resp = req.get_response(self.object_controller)
    self.assertEqual(400, resp.status_int)
def test_POST_no_timestamp(self):
    """POST without an X-Timestamp header must 400."""
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Object-Meta-1': 'One',
                                 'X-Object-Meta-2': 'Two',
                                 'Content-Type': 'text/plain'})
    self.assertEqual(
        400, req.get_response(self.object_controller).status_int)
def test_POST_bad_timestamp(self):
    """POST with an unparseable X-Timestamp header must 400."""
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Timestamp': 'bad',
                                 'X-Object-Meta-1': 'One',
                                 'X-Object-Meta-2': 'Two',
                                 'Content-Type': 'text/plain'})
    self.assertEqual(
        400, req.get_response(self.object_controller).status_int)
def test_POST_container_connection(self):
    """Container-update failures never fail the client POST.

    The object server forwards metadata updates to the container server;
    whether that update returns 202, raises, or returns 500, the client's
    POST still gets 202 (the update is queued/retried out of band).
    """
    def mock_http_connect(response, with_exc=False):
        # Returns a factory producing a fake container connection that
        # either responds with the given status or raises on getresponse().
        class FakeConn(object):
            def __init__(self, status, with_exc):
                self.status = status
                self.reason = 'Fake'
                self.host = '1.2.3.4'
                self.port = '1234'
                self.with_exc = with_exc
            def getresponse(self):
                if self.with_exc:
                    raise Exception('test')
                return self
            def read(self, amt=None):
                return ''
        return lambda *args, **kwargs: FakeConn(response, with_exc)
    ts = time()
    timestamp = normalize_timestamp(ts)
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Content-Length': '0'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # container update succeeds (202)
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': normalize_timestamp(ts + 1),
                 'X-Container-Host': '1.2.3.4:0',
                 'X-Container-Partition': '3',
                 'X-Container-Device': 'sda1',
                 'X-Container-Timestamp': '1',
                 'Content-Type': 'application/new1'})
    with mock.patch.object(object_server, 'http_connect',
                           mock_http_connect(202)):
        resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    # container update raises an exception
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': normalize_timestamp(ts + 2),
                 'X-Container-Host': '1.2.3.4:0',
                 'X-Container-Partition': '3',
                 'X-Container-Device': 'sda1',
                 'X-Container-Timestamp': '1',
                 'Content-Type': 'application/new1'})
    with mock.patch.object(object_server, 'http_connect',
                           mock_http_connect(202, with_exc=True)):
        resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    # container update returns a server error
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': normalize_timestamp(ts + 3),
                 'X-Container-Host': '1.2.3.4:0',
                 'X-Container-Partition': '3',
                 'X-Container-Device': 'sda1',
                 'X-Container-Timestamp': '1',
                 'Content-Type': 'application/new2'})
    with mock.patch.object(object_server, 'http_connect',
                           mock_http_connect(500)):
        resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
def test_POST_quarantine_zbyte(self):
    """A zero-byte .data file (metadata intact, body lost) is quarantined
    when a POST tries to open it, and the POST gets 404."""
    timestamp = normalize_timestamp(time())
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': timestamp,
                                 'Content-Type': 'application/x-test'})
    req.body = 'VERIFY'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    objfile = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                       policy=POLICIES.legacy)
    objfile.open()
    file_name = os.path.basename(objfile._data_file)
    # rewrite the data file with its metadata but no body bytes,
    # simulating on-disk corruption
    with open(objfile._data_file) as fp:
        metadata = diskfile.read_metadata(fp)
    os.unlink(objfile._data_file)
    with open(objfile._data_file, 'w') as fp:
        diskfile.write_metadata(fp, metadata)
    self.assertEqual(os.listdir(objfile._datadir)[0], file_name)
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': normalize_timestamp(time())})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 404)
    # corrupt file must have been moved into the quarantine area
    quar_dir = os.path.join(
        self.testdir, 'sda1', 'quarantined', 'objects',
        os.path.basename(os.path.dirname(objfile._data_file)))
    self.assertEqual(os.listdir(quar_dir)[0], file_name)
def test_PUT_invalid_path(self):
    """PUT to a container path (no object component) must 400."""
    req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'})
    self.assertEqual(
        400, req.get_response(self.object_controller).status_int)
def test_PUT_no_timestamp(self):
    """PUT without an X-Timestamp header must 400."""
    env = {'REQUEST_METHOD': 'PUT', 'CONTENT_LENGTH': '0'}
    req = Request.blank('/sda1/p/a/c/o', environ=env)
    self.assertEqual(
        400, req.get_response(self.object_controller).status_int)
def test_PUT_no_content_type(self):
    """PUT without a Content-Type header must 400."""
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Length': '6'})
    req.body = 'VERIFY'
    self.assertEqual(
        400, req.get_response(self.object_controller).status_int)
def test_PUT_invalid_content_type(self):
    """PUT with a non-UTF8 Content-Type must 400 and name the header."""
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Length': '6',
                 'Content-Type': '\xff\xff'})
    req.body = 'VERIFY'
    resp = req.get_response(self.object_controller)
    self.assertEqual(400, resp.status_int)
    # the error body should point at the offending header
    self.assertIn('Content-Type', resp.body)
def test_PUT_no_content_length(self):
    """Non-chunked PUT without Content-Length must 411."""
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Type': 'application/octet-stream'})
    req.body = 'VERIFY'
    # setting req.body added a Content-Length; drop it again
    del req.headers['Content-Length']
    self.assertEqual(
        411, req.get_response(self.object_controller).status_int)
def test_PUT_zero_content_length(self):
    """PUT of an empty body succeeds with Content-Length: 0."""
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Type': 'application/octet-stream'})
    req.body = ''
    # swob derives the zero length from the empty body
    self.assertEqual('0', req.headers['Content-Length'])
    self.assertEqual(
        201, req.get_response(self.object_controller).status_int)
def test_PUT_bad_transfer_encoding(self):
    """PUT with an unsupported Transfer-Encoding must 400."""
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Type': 'application/octet-stream'})
    req.body = 'VERIFY'
    req.headers['Transfer-Encoding'] = 'bad'
    self.assertEqual(
        400, req.get_response(self.object_controller).status_int)
def test_PUT_if_none_match_star(self):
    """If-None-Match: * allows create, then blocks overwrite with 412."""
    def _put_star():
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '6',
                     'Content-Type': 'application/octet-stream',
                     'If-None-Match': '*'})
        req.body = 'VERIFY'
        return req.get_response(self.object_controller)
    # First PUT should succeed
    self.assertEqual(201, _put_star().status_int)
    # File should already exist so it should fail
    self.assertEqual(412, _put_star().status_int)
def test_PUT_if_none_match(self):
    """If-None-Match with an etag: create succeeds when nothing matches,
    then a PUT naming the stored object's etag gets 412."""
    def _put(cond_etag):
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '6',
                     'Content-Type': 'application/octet-stream',
                     'If-None-Match': cond_etag})
        req.body = 'VERIFY'
        return req.get_response(self.object_controller)
    # nothing there yet, so any etag condition passes
    self.assertEqual(201, _put('notthere').status_int)
    # now the condition names the stored etag of 'VERIFY' -> must fail
    self.assertEqual(
        412, _put('0b4c12d7e0a73840c1c4f148fda3b037').status_int)
def test_PUT_common(self):
    """PUT stores the body plus metadata on disk; only X-Object-Meta-*
    and configured allowed_headers are persisted.

    Note: the original test both patched ``allowed_headers`` via
    ``mock.patch.object`` AND re-assigned the same value inside the
    with-block; the redundant assignment has been removed — the patch
    alone applies (and restores) the override.
    """
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Length': '6',
                 'Content-Type': 'application/octet-stream',
                 'x-object-meta-test': 'one',
                 'Custom-Header': '*',
                 'X-Backend-Replication-Headers':
                 'Content-Type Content-Length'})
    req.body = 'VERIFY'
    with mock.patch.object(self.object_controller, 'allowed_headers',
                           ['Custom-Header']):
        resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]),
                          'p', hash_path('a', 'c', 'o')),
        utils.Timestamp(timestamp).internal + '.data')
    self.assertTrue(os.path.isfile(objfile))
    self.assertEqual(open(objfile).read(), 'VERIFY')
    # meta header is canonicalized; Custom-Header kept via allowed_headers
    self.assertEqual(diskfile.read_metadata(objfile),
                     {'X-Timestamp': utils.Timestamp(timestamp).internal,
                      'Content-Length': '6',
                      'ETag': '0b4c12d7e0a73840c1c4f148fda3b037',
                      'Content-Type': 'application/octet-stream',
                      'name': '/a/c/o',
                      'X-Object-Meta-Test': 'one',
                      'Custom-Header': '*'})
def test_PUT_overwrite(self):
    """A newer PUT fully replaces body and metadata of an older PUT."""
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Length': '6',
                 'Content-Type': 'application/octet-stream'})
    req.body = 'VERIFY'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # ensure the second timestamp is strictly later
    sleep(.00001)
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Content-Encoding': 'gzip'})
    req.body = 'VERIFY TWO'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # on-disk .data file is named for the newer timestamp
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                          hash_path('a', 'c', 'o')),
        utils.Timestamp(timestamp).internal + '.data')
    self.assertTrue(os.path.isfile(objfile))
    self.assertEqual(open(objfile).read(), 'VERIFY TWO')
    self.assertEqual(diskfile.read_metadata(objfile),
                     {'X-Timestamp': utils.Timestamp(timestamp).internal,
                      'Content-Length': '10',
                      'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
                      'Content-Type': 'text/plain',
                      'name': '/a/c/o',
                      'Content-Encoding': 'gzip'})
def test_PUT_overwrite_w_delete_at(self):
    """Overwriting an object that had X-Delete-At drops the expiry;
    the new PUT's metadata carries no X-Delete-At entry."""
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'X-Delete-At': 9999999999,
                 'Content-Length': '6',
                 'Content-Type': 'application/octet-stream'})
    req.body = 'VERIFY'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # ensure the second timestamp is strictly later
    sleep(.00001)
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Content-Encoding': 'gzip'})
    req.body = 'VERIFY TWO'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                          hash_path('a', 'c', 'o')),
        utils.Timestamp(timestamp).internal + '.data')
    self.assertTrue(os.path.isfile(objfile))
    self.assertEqual(open(objfile).read(), 'VERIFY TWO')
    # metadata is exactly the new PUT's set: no X-Delete-At survives
    self.assertEqual(diskfile.read_metadata(objfile),
                     {'X-Timestamp': utils.Timestamp(timestamp).internal,
                      'Content-Length': '10',
                      'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
                      'Content-Type': 'text/plain',
                      'name': '/a/c/o',
                      'Content-Encoding': 'gzip'})
def test_PUT_old_timestamp(self):
    """PUTs at or before the stored timestamp are rejected with 409,
    and the 409 reports the winning timestamp via X-Backend-Timestamp."""
    ts = time()
    orig_timestamp = utils.Timestamp(ts).internal
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': orig_timestamp,
                 'Content-Length': '6',
                 'Content-Type': 'application/octet-stream'})
    req.body = 'VERIFY'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # same timestamp -> conflict
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': normalize_timestamp(ts),
                                 'Content-Type': 'text/plain',
                                 'Content-Encoding': 'gzip'})
    req.body = 'VERIFY TWO'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 409)
    self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
    # earlier timestamp -> conflict
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers={
                            'X-Timestamp': normalize_timestamp(ts - 1),
                            'Content-Type': 'text/plain',
                            'Content-Encoding': 'gzip'})
    req.body = 'VERIFY THREE'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 409)
    self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
def test_PUT_no_etag(self):
    """PUT without a client-supplied ETag is accepted."""
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Type': 'text/plain'})
    req.body = 'test'
    self.assertEqual(
        201, req.get_response(self.object_controller).status_int)
def test_PUT_invalid_etag(self):
    """PUT whose ETag header does not match the body must 422."""
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Type': 'text/plain',
                 'ETag': 'invalid'})
    req.body = 'test'
    self.assertEqual(
        422, req.get_response(self.object_controller).status_int)
def test_PUT_user_metadata(self):
    """X-Object-Meta-* headers supplied on PUT are persisted to disk."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
                 'X-Object-Meta-1': 'One',
                 'X-Object-Meta-Two': 'Two'})
    req.body = 'VERIFY THREE'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                          hash_path('a', 'c', 'o')),
        utils.Timestamp(timestamp).internal + '.data')
    self.assertTrue(os.path.isfile(objfile))
    self.assertEqual(open(objfile).read(), 'VERIFY THREE')
    # both meta headers appear verbatim in the on-disk metadata
    self.assertEqual(diskfile.read_metadata(objfile),
                     {'X-Timestamp': utils.Timestamp(timestamp).internal,
                      'Content-Length': '12',
                      'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
                      'Content-Type': 'text/plain',
                      'name': '/a/c/o',
                      'X-Object-Meta-1': 'One',
                      'X-Object-Meta-Two': 'Two'})
def test_PUT_etag_in_footer(self):
    """An Etag sent in the MIME metadata footer overrides the Etag
    header on a chunked multipart-MIME PUT."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'Etag': 'other-etag',  # must lose to the footer Etag
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    obj_etag = md5("obj data").hexdigest()
    footer_meta = json.dumps({"Etag": obj_etag})
    footer_meta_cksum = md5(footer_meta).hexdigest()
    # body doc followed by a checksummed footer doc
    req.body = "\r\n".join((
        "--boundary",
        "",
        "obj data",
        "--boundary",
        "Content-MD5: " + footer_meta_cksum,
        "",
        footer_meta,
        "--boundary--",
    ))
    # chunked transfer: length comes from the stream, not a header
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.etag, obj_etag)
    self.assertEqual(resp.status_int, 201)
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                          hash_path('a', 'c', 'o')),
        utils.Timestamp(timestamp).internal + '.data')
    with open(objfile) as fh:
        self.assertEqual(fh.read(), "obj data")
def test_PUT_etag_in_footer_mismatch(self):
    """A footer Etag that does not match the uploaded body yields 422."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    # footer claims the etag of "green" but the body is "blue"
    footer_meta = json.dumps({"Etag": md5("green").hexdigest()})
    footer_meta_cksum = md5(footer_meta).hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "blue",
        "--boundary",
        "Content-MD5: " + footer_meta_cksum,
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 422)
def test_PUT_meta_in_footer(self):
    """Metadata in the MIME footer overrides same-named request headers
    for both user meta and sysmeta."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Object-Meta-X': 'Z',  # to be overridden by footer
                 'X-Object-Sysmeta-X': 'Z',  # likewise
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    footer_meta = json.dumps({
        'X-Object-Meta-X': 'Y',
        'X-Object-Sysmeta-X': 'Y',
    })
    footer_meta_cksum = md5(footer_meta).hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "stuff stuff stuff",
        "--boundary",
        "Content-MD5: " + footer_meta_cksum,
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # HEAD back and confirm the footer values ('Y') won
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp},
        environ={'REQUEST_METHOD': 'HEAD'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.headers.get('X-Object-Meta-X'), 'Y')
    self.assertEqual(resp.headers.get('X-Object-Sysmeta-X'), 'Y')
def test_PUT_missing_footer_checksum(self):
    """A footer document without its Content-MD5 checksum yields 400."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    footer_meta = json.dumps({"Etag": md5("obj data").hexdigest()})
    req.body = "\r\n".join((
        "--boundary",
        "",
        "obj data",
        "--boundary",
        # no Content-MD5
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 400)
def test_PUT_bad_footer_checksum(self):
    """A footer whose Content-MD5 does not match its bytes yields 422."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    footer_meta = json.dumps({"Etag": md5("obj data").hexdigest()})
    # checksum computed over the wrong bytes -> deliberately invalid
    bad_footer_meta_cksum = md5(footer_meta + "bad").hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "obj data",
        "--boundary",
        "Content-MD5: " + bad_footer_meta_cksum,
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 422)
def test_PUT_bad_footer_json(self):
    """A footer that checksums correctly but is not valid JSON yields 400."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    # garbage that is definitely not parseable JSON
    footer_meta = "{{{[[{{[{[[{[{[[{{{[{{{{[[{{[{["
    footer_meta_cksum = md5(footer_meta).hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "obj data",
        "--boundary",
        "Content-MD5: " + footer_meta_cksum,
        "",
        footer_meta,
        "--boundary--",
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 400)
def test_PUT_extra_mime_docs_ignored(self):
    """MIME documents after the footer are ignored but fully consumed
    from the wsgi input stream."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'Transfer-Encoding': 'chunked',
                 'X-Backend-Obj-Metadata-Footer': 'yes',
                 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
        environ={'REQUEST_METHOD': 'PUT'})
    footer_meta = json.dumps({'X-Object-Meta-Mint': 'pepper'})
    footer_meta_cksum = md5(footer_meta).hexdigest()
    req.body = "\r\n".join((
        "--boundary",
        "",
        "obj data",
        "--boundary",
        "Content-MD5: " + footer_meta_cksum,
        "",
        footer_meta,
        "--boundary",
        # an extra, meaningless trailing document
        "This-Document-Is-Useless: yes",
        "",
        "blah blah I take up space",
        "--boundary--"
    ))
    req.headers.pop("Content-Length", None)
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # swob made this into a StringIO for us
    wsgi_input = req.environ['wsgi.input']
    # input must have been read to the end despite the extra doc
    self.assertEqual(wsgi_input.tell(), len(wsgi_input.getvalue()))
def test_PUT_user_metadata_no_xattr(self):
    """A filesystem without xattr support makes PUT fail with 507."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
                 'X-Object-Meta-1': 'One',
                 'X-Object-Meta-Two': 'Two'})
    req.body = 'VERIFY THREE'
    def mock_get_and_setxattr(*args, **kargs):
        # simulate the errno a non-xattr filesystem raises
        # (ENOTSUP is missing on some platforms; fall back to EOPNOTSUPP)
        error_num = errno.ENOTSUP if hasattr(errno, 'ENOTSUP') else \
            errno.EOPNOTSUPP
        raise IOError(error_num, 'Operation not supported')
    with mock.patch('xattr.getxattr', mock_get_and_setxattr):
        with mock.patch('xattr.setxattr', mock_get_and_setxattr):
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 507)
def test_PUT_client_timeout(self):
    """A ChunkReadTimeout while reading the PUT body returns 408."""
    class FakeTimeout(BaseException):
        # entering the timeout context raises immediately, simulating
        # a client that stops sending
        def __enter__(self):
            raise self
        def __exit__(self, typ, value, tb):
            pass
    # This is just so the test fails when run on older object server code
    # instead of exploding.
    if not hasattr(object_server, 'ChunkReadTimeout'):
        object_server.ChunkReadTimeout = None
    with mock.patch.object(object_server, 'ChunkReadTimeout', FakeTimeout):
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Content-Length': '6'})
        req.environ['wsgi.input'] = WsgiBytesIO(b'VERIFY')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 408)
def test_PUT_system_metadata(self):
    # check that sysmeta is stored in diskfile
    """X-Object-Sysmeta-* headers on PUT are written into the .data
    file's metadata alongside user meta."""
    timestamp = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'text/plain',
                 'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                 'X-Object-Meta-1': 'One',
                 'X-Object-Sysmeta-1': 'One',
                 'X-Object-Sysmeta-Two': 'Two'})
    req.body = 'VERIFY SYSMETA'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                          hash_path('a', 'c', 'o')),
        timestamp + '.data')
    self.assertTrue(os.path.isfile(objfile))
    self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
    self.assertEqual(diskfile.read_metadata(objfile),
                     {'X-Timestamp': timestamp,
                      'Content-Length': '14',
                      'Content-Type': 'text/plain',
                      'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                      'name': '/a/c/o',
                      'X-Object-Meta-1': 'One',
                      'X-Object-Sysmeta-1': 'One',
                      'X-Object-Sysmeta-Two': 'Two'})
def test_PUT_succeeds_with_later_POST(self):
    """A PUT older than an existing .meta still succeeds, and both the
    new .data file and the newer .meta file remain on disk."""
    ts_iter = make_timestamp_iter()
    t_put = next(ts_iter).internal
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': t_put,
                                 'Content-Length': 0,
                                 'Content-Type': 'plain/text'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    t_put2 = next(ts_iter).internal
    t_post = next(ts_iter).internal
    # POST with the LATEST timestamp...
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'X-Timestamp': t_post})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    # ...then a PUT that is newer than the first PUT but older than the
    # POST; this must still be accepted
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': t_put2,
                                 'Content-Length': 0,
                                 'Content-Type': 'plain/text'},
                        )
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    obj_dir = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(0), 'p',
                          hash_path('a', 'c', 'o')))
    # both the new .data and the later .meta must exist
    ts_file = os.path.join(obj_dir, t_put2 + '.data')
    self.assertTrue(os.path.isfile(ts_file))
    meta_file = os.path.join(obj_dir, t_post + '.meta')
    self.assertTrue(os.path.isfile(meta_file))
    def test_POST_system_metadata(self):
        """
        POST must update user metadata only: X-Object-Sysmeta-* headers sent
        with a POST are ignored, the original .data file's metadata is left
        intact, and the new .meta file carries only the user meta items.
        """
        # check that diskfile sysmeta is not changed by a POST
        timestamp1 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Type': 'text/plain',
                     'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Sysmeta-1': 'One',
                     'X-Object-Sysmeta-Two': 'Two'})
        req.body = 'VERIFY SYSMETA'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # POST tries (and must fail) to overwrite the sysmeta values
        timestamp2 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': timestamp2,
                     'X-Object-Meta-1': 'Not One',
                     'X-Object-Sysmeta-1': 'Not One',
                     'X-Object-Sysmeta-Two': 'Not Two'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # original .data file metadata should be unchanged
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp1 + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': timestamp1,
                          'Content-Length': '14',
                          'Content-Type': 'text/plain',
                          'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                          'name': '/a/c/o',
                          'X-Object-Meta-1': 'One',
                          'X-Object-Sysmeta-1': 'One',
                          'X-Object-Sysmeta-Two': 'Two'})
        # .meta file metadata should have only user meta items
        metafile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp2 + '.meta')
        self.assertTrue(os.path.isfile(metafile))
        self.assertEqual(diskfile.read_metadata(metafile),
                         {'X-Timestamp': timestamp2,
                          'name': '/a/c/o',
                          'X-Object-Meta-1': 'Not One'})
    def test_PUT_then_fetch_system_metadata(self):
        """
        Sysmeta and user meta stored by a PUT must both be returned on
        subsequent GET and HEAD responses.
        """
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Sysmeta-1': 'One',
                     'X-Object-Sysmeta-Two': 'Two'})
        req.body = 'VERIFY SYSMETA'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        def check_response(resp):
            # shared assertions for both the HEAD and GET responses below
            self.assertEqual(resp.status_int, 200)
            self.assertEqual(resp.content_length, 14)
            self.assertEqual(resp.content_type, 'text/plain')
            self.assertEqual(resp.headers['content-type'], 'text/plain')
            self.assertEqual(
                resp.headers['last-modified'],
                strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(math.ceil(float(timestamp)))))
            self.assertEqual(resp.headers['etag'],
                             '"1000d172764c9dbc3a5798a67ec5bb76"')
            self.assertEqual(resp.headers['x-object-meta-1'], 'One')
            self.assertEqual(resp.headers['x-object-sysmeta-1'], 'One')
            self.assertEqual(resp.headers['x-object-sysmeta-two'], 'Two')
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        check_response(resp)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        check_response(resp)
    def test_PUT_then_POST_then_fetch_system_metadata(self):
        """
        After a PUT followed by a POST, GET and HEAD must reflect the
        POST's user metadata but the PUT's (unchanged) sysmeta, and
        Last-Modified must reflect the POST timestamp.
        """
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Sysmeta-1': 'One',
                     'X-Object-Sysmeta-Two': 'Two'})
        req.body = 'VERIFY SYSMETA'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        timestamp2 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': timestamp2,
                     'X-Object-Meta-1': 'Not One',
                     'X-Object-Sysmeta-1': 'Not One',
                     'X-Object-Sysmeta-Two': 'Not Two'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        def check_response(resp):
            # user meta should be updated but not sysmeta
            self.assertEqual(resp.status_int, 200)
            self.assertEqual(resp.content_length, 14)
            self.assertEqual(resp.content_type, 'text/plain')
            self.assertEqual(resp.headers['content-type'], 'text/plain')
            self.assertEqual(
                resp.headers['last-modified'],
                strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(math.ceil(float(timestamp2)))))
            self.assertEqual(resp.headers['etag'],
                             '"1000d172764c9dbc3a5798a67ec5bb76"')
            self.assertEqual(resp.headers['x-object-meta-1'], 'Not One')
            self.assertEqual(resp.headers['x-object-sysmeta-1'], 'One')
            self.assertEqual(resp.headers['x-object-sysmeta-two'], 'Two')
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        check_response(resp)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        check_response(resp)
    def test_PUT_with_replication_headers(self):
        """
        Headers outside the server's allowed_headers set are normally
        dropped on PUT, but must be persisted when listed in
        X-Backend-Replication-Headers.
        """
        # check that otherwise disallowed headers are accepted when specified
        # by X-Backend-Replication-Headers
        # first PUT object
        timestamp1 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Type': 'text/plain',
                     'Content-Length': '14',
                     'Etag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'Custom-Header': 'custom1',
                     'X-Object-Meta-1': 'meta1',
                     'X-Static-Large-Object': 'False'})
        req.body = 'VERIFY SYSMETA'
        # restrict set of allowed headers on this server
        with mock.patch.object(self.object_controller, 'allowed_headers',
                               ['Custom-Header']):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp1 + '.data')
        # X-Static-Large-Object is disallowed.
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': timestamp1,
                          'Content-Type': 'text/plain',
                          'Content-Length': '14',
                          'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                          'name': '/a/c/o',
                          'Custom-Header': 'custom1',
                          'X-Object-Meta-1': 'meta1'})
        # PUT object again with X-Backend-Replication-Headers
        timestamp2 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp2,
                     'Content-Type': 'text/plain',
                     'Content-Length': '14',
                     'Etag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'Custom-Header': 'custom1',
                     'X-Object-Meta-1': 'meta1',
                     'X-Static-Large-Object': 'False',
                     'X-Backend-Replication-Headers':
                     'X-Static-Large-Object'})
        req.body = 'VERIFY SYSMETA'
        with mock.patch.object(self.object_controller, 'allowed_headers',
                               ['Custom-Header']):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp2 + '.data')
        # X-Static-Large-Object should be copied since it is now allowed by
        # replication headers.
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': timestamp2,
                          'Content-Type': 'text/plain',
                          'Content-Length': '14',
                          'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                          'name': '/a/c/o',
                          'Custom-Header': 'custom1',
                          'X-Object-Meta-1': 'meta1',
                          'X-Static-Large-Object': 'False'})
    def test_PUT_container_connection(self):
        """
        PUT must return 201 regardless of the container update outcome:
        a 201, a 500, or an exception from the container server must not
        fail the object PUT (the update is best-effort / async).
        """
        def mock_http_connect(response, with_exc=False):
            # factory returning a fake http_connect whose connection
            # reports the given status, or raises on getresponse()
            class FakeConn(object):
                def __init__(self, status, with_exc):
                    self.status = status
                    self.reason = 'Fake'
                    self.host = '1.2.3.4'
                    self.port = '1234'
                    self.with_exc = with_exc
                def getresponse(self):
                    if self.with_exc:
                        raise Exception('test')
                    return self
                def read(self, amt=None):
                    return ''
            return lambda *args, **kwargs: FakeConn(response, with_exc)
        # container update succeeds (201)
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new1',
                     'Content-Length': '0'})
        with fake_spawn(), mock.patch.object(
                object_server, 'http_connect',
                mock_http_connect(201)):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # container update fails (500) -- object PUT still succeeds
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new1',
                     'Content-Length': '0'})
        with fake_spawn(), mock.patch.object(
                object_server, 'http_connect',
                mock_http_connect(500)):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # container update raises -- object PUT still succeeds
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new1',
                     'Content-Length': '0'})
        with fake_spawn(), mock.patch.object(
                object_server, 'http_connect',
                mock_http_connect(500, with_exc=True)):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
    def test_PUT_ssync_multi_frag(self):
        """
        For EC policies, PUTs at the same timestamp with different frag
        indexes conflict (409) unless X-Backend-Ssync-Frag-Index marks the
        request as an ssync transfer to a primary node, in which case
        multiple frag .data files may coexist.
        """
        timestamp = utils.Timestamp(time()).internal
        def put_with_index(expected_rsp, frag_index, node_index=None):
            # PUT one EC fragment and, unless a 409 is expected, verify the
            # #<frag_index>.data file landed on disk
            data_file_tail = '#%d.data' % frag_index
            headers = {'X-Timestamp': timestamp,
                       'Content-Length': '6',
                       'Content-Type': 'application/octet-stream',
                       'X-Backend-Ssync-Frag-Index': node_index,
                       'X-Object-Sysmeta-Ec-Frag-Index': frag_index,
                       'X-Backend-Storage-Policy-Index': int(policy)}
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                headers=headers)
            req.body = 'VERIFY'
            resp = req.get_response(self.object_controller)
            self.assertEqual(
                resp.status_int, expected_rsp,
                'got %s != %s for frag_index=%s node_index=%s' % (
                    resp.status_int, expected_rsp,
                    frag_index, node_index))
            if expected_rsp == 409:
                return
            obj_dir = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(int(policy)),
                                  'p', hash_path('a', 'c', 'o')))
            data_file = os.path.join(obj_dir, timestamp) + data_file_tail
            self.assertTrue(os.path.isfile(data_file),
                            'Expected file %r not found in %r for policy %r'
                            % (data_file, os.listdir(obj_dir), int(policy)))
        for policy in POLICIES:
            if policy.policy_type == EC_POLICY:
                # upload with a ec-frag-index
                put_with_index(201, 3)
                # same timestamp will conflict a different ec-frag-index
                put_with_index(409, 2)
                # but with the ssync-frag-index (primary node) it will just
                # save both!
                put_with_index(201, 2, 2)
                # but even with the ssync-frag-index we can still get a
                # timestamp collision if the file already exists
                put_with_index(409, 3, 3)
                # FWIW, ssync will never send inconsistent indexes - but if
                # something else did, from the object server perspective ...
                # ... the ssync-frag-index is canonical on the
                # read/pre-existence check
                put_with_index(409, 7, 2)
                # ... but the ec-frag-index is canonical when it comes to on
                # disk file
                put_with_index(201, 7, 6)
    def test_PUT_durable_files(self):
        """
        A successful PUT writes an empty .durable marker file alongside the
        .data file for EC policies only; replication policies get no
        .durable file.
        """
        for policy in POLICIES:
            timestamp = utils.Timestamp(int(time())).internal
            data_file_tail = '.data'
            headers = {'X-Timestamp': timestamp,
                       'Content-Length': '6',
                       'Content-Type': 'application/octet-stream',
                       'X-Backend-Storage-Policy-Index': int(policy)}
            if policy.policy_type == EC_POLICY:
                # EC data files carry the frag index in their name
                headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
                data_file_tail = '#2.data'
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                headers=headers)
            req.body = 'VERIFY'
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            obj_dir = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(int(policy)),
                                  'p', hash_path('a', 'c', 'o')))
            data_file = os.path.join(obj_dir, timestamp) + data_file_tail
            self.assertTrue(os.path.isfile(data_file),
                            'Expected file %r not found in %r for policy %r'
                            % (data_file, os.listdir(obj_dir), int(policy)))
            durable_file = os.path.join(obj_dir, timestamp) + '.durable'
            if policy.policy_type == EC_POLICY:
                # marker must exist and be zero bytes
                self.assertTrue(os.path.isfile(durable_file))
                self.assertFalse(os.path.getsize(durable_file))
            else:
                self.assertFalse(os.path.isfile(durable_file))
            # clean up so the next policy starts from an empty dir
            rmtree(obj_dir)
    def test_HEAD(self):
        # Test swift.obj.server.ObjectController.HEAD
        """
        HEAD behavior: 400 on a container path, 404 for a missing object,
        200 with full headers after a PUT, 404 after .data removal, and
        404 with X-Backend-Timestamp after a DELETE.
        """
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertFalse('X-Backend-Timestamp' in resp.headers)
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'application/x-test',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.content_length, 6)
        self.assertEqual(resp.content_type, 'application/x-test')
        self.assertEqual(resp.headers['content-type'], 'application/x-test')
        self.assertEqual(
            resp.headers['last-modified'],
            strftime('%a, %d %b %Y %H:%M:%S GMT',
                     gmtime(math.ceil(float(timestamp)))))
        self.assertEqual(resp.headers['etag'],
                         '"0b4c12d7e0a73840c1c4f148fda3b037"')
        self.assertEqual(resp.headers['x-object-meta-1'], 'One')
        self.assertEqual(resp.headers['x-object-meta-two'], 'Two')
        # remove the .data file behind the server's back; HEAD must 404
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.data')
        os.unlink(objfile)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        sleep(.00001)
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'Content-Type': 'application/octet-stream',
                                'Content-length': '6'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        sleep(.00001)
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        # deleted object: 404 but the tombstone timestamp is exposed
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         utils.Timestamp(timestamp).internal)
    def test_HEAD_quarantine_zbyte(self):
        # Test swift.obj.server.ObjectController.HEAD
        """
        A zero-byte .data file (metadata present, body truncated) must be
        quarantined when HEAD opens it: the request returns 404 and the
        file is moved into the quarantine directory.
        """
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                             policy=POLICIES.legacy)
        disk_file.open()
        file_name = os.path.basename(disk_file._data_file)
        # rewrite the .data file with its metadata but an empty body
        with open(disk_file._data_file) as fp:
            metadata = diskfile.read_metadata(fp)
        os.unlink(disk_file._data_file)
        with open(disk_file._data_file, 'w') as fp:
            diskfile.write_metadata(fp, metadata)
        file_name = os.path.basename(disk_file._data_file)
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        # the corrupt file must have been moved to quarantine
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        self.assertEqual(os.listdir(quar_dir)[0], file_name)
def test_OPTIONS(self):
conf = {'devices': self.testdir, 'mount_check': 'false'}
server_handler = object_server.ObjectController(
conf, logger=debug_logger())
req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = server_handler.OPTIONS(req)
self.assertEqual(200, resp.status_int)
for verb in 'OPTIONS GET POST PUT DELETE HEAD REPLICATE \
SSYNC'.split():
self.assertTrue(
verb in resp.headers['Allow'].split(', '))
self.assertEqual(len(resp.headers['Allow'].split(', ')), 8)
self.assertEqual(resp.headers['Server'],
(server_handler.server_type + '/' + swift_version))
    def test_GET(self):
        # Test swift.obj.server.ObjectController.GET
        """
        GET behavior: 400 on a container path, 404 when missing, 200 with
        body and headers after a PUT, byte-range (206) handling, 404 after
        .data removal, and 404 with X-Backend-Timestamp after a DELETE.
        """
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertFalse('X-Backend-Timestamp' in resp.headers)
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body, 'VERIFY')
        self.assertEqual(resp.content_length, 6)
        self.assertEqual(resp.content_type, 'application/x-test')
        self.assertEqual(resp.headers['content-length'], '6')
        self.assertEqual(resp.headers['content-type'], 'application/x-test')
        self.assertEqual(
            resp.headers['last-modified'],
            strftime('%a, %d %b %Y %H:%M:%S GMT',
                     gmtime(math.ceil(float(timestamp)))))
        self.assertEqual(resp.headers['etag'],
                         '"0b4c12d7e0a73840c1c4f148fda3b037"')
        self.assertEqual(resp.headers['x-object-meta-1'], 'One')
        self.assertEqual(resp.headers['x-object-meta-two'], 'Two')
        # range: explicit start and end
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        req.range = 'bytes=1-3'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 206)
        self.assertEqual(resp.body, 'ERI')
        self.assertEqual(resp.headers['content-length'], '3')
        # range: open-ended
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        req.range = 'bytes=1-'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 206)
        self.assertEqual(resp.body, 'ERIFY')
        self.assertEqual(resp.headers['content-length'], '5')
        # range: suffix
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        req.range = 'bytes=-2'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 206)
        self.assertEqual(resp.body, 'FY')
        self.assertEqual(resp.headers['content-length'], '2')
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.data')
        os.unlink(objfile)
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        sleep(.00001)
        timestamp = normalize_timestamp(time())
        # NOTE(review): 'application:octet-stream' looks like a typo for
        # 'application/octet-stream', but only the status code is asserted
        # here so it is harmless; left as-is.
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'Content-Type': 'application:octet-stream',
                                'Content-Length': '6'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        sleep(.00001)
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        # deleted object: 404 but the tombstone timestamp is exposed
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         utils.Timestamp(timestamp).internal)
    def test_GET_if_match(self):
        """
        If-Match on GET: '*' matches any existing object (412 when absent),
        a matching quoted etag returns 200, non-matching etags return 412,
        and a list matches if any member matches.
        """
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': normalize_timestamp(time()),
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        etag = resp.etag
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # '*' against a nonexistent object -> 412
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Match': '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Match': '"11111111111111111111111111111111"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # a list containing the real etag matches
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={
                'If-Match': '"11111111111111111111111111111111", "%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={
                'If-Match':
                '"11111111111111111111111111111111", '
                '"22222222222222222222222222222222"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
    def test_GET_if_match_etag_is_at(self):
        """
        X-Backend-Etag-Is-At redirects If-Match evaluation to an alternate
        metadata header; when that header is absent the real etag is used.
        """
        headers = {
            'X-Timestamp': utils.Timestamp(time()).internal,
            'Content-Type': 'application/octet-stream',
            'X-Object-Meta-Xtag': 'madeup',
        }
        req = Request.blank('/sda1/p/a/c/o', method='PUT',
                            headers=headers)
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        real_etag = resp.etag
        # match x-backend-etag-is-at
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': 'madeup',
            'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # no match x-backend-etag-is-at
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': real_etag,
            'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # etag-is-at metadata doesn't exist, default to real etag
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': real_etag,
            'X-Backend-Etag-Is-At': 'X-Object-Meta-Missing'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # sanity no-match with no etag-is-at
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': 'madeup'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # sanity match with no etag-is-at
        req = Request.blank('/sda1/p/a/c/o', headers={
            'If-Match': real_etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # sanity with no if-match
        req = Request.blank('/sda1/p/a/c/o', headers={
            'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
    def test_HEAD_if_match(self):
        """
        If-Match on HEAD mirrors the GET semantics: '*' matches an existing
        object (412 when absent), matching etags 200, non-matching 412.
        """
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': normalize_timestamp(time()),
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        etag = resp.etag
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # '*' against a nonexistent object -> 412
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-Match': '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
            headers={'If-Match': '"11111111111111111111111111111111"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # a list containing the real etag matches
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
            headers={
                'If-Match': '"11111111111111111111111111111111", "%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
            headers={
                'If-Match':
                '"11111111111111111111111111111111", '
                '"22222222222222222222222222222222"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
    def test_GET_if_none_match(self):
        """
        If-None-Match on GET: a match yields 304 (with etag, content-type
        and user meta still present), '*' on a missing object 404, and a
        non-matching etag returns the full 200 response.
        """
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': normalize_timestamp(time()),
                                'X-Object-Meta-Soup': 'gazpacho',
                                'Content-Type': 'application/fizzbuzz',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        etag = resp.etag
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-None-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
        # 304 still carries content-type and user metadata
        self.assertEqual(resp.headers['Content-Type'], 'application/fizzbuzz')
        self.assertEqual(resp.headers['X-Object-Meta-Soup'], 'gazpacho')
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-None-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-None-Match': '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-None-Match': '"11111111111111111111111111111111"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # 304 if any member of the list matches
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-None-Match':
                     '"11111111111111111111111111111111", '
                     '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
    def test_HEAD_if_none_match(self):
        """
        If-None-Match on HEAD mirrors the GET semantics: a match yields 304,
        '*' on a missing object 404, and a non-matching etag returns 200.
        """
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': normalize_timestamp(time()),
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        etag = resp.etag
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-None-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-None-Match': '*'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'If-None-Match': '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
            headers={'If-None-Match': '"11111111111111111111111111111111"'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.etag, etag)
        # 304 if any member of the list matches
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
            headers={'If-None-Match':
                     '"11111111111111111111111111111111", '
                     '"%s"' % etag})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(resp.etag, etag)
    def test_GET_if_modified_since(self):
        """
        If-Modified-Since on GET: dates at/after the object's Last-Modified
        yield 304, earlier dates yield 200; Last-Modified is the object
        timestamp rounded up to whole seconds.
        """
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # one second after the object's timestamp -> not modified
        since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(float(timestamp) + 1))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        # one second before -> modified
        since = \
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 1))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        since = \
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        # the server's own Last-Modified value must produce a 304
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        since = resp.headers['Last-Modified']
        self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
                                         gmtime(math.ceil(float(timestamp)))))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        # whole-second timestamp: date equal to Last-Modified -> 304
        timestamp = normalize_timestamp(int(time()))
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(float(timestamp)))
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
def test_HEAD_if_modified_since(self):
    """HEAD honors If-Modified-Since: 304 when the object has not been
    modified after the given date, 200 when it has.
    """
    timestamp = normalize_timestamp(time())
    # Create the object.
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers={
                            'X-Timestamp': timestamp,
                            'Content-Type': 'application/octet-stream',
                            'Content-Length': '4'})
    req.body = 'test'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # Unconditional HEAD succeeds.
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'HEAD'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 200)
    # A date after the object's timestamp -> not modified since -> 304.
    since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                     gmtime(float(timestamp) + 1))
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'HEAD'},
                        headers={'If-Modified-Since': since})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 304)
    # A date before the object's timestamp -> modified since -> 200.
    since = \
        strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 1))
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'HEAD'},
                        headers={'If-Modified-Since': since})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 200)
    # After the timestamp again -> 304.
    since = \
        strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1))
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'HEAD'},
                        headers={'If-Modified-Since': since})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 304)
    # The advertised Last-Modified is the timestamp rounded up to whole
    # seconds; a HEAD conditional on that exact date is a 304.
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'HEAD'})
    resp = req.get_response(self.object_controller)
    since = resp.headers['Last-Modified']
    self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
                                     gmtime(math.ceil(float(timestamp)))))
    req = Request.blank('/sda1/p/a/c/o',
                        environ={'REQUEST_METHOD': 'HEAD'},
                        headers={'If-Modified-Since': since})
    # Fix: the original called self.object_controller.GET(req) directly,
    # bypassing WSGI dispatch even though the request method is HEAD.
    # Go through get_response() so the real HEAD path is exercised,
    # consistent with every other request in this test.
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 304)
    # An object created at a whole-second timestamp is not modified
    # since that very second.
    timestamp = normalize_timestamp(int(time()))
    req = Request.blank('/sda1/p/a/c/o2',
                        environ={'REQUEST_METHOD': 'PUT'},
                        headers={
                            'X-Timestamp': timestamp,
                            'Content-Type': 'application/octet-stream',
                            'Content-Length': '4'})
    req.body = 'test'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                     gmtime(float(timestamp)))
    req = Request.blank('/sda1/p/a/c/o2',
                        environ={'REQUEST_METHOD': 'HEAD'},
                        headers={'If-Modified-Since': since})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 304)
def test_GET_if_unmodified_since(self):
    """GET honors If-Unmodified-Since: 200 when the object predates the
    given date, 412 (with the object's metadata echoed) when it does not.
    """
    def http_date(epoch):
        return strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(epoch))

    timestamp = normalize_timestamp(time())
    put_req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'X-Object-Meta-Burr': 'ito',
                 'Content-Type': 'application/cat-picture',
                 'Content-Length': '4'})
    put_req.body = 'test'
    self.assertEqual(
        put_req.get_response(self.object_controller).status_int, 201)

    # Unconditional GET succeeds.
    plain_get = Request.blank('/sda1/p/a/c/o',
                              environ={'REQUEST_METHOD': 'GET'})
    self.assertEqual(
        plain_get.get_response(self.object_controller).status_int, 200)

    def conditional_get(since):
        r = Request.blank('/sda1/p/a/c/o',
                          environ={'REQUEST_METHOD': 'GET'},
                          headers={'If-Unmodified-Since': since})
        return r.get_response(self.object_controller)

    # A date after the object's timestamp: unmodified since -> 200.
    self.assertEqual(
        conditional_get(http_date(float(timestamp) + 1)).status_int, 200)

    # A date before the object's timestamp -> 412, and the 412 response
    # still carries the object's content-type and user metadata.
    resp = conditional_get(http_date(float(timestamp) - 9))
    self.assertEqual(resp.status_int, 412)
    self.assertEqual(resp.headers['Content-Type'],
                     'application/cat-picture')
    self.assertEqual(resp.headers['X-Object-Meta-Burr'], 'ito')

    # Well after the timestamp -> 200 again.
    self.assertEqual(
        conditional_get(http_date(float(timestamp) + 9)).status_int, 200)

    # The advertised Last-Modified (timestamp rounded up to whole
    # seconds) also satisfies the precondition.
    head_req = Request.blank('/sda1/p/a/c/o',
                             environ={'REQUEST_METHOD': 'HEAD'})
    head_resp = head_req.get_response(self.object_controller)
    since = head_resp.headers['Last-Modified']
    self.assertEqual(since, http_date(math.ceil(float(timestamp))))
    self.assertEqual(conditional_get(since).status_int, 200)
def test_HEAD_if_unmodified_since(self):
    """HEAD honors If-Unmodified-Since relative to Last-Modified, which
    is the object's timestamp rounded up to a whole second.
    """
    def http_date(epoch):
        return strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(epoch))

    timestamp = normalize_timestamp(time())
    put_req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp,
                 'Content-Type': 'application/octet-stream',
                 'Content-Length': '4'})
    put_req.body = 'test'
    self.assertEqual(
        put_req.get_response(self.object_controller).status_int, 201)

    def conditional_head(since):
        r = Request.blank('/sda1/p/a/c/o',
                          environ={'REQUEST_METHOD': 'HEAD'},
                          headers={'If-Unmodified-Since': since})
        return r.get_response(self.object_controller)

    last_modified = math.ceil(float(timestamp))
    # One second after Last-Modified: unmodified since -> 200.
    self.assertEqual(
        conditional_head(http_date(last_modified + 1)).status_int, 200)
    # Exactly Last-Modified: still unmodified since -> 200.
    self.assertEqual(
        conditional_head(http_date(last_modified)).status_int, 200)
    # One second before Last-Modified: precondition fails -> 412.
    self.assertEqual(
        conditional_head(http_date(last_modified - 1)).status_int, 412)
def test_GET_quarantine(self):
    # Test swift.obj.server.ObjectController.GET
    #
    # Corrupt an on-disk object's metadata (ETag computed over 'VERIF'
    # while the stored body is 'VERIFY') and verify that streaming the
    # body moves the file into the quarantine directory.
    timestamp = normalize_timestamp(time())
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': timestamp,
                                 'Content-Type': 'application/x-test'})
    req.body = 'VERIFY'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                         policy=POLICIES.legacy)
    disk_file.open()
    file_name = os.path.basename(disk_file._data_file)
    etag = md5()
    # Deliberately wrong checksum: hash of 'VERIF', not 'VERIFY'.
    etag.update('VERIF')
    etag = etag.hexdigest()
    metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o',
                'Content-Length': 6, 'ETag': etag}
    diskfile.write_metadata(disk_file._fp, metadata)
    self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
    req = Request.blank('/sda1/p/a/c/o')
    resp = req.get_response(self.object_controller)
    quar_dir = os.path.join(
        self.testdir, 'sda1', 'quarantined', 'objects',
        os.path.basename(os.path.dirname(disk_file._data_file)))
    # Not quarantined yet: the ETag mismatch is only detected while the
    # body is being read out.
    self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
    body = resp.body  # actually does quarantining
    self.assertEqual(body, 'VERIFY')
    # The data file has been moved into quarantine...
    self.assertEqual(os.listdir(quar_dir)[0], file_name)
    # ...so a subsequent GET no longer finds the object.
    req = Request.blank('/sda1/p/a/c/o')
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 404)
def test_GET_quarantine_zbyte(self):
    # Test swift.obj.server.ObjectController.GET
    #
    # Replace the object's data file with a zero-length file carrying
    # the original metadata; the GET must 404 and quarantine the file.
    timestamp = normalize_timestamp(time())
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': timestamp,
                                 'Content-Type': 'application/x-test'})
    req.body = 'VERIFY'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                         policy=POLICIES.legacy)
    disk_file.open()
    file_name = os.path.basename(disk_file._data_file)
    # Preserve the xattr metadata, truncate the file to zero bytes, then
    # write the metadata back onto the empty file.
    with open(disk_file._data_file) as fp:
        metadata = diskfile.read_metadata(fp)
    os.unlink(disk_file._data_file)
    with open(disk_file._data_file, 'w') as fp:
        diskfile.write_metadata(fp, metadata)
    self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
    # The zero-byte data file is detected on open, so the GET itself
    # returns 404 (unlike the ETag case, no body read is needed).
    req = Request.blank('/sda1/p/a/c/o')
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 404)
    quar_dir = os.path.join(
        self.testdir, 'sda1', 'quarantined', 'objects',
        os.path.basename(os.path.dirname(disk_file._data_file)))
    self.assertEqual(os.listdir(quar_dir)[0], file_name)
def test_GET_quarantine_range(self):
    # Test swift.obj.server.ObjectController.GET
    #
    # With a corrupted ETag, partial (Range) reads must NOT quarantine
    # the object -- only a full-body read can validate the checksum.
    timestamp = normalize_timestamp(time())
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers={'X-Timestamp': timestamp,
                                 'Content-Type': 'application/x-test'})
    req.body = 'VERIFY'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                         policy=POLICIES.legacy)
    disk_file.open()
    file_name = os.path.basename(disk_file._data_file)
    etag = md5()
    # Deliberately wrong checksum: hash of 'VERIF', not 'VERIFY'.
    etag.update('VERIF')
    etag = etag.hexdigest()
    metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o',
                'Content-Length': 6, 'ETag': etag}
    diskfile.write_metadata(disk_file._fp, metadata)
    self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
    req = Request.blank('/sda1/p/a/c/o')
    req.range = 'bytes=0-4'  # partial
    resp = req.get_response(self.object_controller)
    quar_dir = os.path.join(
        self.testdir, 'sda1', 'quarantined', 'objects',
        os.path.basename(os.path.dirname(disk_file._data_file)))
    # Reading the (partial) body must not trigger quarantining.
    resp.body
    self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
    self.assertFalse(os.path.isdir(quar_dir))
    req = Request.blank('/sda1/p/a/c/o')
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 200)
    req = Request.blank('/sda1/p/a/c/o')
    req.range = 'bytes=1-6'  # partial
    resp = req.get_response(self.object_controller)
    quar_dir = os.path.join(
        self.testdir, 'sda1', 'quarantined', 'objects',
        os.path.basename(os.path.dirname(disk_file._data_file)))
    # Still partial (does not start at byte 0): no quarantine.
    resp.body
    self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
    self.assertFalse(os.path.isdir(quar_dir))
    req = Request.blank('/sda1/p/a/c/o')
    req.range = 'bytes=0-14'  # full
    resp = req.get_response(self.object_controller)
    quar_dir = os.path.join(
        self.testdir, 'sda1', 'quarantined', 'objects',
        os.path.basename(os.path.dirname(disk_file._data_file)))
    self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
    # A range covering the whole object reads the full body, detects the
    # ETag mismatch, and quarantines the file.
    resp.body
    self.assertTrue(os.path.isdir(quar_dir))
    req = Request.blank('/sda1/p/a/c/o')
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 404)
@mock.patch("time.time", mock_time)
def test_DELETE(self):
# Test swift.obj.server.ObjectController.DELETE
req = Request.blank('/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
# The following should have created a tombstone file
timestamp = normalize_timestamp(1000)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
ts_1000_file = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertTrue(os.path.isfile(ts_1000_file))
# There should now be a 1000 ts file.
self.assertEqual(len(os.listdir(os.path.dirname(ts_1000_file))), 1)
# The following should *not* have created a tombstone file.
timestamp = normalize_timestamp(999)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
ts_999_file = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertFalse(os.path.isfile(ts_999_file))
self.assertTrue(os.path.isfile(ts_1000_file))
self.assertEqual(len(os.listdir(os.path.dirname(ts_1000_file))), 1)
orig_timestamp = utils.Timestamp(1002).internal
headers = {'X-Timestamp': orig_timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'}
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# There should now be 1000 ts and a 1001 data file.
data_1002_file = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
orig_timestamp + '.data')
self.assertTrue(os.path.isfile(data_1002_file))
self.assertEqual(len(os.listdir(os.path.dirname(data_1002_file))), 1)
# The following should *not* have created a tombstone file.
timestamp = normalize_timestamp(1001)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
ts_1001_file = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertFalse(os.path.isfile(ts_1001_file))
self.assertTrue(os.path.isfile(data_1002_file))
self.assertEqual(len(os.listdir(os.path.dirname(ts_1001_file))), 1)
timestamp = normalize_timestamp(1003)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
ts_1003_file = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertTrue(os.path.isfile(ts_1003_file))
self.assertEqual(len(os.listdir(os.path.dirname(ts_1003_file))), 1)
def test_DELETE_succeeds_with_later_POST(self):
    """A DELETE whose timestamp falls between an earlier PUT and a later
    POST still succeeds; both the tombstone and the newer .meta file
    remain on disk afterwards.
    """
    ts_iter = make_timestamp_iter()
    t_put = next(ts_iter).internal
    put_req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': t_put,
                                     'Content-Length': 0,
                                     'Content-Type': 'plain/text'})
    self.assertEqual(
        put_req.get_response(self.object_controller).status_int, 201)

    t_delete = next(ts_iter).internal
    t_post = next(ts_iter).internal
    # POST first (newer timestamp)...
    post_req = Request.blank('/sda1/p/a/c/o',
                             environ={'REQUEST_METHOD': 'POST'},
                             headers={'X-Timestamp': t_post})
    self.assertEqual(
        post_req.get_response(self.object_controller).status_int, 202)
    # ...then DELETE with the older (but post-PUT) timestamp.
    delete_req = Request.blank('/sda1/p/a/c/o',
                               environ={'REQUEST_METHOD': 'DELETE'},
                               headers={'X-Timestamp': t_delete},
                               )
    self.assertEqual(
        delete_req.get_response(self.object_controller).status_int, 204)

    obj_dir = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(0), 'p',
                          hash_path('a', 'c', 'o')))
    # Both the tombstone and the later .meta survive.
    self.assertTrue(
        os.path.isfile(os.path.join(obj_dir, t_delete + '.ts')))
    self.assertTrue(
        os.path.isfile(os.path.join(obj_dir, t_post + '.meta')))
def test_DELETE_container_updates(self):
    # Test swift.obj.server.ObjectController.DELETE and container
    # updates, making sure container update is called in the correct
    # state.
    #
    # container_update is stubbed out with a call counter; only DELETEs
    # that actually change on-disk state (204 or replicating 404) may
    # trigger it, never a 409 from a too-old timestamp.
    start = time()
    orig_timestamp = utils.Timestamp(start)
    headers = {'X-Timestamp': orig_timestamp.internal,
               'Content-Type': 'application/octet-stream',
               'Content-Length': '4'}
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers=headers)
    req.body = 'test'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)

    # Single-element list so the closure can mutate the counter.
    calls_made = [0]

    def our_container_update(*args, **kwargs):
        calls_made[0] += 1

    orig_cu = self.object_controller.container_update
    self.object_controller.container_update = our_container_update
    try:
        # The following request should return 409 (HTTP Conflict). A
        # tombstone file should not have been created with this timestamp.
        timestamp = utils.Timestamp(start - 0.00001)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp.internal})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['x-backend-timestamp'],
                         orig_timestamp.internal)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertFalse(os.path.isfile(objfile))
        self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
        self.assertEqual(0, calls_made[0])

        # The following request should return 204, and the object should
        # be truly deleted (container update is performed) because this
        # timestamp is newer. A tombstone file should have been created
        # with this timestamp.
        timestamp = utils.Timestamp(start + 0.00001)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp.internal})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(1, calls_made[0])
        self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)

        # The following request should return a 404, as the object should
        # already have been deleted, but it should have also performed a
        # container update because the timestamp is newer, and a tombstone
        # file should also exist with this timestamp.
        timestamp = utils.Timestamp(start + 0.00002)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp.internal})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(2, calls_made[0])
        self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)

        # The following request should return a 404, as the object should
        # already have been deleted, and it should not have performed a
        # container update because the timestamp is older, or created a
        # tombstone file with this timestamp.
        timestamp = utils.Timestamp(start + 0.00001)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp.internal})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertFalse(os.path.isfile(objfile))
        self.assertEqual(2, calls_made[0])
        self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
    finally:
        # Always restore the real container_update for later tests.
        self.object_controller.container_update = orig_cu
def test_object_update_with_offset(self):
    # Timestamps with an offset (same second, offset=1) must win over the
    # un-offset timestamp for PUT and DELETE, and each state-changing
    # request must emit exactly one container update with the headers
    # checked below.
    ts = (utils.Timestamp(t).internal for t in
          itertools.count(int(time())))
    container_updates = []

    def capture_updates(ip, port, method, path, headers, *args, **kwargs):
        # Record every container-server request the object server makes.
        container_updates.append((ip, port, method, path, headers))

    # create a new object
    create_timestamp = next(ts)
    req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test1',
                        headers={'X-Timestamp': create_timestamp,
                                 'X-Container-Host': '10.0.0.1:8080',
                                 'X-Container-Device': 'sda1',
                                 'X-Container-Partition': 'p',
                                 'Content-Type': 'text/plain'})
    with fake_spawn(), mocked_http_conn(
            200, give_connect=capture_updates) as fake_conn:
        resp = req.get_response(self.object_controller)
    # All mocked container responses must have been consumed.
    self.assertRaises(StopIteration, fake_conn.code_iter.next)
    self.assertEqual(resp.status_int, 201)
    self.assertEqual(1, len(container_updates))
    for update in container_updates:
        ip, port, method, path, headers = update
        self.assertEqual(ip, '10.0.0.1')
        self.assertEqual(port, '8080')
        self.assertEqual(method, 'PUT')
        self.assertEqual(path, '/sda1/p/a/c/o')
        expected = {
            'X-Size': len('test1'),
            'X-Etag': md5('test1').hexdigest(),
            'X-Content-Type': 'text/plain',
            'X-Timestamp': create_timestamp,
        }
        for key, value in expected.items():
            self.assertEqual(headers[key], str(value))
    container_updates = []  # reset
    # read back object
    req = Request.blank('/sda1/p/a/c/o', method='GET')
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 200)
    self.assertEqual(resp.headers['X-Timestamp'],
                     utils.Timestamp(create_timestamp).normal)
    self.assertEqual(resp.headers['X-Backend-Timestamp'],
                     create_timestamp)
    self.assertEqual(resp.body, 'test1')
    # send an update with an offset
    offset_timestamp = utils.Timestamp(
        create_timestamp, offset=1).internal
    req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test2',
                        headers={'X-Timestamp': offset_timestamp,
                                 'X-Container-Host': '10.0.0.1:8080',
                                 'X-Container-Device': 'sda1',
                                 'X-Container-Partition': 'p',
                                 'Content-Type': 'text/html'})
    with fake_spawn(), mocked_http_conn(
            200, give_connect=capture_updates) as fake_conn:
        resp = req.get_response(self.object_controller)
    self.assertRaises(StopIteration, fake_conn.code_iter.next)
    # Accepted: the offset makes it newer than the original PUT.
    self.assertEqual(resp.status_int, 201)
    self.assertEqual(1, len(container_updates))
    for update in container_updates:
        ip, port, method, path, headers = update
        self.assertEqual(ip, '10.0.0.1')
        self.assertEqual(port, '8080')
        self.assertEqual(method, 'PUT')
        self.assertEqual(path, '/sda1/p/a/c/o')
        expected = {
            'X-Size': len('test2'),
            'X-Etag': md5('test2').hexdigest(),
            'X-Content-Type': 'text/html',
            'X-Timestamp': offset_timestamp,
        }
        for key, value in expected.items():
            self.assertEqual(headers[key], str(value))
    container_updates = []  # reset
    # read back new offset
    req = Request.blank('/sda1/p/a/c/o', method='GET')
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 200)
    self.assertEqual(resp.headers['X-Timestamp'],
                     utils.Timestamp(offset_timestamp).normal)
    self.assertEqual(resp.headers['X-Backend-Timestamp'],
                     offset_timestamp)
    self.assertEqual(resp.body, 'test2')
    # now overwrite with a newer time
    overwrite_timestamp = next(ts)
    req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test3',
                        headers={'X-Timestamp': overwrite_timestamp,
                                 'X-Container-Host': '10.0.0.1:8080',
                                 'X-Container-Device': 'sda1',
                                 'X-Container-Partition': 'p',
                                 'Content-Type': 'text/enriched'})
    with fake_spawn(), mocked_http_conn(
            200, give_connect=capture_updates) as fake_conn:
        resp = req.get_response(self.object_controller)
    self.assertRaises(StopIteration, fake_conn.code_iter.next)
    self.assertEqual(resp.status_int, 201)
    self.assertEqual(1, len(container_updates))
    for update in container_updates:
        ip, port, method, path, headers = update
        self.assertEqual(ip, '10.0.0.1')
        self.assertEqual(port, '8080')
        self.assertEqual(method, 'PUT')
        self.assertEqual(path, '/sda1/p/a/c/o')
        expected = {
            'X-Size': len('test3'),
            'X-Etag': md5('test3').hexdigest(),
            'X-Content-Type': 'text/enriched',
            'X-Timestamp': overwrite_timestamp,
        }
        for key, value in expected.items():
            self.assertEqual(headers[key], str(value))
    container_updates = []  # reset
    # read back overwrite
    req = Request.blank('/sda1/p/a/c/o', method='GET')
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 200)
    self.assertEqual(resp.headers['X-Timestamp'],
                     utils.Timestamp(overwrite_timestamp).normal)
    self.assertEqual(resp.headers['X-Backend-Timestamp'],
                     overwrite_timestamp)
    self.assertEqual(resp.body, 'test3')
    # delete with an offset
    offset_delete = utils.Timestamp(overwrite_timestamp,
                                    offset=1).internal
    req = Request.blank('/sda1/p/a/c/o', method='DELETE',
                        headers={'X-Timestamp': offset_delete,
                                 'X-Container-Host': '10.0.0.1:8080',
                                 'X-Container-Device': 'sda1',
                                 'X-Container-Partition': 'p'})
    with fake_spawn(), mocked_http_conn(
            200, give_connect=capture_updates) as fake_conn:
        resp = req.get_response(self.object_controller)
    self.assertRaises(StopIteration, fake_conn.code_iter.next)
    self.assertEqual(resp.status_int, 204)
    self.assertEqual(1, len(container_updates))
    for update in container_updates:
        ip, port, method, path, headers = update
        self.assertEqual(ip, '10.0.0.1')
        self.assertEqual(port, '8080')
        self.assertEqual(method, 'DELETE')
        self.assertEqual(path, '/sda1/p/a/c/o')
        expected = {
            'X-Timestamp': offset_delete,
        }
        for key, value in expected.items():
            self.assertEqual(headers[key], str(value))
    container_updates = []  # reset
    # read back offset delete
    req = Request.blank('/sda1/p/a/c/o', method='GET')
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 404)
    # 404s expose only the backend timestamp, not X-Timestamp.
    self.assertEqual(resp.headers['X-Timestamp'], None)
    self.assertEqual(resp.headers['X-Backend-Timestamp'], offset_delete)
    # and one more delete with a newer timestamp
    delete_timestamp = next(ts)
    req = Request.blank('/sda1/p/a/c/o', method='DELETE',
                        headers={'X-Timestamp': delete_timestamp,
                                 'X-Container-Host': '10.0.0.1:8080',
                                 'X-Container-Device': 'sda1',
                                 'X-Container-Partition': 'p'})
    with fake_spawn(), mocked_http_conn(
            200, give_connect=capture_updates) as fake_conn:
        resp = req.get_response(self.object_controller)
    self.assertRaises(StopIteration, fake_conn.code_iter.next)
    # 404 (already deleted) but the newer tombstone still triggers a
    # container update.
    self.assertEqual(resp.status_int, 404)
    self.assertEqual(1, len(container_updates))
    for update in container_updates:
        ip, port, method, path, headers = update
        self.assertEqual(ip, '10.0.0.1')
        self.assertEqual(port, '8080')
        self.assertEqual(method, 'DELETE')
        self.assertEqual(path, '/sda1/p/a/c/o')
        expected = {
            'X-Timestamp': delete_timestamp,
        }
        for key, value in expected.items():
            self.assertEqual(headers[key], str(value))
    container_updates = []  # reset
    # read back delete
    req = Request.blank('/sda1/p/a/c/o', method='GET')
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 404)
    self.assertEqual(resp.headers['X-Timestamp'], None)
    self.assertEqual(resp.headers['X-Backend-Timestamp'], delete_timestamp)
def test_call_bad_request(self):
    # Test swift.obj.server.ObjectController.__call__
    #
    # A PUT without the required headers must produce a 400 status.
    inbuf = WsgiBytesIO()
    errbuf = StringIO()
    outbuf = StringIO()

    def start_response(*args):
        """Sends args to outbuf"""
        outbuf.writelines(args)

    env = {'REQUEST_METHOD': 'PUT',
           'SCRIPT_NAME': '',
           'PATH_INFO': '/sda1/p/a/c/o',
           'SERVER_NAME': '127.0.0.1',
           'SERVER_PORT': '8080',
           'SERVER_PROTOCOL': 'HTTP/1.0',
           'CONTENT_LENGTH': '0',
           'wsgi.version': (1, 0),
           'wsgi.url_scheme': 'http',
           'wsgi.input': inbuf,
           'wsgi.errors': errbuf,
           'wsgi.multithread': False,
           'wsgi.multiprocess': False,
           'wsgi.run_once': False}
    # Drive the controller directly as a WSGI application.
    self.object_controller(env, start_response)
    self.assertEqual(errbuf.getvalue(), '')
    self.assertEqual(outbuf.getvalue()[:4], '400 ')
def test_call_not_found(self):
    """GET for a nonexistent object through __call__ must yield 404."""
    inbuf = WsgiBytesIO()
    errbuf = StringIO()
    outbuf = StringIO()

    def start_response(*args):
        """Sends args to outbuf"""
        outbuf.writelines(args)

    env = {'REQUEST_METHOD': 'GET',
           'SCRIPT_NAME': '',
           'PATH_INFO': '/sda1/p/a/c/o',
           'SERVER_NAME': '127.0.0.1',
           'SERVER_PORT': '8080',
           'SERVER_PROTOCOL': 'HTTP/1.0',
           'CONTENT_LENGTH': '0',
           'wsgi.version': (1, 0),
           'wsgi.url_scheme': 'http',
           'wsgi.input': inbuf,
           'wsgi.errors': errbuf,
           'wsgi.multithread': False,
           'wsgi.multiprocess': False,
           'wsgi.run_once': False}
    # Drive the controller directly as a WSGI application.
    self.object_controller(env, start_response)
    self.assertEqual(errbuf.getvalue(), '')
    self.assertEqual(outbuf.getvalue()[:4], '404 ')
def test_call_bad_method(self):
    """An unsupported request method through __call__ must yield 405."""
    inbuf = WsgiBytesIO()
    errbuf = StringIO()
    outbuf = StringIO()

    def start_response(*args):
        """Sends args to outbuf"""
        outbuf.writelines(args)

    env = {'REQUEST_METHOD': 'INVALID',
           'SCRIPT_NAME': '',
           'PATH_INFO': '/sda1/p/a/c/o',
           'SERVER_NAME': '127.0.0.1',
           'SERVER_PORT': '8080',
           'SERVER_PROTOCOL': 'HTTP/1.0',
           'CONTENT_LENGTH': '0',
           'wsgi.version': (1, 0),
           'wsgi.url_scheme': 'http',
           'wsgi.input': inbuf,
           'wsgi.errors': errbuf,
           'wsgi.multithread': False,
           'wsgi.multiprocess': False,
           'wsgi.run_once': False}
    # Drive the controller directly as a WSGI application.
    self.object_controller(env, start_response)
    self.assertEqual(errbuf.getvalue(), '')
    self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_call_name_collision(self):
    # Force every object name to the same on-disk hash and disable the
    # creation checks; the second PUT (a different path with the same
    # hash) must then be rejected with 403.
    def my_check(*args):
        return False

    def my_hash_path(*args):
        # Every object path hashes to the same value.
        return md5('collide').hexdigest()

    with mock.patch("swift.obj.diskfile.hash_path", my_hash_path):
        with mock.patch("swift.obj.server.check_object_creation",
                        my_check):
            # First PUT of /a/c/o succeeds.
            inbuf = WsgiBytesIO()
            errbuf = StringIO()
            outbuf = StringIO()

            def start_response(*args):
                """Sends args to outbuf"""
                outbuf.writelines(args)

            self.object_controller.__call__({
                'REQUEST_METHOD': 'PUT',
                'SCRIPT_NAME': '',
                'PATH_INFO': '/sda1/p/a/c/o',
                'SERVER_NAME': '127.0.0.1',
                'SERVER_PORT': '8080',
                'SERVER_PROTOCOL': 'HTTP/1.0',
                'CONTENT_LENGTH': '0',
                'CONTENT_TYPE': 'text/html',
                'HTTP_X_TIMESTAMP': normalize_timestamp(1.2),
                'wsgi.version': (1, 0),
                'wsgi.url_scheme': 'http',
                'wsgi.input': inbuf,
                'wsgi.errors': errbuf,
                'wsgi.multithread': False,
                'wsgi.multiprocess': False,
                'wsgi.run_once': False},
                start_response)
            self.assertEqual(errbuf.getvalue(), '')
            self.assertEqual(outbuf.getvalue()[:4], '201 ')

            # Second PUT of a different name with the same hash -> 403.
            inbuf = WsgiBytesIO()
            errbuf = StringIO()
            outbuf = StringIO()

            def start_response(*args):
                """Sends args to outbuf"""
                outbuf.writelines(args)

            self.object_controller.__call__({
                'REQUEST_METHOD': 'PUT',
                'SCRIPT_NAME': '',
                'PATH_INFO': '/sda1/p/b/d/x',
                'SERVER_NAME': '127.0.0.1',
                'SERVER_PORT': '8080',
                'SERVER_PROTOCOL': 'HTTP/1.0',
                'CONTENT_LENGTH': '0',
                'CONTENT_TYPE': 'text/html',
                'HTTP_X_TIMESTAMP': normalize_timestamp(1.3),
                'wsgi.version': (1, 0),
                'wsgi.url_scheme': 'http',
                'wsgi.input': inbuf,
                'wsgi.errors': errbuf,
                'wsgi.multithread': False,
                'wsgi.multiprocess': False,
                'wsgi.run_once': False},
                start_response)
            self.assertEqual(errbuf.getvalue(), '')
            self.assertEqual(outbuf.getvalue()[:4], '403 ')
def test_invalid_method_doesnt_exist(self):
    """A request method with no matching handler attribute yields 405."""
    errbuf = StringIO()
    outbuf = StringIO()

    def start_response(*args):
        outbuf.writelines(args)

    env = {'REQUEST_METHOD': 'method_doesnt_exist',
           'PATH_INFO': '/sda1/p/a/c/o'}
    self.object_controller(env, start_response)
    self.assertEqual(errbuf.getvalue(), '')
    self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_invalid_method_is_not_public(self):
    """A request method naming a non-public attribute yields 405."""
    errbuf = StringIO()
    outbuf = StringIO()

    def start_response(*args):
        outbuf.writelines(args)

    env = {'REQUEST_METHOD': '__init__',
           'PATH_INFO': '/sda1/p/a/c/o'}
    self.object_controller(env, start_response)
    self.assertEqual(errbuf.getvalue(), '')
    self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_chunked_put(self):
    # Run the object server behind a real (eventlet) WSGI listener and
    # PUT a body with Transfer-Encoding: chunked over a raw socket, then
    # GET it back and check the de-chunked body.
    listener = listen(('localhost', 0))
    port = listener.getsockname()[1]
    killer = spawn(wsgi.server, listener, self.object_controller,
                   NullLogger())
    sock = connect_tcp(('localhost', port))
    fd = sock.makefile()
    # Two chunks: 'oh' (2 bytes) and ' hai' (4 bytes), then terminator.
    fd.write('PUT /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
             'Content-Type: text/plain\r\n'
             'Connection: close\r\nX-Timestamp: %s\r\n'
             'Transfer-Encoding: chunked\r\n\r\n'
             '2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n' % normalize_timestamp(
                 1.0))
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEqual(headers[:len(exp)], exp)
    sock = connect_tcp(('localhost', port))
    fd = sock.makefile()
    fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEqual(headers[:len(exp)], exp)
    response = fd.read()
    self.assertEqual(response, 'oh hai')
    # Tear down the background WSGI server.
    killer.kill()
def test_chunked_content_length_mismatch_zero(self):
    # As test_chunked_put, but the request also carries a (wrong)
    # Content-Length: 0 alongside Transfer-Encoding: chunked; the
    # chunked body must win and the full 'oh hai' be stored.
    listener = listen(('localhost', 0))
    port = listener.getsockname()[1]
    killer = spawn(wsgi.server, listener, self.object_controller,
                   NullLogger())
    sock = connect_tcp(('localhost', port))
    fd = sock.makefile()
    fd.write('PUT /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
             'Content-Type: text/plain\r\n'
             'Connection: close\r\nX-Timestamp: %s\r\n'
             'Content-Length: 0\r\n'
             'Transfer-Encoding: chunked\r\n\r\n'
             '2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n' % normalize_timestamp(
                 1.0))
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEqual(headers[:len(exp)], exp)
    sock = connect_tcp(('localhost', port))
    fd = sock.makefile()
    fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEqual(headers[:len(exp)], exp)
    response = fd.read()
    self.assertEqual(response, 'oh hai')
    # Tear down the background WSGI server.
    killer.kill()
def test_max_object_name_length(self):
timestamp = normalize_timestamp(time())
max_name_len = constraints.MAX_OBJECT_NAME_LENGTH
req = Request.blank(
'/sda1/p/a/c/' + ('1' * max_name_len),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'DATA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/' + ('2' * (max_name_len + 1)),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'DATA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
    def test_max_upload_time(self):
        """A PUT whose body arrives slower than max_upload_time gets 408."""
        class SlowBody(object):
            # wsgi.input stand-in: yields one byte per read(), sleeping
            # 0.1s before each of the first four reads, then EOF
            def __init__(self):
                self.sent = 0

            def read(self, size=-1):
                if self.sent < 4:
                    sleep(0.1)
                    self.sent += 1
                    return ' '
                return ''

            def set_hundred_continue_response_headers(*a, **kw):
                pass
        # with the default max_upload_time the slow body still succeeds
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4', 'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # shrink the allowed upload time below the body's ~0.4s total delay
        self.object_controller.max_upload_time = 0.1
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4', 'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 408)
    def test_short_body(self):
        """A body shorter than the declared Content-Length yields 499
        (client disconnect)."""
        class ShortBody(object):
            # wsgi.input stand-in: delivers a single byte, then EOF,
            # despite the request advertising Content-Length: 4
            def __init__(self):
                self.sent = False

            def read(self, size=-1):
                if not self.sent:
                    self.sent = True
                    return ' '
                return ''

            def set_hundred_continue_response_headers(*a, **kw):
                pass
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': ShortBody()},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4', 'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 499)
    def test_bad_sinces(self):
        """Unparseable or out-of-range If-(Un)Modified-Since headers are
        ignored, so the GET still returns 200."""
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4', 'Content-Type': 'text/plain'},
            body='    ')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # garbage If-Unmodified-Since is ignored
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Unmodified-Since': 'Not a valid date'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # garbage If-Modified-Since is ignored
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Modified-Since': 'Not a valid date'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # a date beyond datetime.max also cannot be parsed -> ignored
        too_big_date_list = list(datetime.datetime.max.timetuple())
        too_big_date_list[0] += 1  # bump up the year
        too_big_date = strftime(
            "%a, %d %b %Y %H:%M:%S UTC", struct_time(too_big_date_list))
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Unmodified-Since': too_big_date})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
def test_content_encoding(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4', 'Content-Type': 'text/plain',
'Content-Encoding': 'gzip'},
body=' ')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['content-encoding'], 'gzip')
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['content-encoding'], 'gzip')
    def test_async_update_http_connect(self):
        """async_update forwards host/port/device/partition/method/path and
        headers (with a user-agent added) to object_server.http_connect."""
        policy = random.choice(list(POLICIES))
        self._stage_tmp_dir(policy)
        given_args = []

        def fake_http_connect(*args):
            given_args.extend(args)
            # raising here also exercises async_update's error path
            raise Exception('test')
        orig_http_connect = object_server.http_connect
        try:
            object_server.http_connect = fake_http_connect
            self.object_controller.async_update(
                'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                {'x-timestamp': '1', 'x-out': 'set',
                 'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
                policy)
        finally:
            object_server.http_connect = orig_http_connect
        # the 'host:port' string is split and a user-agent header is added
        self.assertEqual(
            given_args,
            ['127.0.0.1', '1234', 'sdc1', 1, 'PUT', '/a/c/o', {
                'x-timestamp': '1', 'x-out': 'set',
                'user-agent': 'object-server %s' % os.getpid(),
                'X-Backend-Storage-Policy-Index': int(policy)}])
    @patch_policies([StoragePolicy(0, 'zero', True),
                     StoragePolicy(1, 'one'),
                     StoragePolicy(37, 'fantastico')])
    def test_updating_multiple_delete_at_container_servers(self):
        """A PUT with X-Delete-At fans updates out to the container server
        AND to every host listed in X-Delete-At-Host (here two), pairing
        hosts with devices positionally. Expirer updates always use
        storage policy 0."""
        # update router post patch
        self.object_controller._diskfile_router = diskfile.DiskFileRouter(
            self.conf, self.object_controller.logger)
        policy = random.choice(list(POLICIES))
        self.object_controller.expiring_objects_account = 'exp'
        self.object_controller.expiring_objects_container_divisor = 60
        http_connect_args = []

        def fake_http_connect(ipaddr, port, device, partition, method, path,
                              headers=None, query_string=None, ssl=False):
            class SuccessfulFakeConn(object):
                @property
                def status(self):
                    return 200

                def getresponse(self):
                    return self

                def read(self):
                    return ''
            # record only the non-None call arguments for later comparison
            captured_args = {'ipaddr': ipaddr, 'port': port,
                             'device': device, 'partition': partition,
                             'method': method, 'path': path, 'ssl': ssl,
                             'headers': headers, 'query_string': query_string}
            http_connect_args.append(
                dict((k, v) for k, v in captured_args.items()
                     if v is not None))
            return SuccessfulFakeConn()
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'Content-Type': 'application/burrito',
                     'Content-Length': '0',
                     'X-Backend-Storage-Policy-Index': int(policy),
                     'X-Container-Partition': '20',
                     'X-Container-Host': '1.2.3.4:5',
                     'X-Container-Device': 'sdb1',
                     'X-Delete-At': 9999999999,
                     'X-Delete-At-Container': '9999999960',
                     'X-Delete-At-Host': "10.1.1.1:6001,10.2.2.2:6002",
                     'X-Delete-At-Partition': '6237',
                     'X-Delete-At-Device': 'sdp,sdq'})
        with fake_spawn(), mock.patch.object(
                object_server, 'http_connect', fake_http_connect):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # sort by ip so the three expected updates compare deterministically
        http_connect_args.sort(key=operator.itemgetter('ipaddr'))
        self.assertEqual(len(http_connect_args), 3)
        # 1) the normal container update, carrying the request's policy
        self.assertEqual(
            http_connect_args[0],
            {'ipaddr': '1.2.3.4',
             'port': '5',
             'path': '/a/c/o',
             'device': 'sdb1',
             'partition': '20',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'application/burrito',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 'X-Backend-Storage-Policy-Index': int(policy),
                 'x-trans-id': '-'})})
        # 2) first delete-at host, paired with the first device ('sdp')
        self.assertEqual(
            http_connect_args[1],
            {'ipaddr': '10.1.1.1',
             'port': '6001',
             'path': '/exp/9999999960/9999999999-a/c/o',
             'device': 'sdp',
             'partition': '6237',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'text/plain',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 # system account storage policy is 0
                 'X-Backend-Storage-Policy-Index': 0,
                 'x-trans-id': '-'})})
        # 3) second delete-at host, paired with the second device ('sdq')
        self.assertEqual(
            http_connect_args[2],
            {'ipaddr': '10.2.2.2',
             'port': '6002',
             'path': '/exp/9999999960/9999999999-a/c/o',
             'device': 'sdq',
             'partition': '6237',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'text/plain',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 # system account storage policy is 0
                 'X-Backend-Storage-Policy-Index': 0,
                 'x-trans-id': '-'})})
    @patch_policies([StoragePolicy(0, 'zero', True),
                     StoragePolicy(1, 'one'),
                     StoragePolicy(26, 'twice-thirteen')])
    def test_updating_multiple_container_servers(self):
        """When X-Container-Host lists two hosts, a PUT sends one container
        update per host, pairing hosts with X-Container-Device entries
        positionally."""
        # update router post patch
        self.object_controller._diskfile_router = diskfile.DiskFileRouter(
            self.conf, self.object_controller.logger)
        http_connect_args = []

        def fake_http_connect(ipaddr, port, device, partition, method, path,
                              headers=None, query_string=None, ssl=False):
            class SuccessfulFakeConn(object):
                @property
                def status(self):
                    return 200

                def getresponse(self):
                    return self

                def read(self):
                    return ''
            # record only the non-None call arguments for later comparison
            captured_args = {'ipaddr': ipaddr, 'port': port,
                             'device': device, 'partition': partition,
                             'method': method, 'path': path, 'ssl': ssl,
                             'headers': headers, 'query_string': query_string}
            http_connect_args.append(
                dict((k, v) for k, v in captured_args.items()
                     if v is not None))
            return SuccessfulFakeConn()
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'Content-Type': 'application/burrito',
                     'Content-Length': '0',
                     'X-Backend-Storage-Policy-Index': '26',
                     'X-Container-Partition': '20',
                     'X-Container-Host': '1.2.3.4:5, 6.7.8.9:10',
                     'X-Container-Device': 'sdb1, sdf1'})
        with fake_spawn(), mock.patch.object(
                object_server, 'http_connect', fake_http_connect):
            req.get_response(self.object_controller)
        # sort by ip so the two expected updates compare deterministically
        http_connect_args.sort(key=operator.itemgetter('ipaddr'))
        self.assertEqual(len(http_connect_args), 2)
        self.assertEqual(
            http_connect_args[0],
            {'ipaddr': '1.2.3.4',
             'port': '5',
             'path': '/a/c/o',
             'device': 'sdb1',
             'partition': '20',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'application/burrito',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'X-Backend-Storage-Policy-Index': '26',
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 'x-trans-id': '-'})})
        self.assertEqual(
            http_connect_args[1],
            {'ipaddr': '6.7.8.9',
             'port': '10',
             'path': '/a/c/o',
             'device': 'sdf1',
             'partition': '20',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'application/burrito',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'X-Backend-Storage-Policy-Index': '26',
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 'x-trans-id': '-'})})
    # NOTE(review): "aysnc" in the method name is a typo for "async"; left
    # as-is since renaming would change the test id.
    def test_object_delete_at_aysnc_update(self):
        """When both container updates fail (500s), a PUT with X-Delete-At
        still succeeds and writes two async pendings: one for the user
        container (request policy) and one for .expiring_objects (policy 0).
        """
        policy = random.choice(list(POLICIES))
        ts = (utils.Timestamp(t) for t in
              itertools.count(int(time())))
        container_updates = []

        def capture_updates(ip, port, method, path, headers, *args, **kwargs):
            container_updates.append((ip, port, method, path, headers))
        put_timestamp = next(ts).internal
        delete_at_timestamp = utils.normalize_delete_at_timestamp(
            next(ts).normal)
        # floor to the divisor boundary (integer division under py2)
        delete_at_container = (
            int(delete_at_timestamp) /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'X-Container-Host': '10.0.0.1:6001',
            'X-Container-Device': 'sda1',
            'X-Container-Partition': 'p',
            'X-Delete-At': delete_at_timestamp,
            'X-Delete-At-Container': delete_at_container,
            'X-Delete-At-Partition': 'p',
            'X-Delete-At-Host': '10.0.0.2:6002',
            'X-Delete-At-Device': 'sda1',
            'X-Backend-Storage-Policy-Index': int(policy)}
        if policy.policy_type == EC_POLICY:
            headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
        req = Request.blank(
            '/sda1/p/a/c/o', method='PUT', body='', headers=headers)
        # both updates respond 500, forcing the async-pending path
        with fake_spawn(), mocked_http_conn(
                500, 500, give_connect=capture_updates) as fake_conn:
            resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(2, len(container_updates))
        delete_at_update, container_update = container_updates
        # delete_at_update
        ip, port, method, path, headers = delete_at_update
        self.assertEqual(ip, '10.0.0.2')
        self.assertEqual(port, '6002')
        self.assertEqual(method, 'PUT')
        self.assertEqual(path, '/sda1/p/.expiring_objects/%s/%s-a/c/o' %
                         (delete_at_container, delete_at_timestamp))
        expected = {
            'X-Timestamp': put_timestamp,
            # system account storage policy is 0
            'X-Backend-Storage-Policy-Index': 0,
        }
        for key, value in expected.items():
            self.assertEqual(headers[key], str(value))
        # container_update
        ip, port, method, path, headers = container_update
        self.assertEqual(ip, '10.0.0.1')
        self.assertEqual(port, '6001')
        self.assertEqual(method, 'PUT')
        self.assertEqual(path, '/sda1/p/a/c/o')
        expected = {
            'X-Timestamp': put_timestamp,
            'X-Backend-Storage-Policy-Index': int(policy),
        }
        for key, value in expected.items():
            self.assertEqual(headers[key], str(value))
        # check async pendings: each failed update left a pickle on disk,
        # tagged with the policy its retry must use
        async_dir = os.path.join(self.testdir, 'sda1',
                                 diskfile.get_async_dir(policy))
        found_files = []
        for root, dirs, files in os.walk(async_dir):
            for f in files:
                async_file = os.path.join(root, f)
                found_files.append(async_file)
                data = pickle.load(open(async_file))
                if data['account'] == 'a':
                    self.assertEqual(
                        int(data['headers']
                            ['X-Backend-Storage-Policy-Index']), int(policy))
                elif data['account'] == '.expiring_objects':
                    self.assertEqual(
                        int(data['headers']
                            ['X-Backend-Storage-Policy-Index']), 0)
                else:
                    self.fail('unexpected async pending data')
        self.assertEqual(2, len(found_files))
    def test_async_update_saves_on_exception(self):
        """If http_connect raises, async_update pickles the update to the
        policy's async dir instead of losing it."""
        policy = random.choice(list(POLICIES))
        self._stage_tmp_dir(policy)
        # zero the hash prefix so the on-disk suffix dir ('a83') is known
        _prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_PREFIX = ''

        def fake_http_connect(*args):
            raise Exception('test')
        orig_http_connect = object_server.http_connect
        try:
            object_server.http_connect = fake_http_connect
            self.object_controller.async_update(
                'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                {'x-timestamp': '1', 'x-out': 'set',
                 'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
                policy)
        finally:
            object_server.http_connect = orig_http_connect
            utils.HASH_PATH_PREFIX = _prefix
        async_dir = diskfile.get_async_dir(policy)
        # the pending file name is <hash-of-a/c/o>-<timestamp>
        self.assertEqual(
            pickle.load(open(os.path.join(
                self.testdir, 'sda1', async_dir, 'a83',
                '06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
                utils.Timestamp(1).internal))),
            {'headers': {'x-timestamp': '1', 'x-out': 'set',
                         'user-agent': 'object-server %s' % os.getpid(),
                         'X-Backend-Storage-Policy-Index': int(policy)},
             'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
    def test_async_update_saves_on_non_2xx(self):
        """Any non-2xx response (1xx, 3xx, 5xx) makes async_update pickle
        the update for later retry."""
        policy = random.choice(list(POLICIES))
        self._stage_tmp_dir(policy)
        # zero the hash prefix so the on-disk suffix dir ('a83') is known
        _prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_PREFIX = ''

        def fake_http_connect(status):
            class FakeConn(object):
                def __init__(self, status):
                    self.status = status

                def getresponse(self):
                    return self

                def read(self):
                    return ''
            return lambda *args: FakeConn(status)
        orig_http_connect = object_server.http_connect
        try:
            # one representative status from each non-2xx class
            for status in (199, 300, 503):
                object_server.http_connect = fake_http_connect(status)
                self.object_controller.async_update(
                    'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                    {'x-timestamp': '1', 'x-out': str(status),
                     'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
                    policy)
                async_dir = diskfile.get_async_dir(policy)
                self.assertEqual(
                    pickle.load(open(os.path.join(
                        self.testdir, 'sda1', async_dir, 'a83',
                        '06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
                        utils.Timestamp(1).internal))),
                    {'headers': {'x-timestamp': '1', 'x-out': str(status),
                                 'user-agent':
                                 'object-server %s' % os.getpid(),
                                 'X-Backend-Storage-Policy-Index':
                                 int(policy)},
                     'account': 'a', 'container': 'c', 'obj': 'o',
                     'op': 'PUT'})
        finally:
            object_server.http_connect = orig_http_connect
            utils.HASH_PATH_PREFIX = _prefix
    def test_async_update_does_not_save_on_2xx(self):
        """A 2xx response means the update succeeded; no async pending
        file may be written."""
        # zero the hash prefix so the would-be pending path is known
        _prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_PREFIX = ''

        def fake_http_connect(status):
            class FakeConn(object):
                def __init__(self, status):
                    self.status = status

                def getresponse(self):
                    return self

                def read(self):
                    return ''
            return lambda *args: FakeConn(status)
        orig_http_connect = object_server.http_connect
        try:
            # both ends of the 2xx range
            for status in (200, 299):
                object_server.http_connect = fake_http_connect(status)
                self.object_controller.async_update(
                    'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                    {'x-timestamp': '1', 'x-out': str(status)}, 'sda1', 0)
                self.assertFalse(
                    os.path.exists(os.path.join(
                        self.testdir, 'sda1', 'async_pending', 'a83',
                        '06fbf0b514e5199dfc4e00f42eb5ea83-0000000001.00000')))
        finally:
            object_server.http_connect = orig_http_connect
            utils.HASH_PATH_PREFIX = _prefix
    def test_async_update_saves_on_timeout(self):
        """If reading the update response exceeds node_timeout, the update
        is pickled to the async dir."""
        policy = random.choice(list(POLICIES))
        self._stage_tmp_dir(policy)
        # zero the hash prefix so the on-disk pending path is known
        _prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_PREFIX = ''

        def fake_http_connect():
            class FakeConn(object):
                def getresponse(self):
                    # sleep far longer than the 0.001s node_timeout below
                    return sleep(1)
            return lambda *args: FakeConn()
        orig_http_connect = object_server.http_connect
        try:
            for status in (200, 299):
                object_server.http_connect = fake_http_connect()
                self.object_controller.node_timeout = 0.001
                self.object_controller.async_update(
                    'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                    {'x-timestamp': '1', 'x-out': str(status)}, 'sda1',
                    policy)
                async_dir = diskfile.get_async_dir(policy)
                self.assertTrue(
                    os.path.exists(os.path.join(
                        self.testdir, 'sda1', async_dir, 'a83',
                        '06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
                        utils.Timestamp(1).internal)))
        finally:
            object_server.http_connect = orig_http_connect
            utils.HASH_PATH_PREFIX = _prefix
def test_container_update_no_async_update(self):
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.container_update(
'PUT', 'a', 'c', 'o', req, {
'x-size': '0', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain', 'x-timestamp': '1'},
'sda1', policy)
self.assertEqual(given_args, [])
    def test_container_update_success(self):
        """A PUT with X-Container-* headers issues exactly one container
        update with the expected path and headers."""
        container_updates = []

        def capture_updates(ip, port, method, path, headers, *args, **kwargs):
            container_updates.append((ip, port, method, path, headers))
        req = Request.blank(
            '/sda1/0/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '123',
                     'X-Container-Host': 'chost:cport',
                     'X-Container-Partition': 'cpartition',
                     'X-Container-Device': 'cdevice',
                     'Content-Type': 'text/plain'}, body='')
        with fake_spawn(), mocked_http_conn(
                200, give_connect=capture_updates) as fake_conn:
            resp = req.get_response(self.object_controller)
        # all mocked responses were consumed
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(len(container_updates), 1)
        ip, port, method, path, headers = container_updates[0]
        self.assertEqual(ip, 'chost')
        self.assertEqual(port, 'cport')
        self.assertEqual(method, 'PUT')
        self.assertEqual(path, '/cdevice/cpartition/a/c/o')
        self.assertEqual(headers, HeaderKeyDict({
            'user-agent': 'object-server %s' % os.getpid(),
            'x-size': '0',
            'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
            'x-content-type': 'text/plain',
            'x-timestamp': utils.Timestamp(1).internal,
            'X-Backend-Storage-Policy-Index': '0',  # default when not given
            'x-trans-id': '123',
            'referer': 'PUT http://localhost/sda1/0/a/c/o'}))
    def test_container_update_overrides(self):
        """X-Backend-Container-Update-Override-* headers replace/add the
        corresponding x-* values in the container update; other
        X-Backend-Container-* headers are ignored."""
        container_updates = []

        def capture_updates(ip, port, method, path, headers, *args, **kwargs):
            container_updates.append((ip, port, method, path, headers))
        headers = {
            'X-Timestamp': 1,
            'X-Trans-Id': '123',
            'X-Container-Host': 'chost:cport',
            'X-Container-Partition': 'cpartition',
            'X-Container-Device': 'cdevice',
            'Content-Type': 'text/plain',
            'X-Backend-Container-Update-Override-Etag': 'override_etag',
            'X-Backend-Container-Update-Override-Content-Type': 'override_val',
            'X-Backend-Container-Update-Override-Foo': 'bar',
            'X-Backend-Container-Ignored': 'ignored'
        }
        req = Request.blank('/sda1/0/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers, body='')
        with fake_spawn(), mocked_http_conn(
                200, give_connect=capture_updates) as fake_conn:
            resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(len(container_updates), 1)
        ip, port, method, path, headers = container_updates[0]
        self.assertEqual(ip, 'chost')
        self.assertEqual(port, 'cport')
        self.assertEqual(method, 'PUT')
        self.assertEqual(path, '/cdevice/cpartition/a/c/o')
        # etag/content-type replaced; '...-Override-Foo' became 'x-foo';
        # the non-override backend header does not appear at all
        self.assertEqual(headers, HeaderKeyDict({
            'user-agent': 'object-server %s' % os.getpid(),
            'x-size': '0',
            'x-etag': 'override_etag',
            'x-content-type': 'override_val',
            'x-timestamp': utils.Timestamp(1).internal,
            'X-Backend-Storage-Policy-Index': '0',  # default when not given
            'x-trans-id': '123',
            'referer': 'PUT http://localhost/sda1/0/a/c/o',
            'x-foo': 'bar'}))
def test_container_update_async(self):
policy = random.choice(list(POLICIES))
req = Request.blank(
'/sda1/0/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '123',
'X-Container-Host': 'chost:cport',
'X-Container-Partition': 'cpartition',
'X-Container-Device': 'cdevice',
'Content-Type': 'text/plain',
'X-Object-Sysmeta-Ec-Frag-Index': 0,
'X-Backend-Storage-Policy-Index': int(policy)}, body='')
given_args = []
def fake_pickle_async_update(*args):
given_args[:] = args
diskfile_mgr = self.object_controller._diskfile_router[policy]
diskfile_mgr.pickle_async_update = fake_pickle_async_update
with fake_spawn(), mocked_http_conn(500) as fake_conn:
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEqual(len(given_args), 7)
(objdevice, account, container, obj, data, timestamp,
policy) = given_args
self.assertEqual(objdevice, 'sda1')
self.assertEqual(account, 'a')
self.assertEqual(container, 'c')
self.assertEqual(obj, 'o')
self.assertEqual(timestamp, utils.Timestamp(1).internal)
self.assertEqual(policy, policy)
self.assertEqual(data, {
'headers': HeaderKeyDict({
'X-Size': '0',
'User-Agent': 'object-server %s' % os.getpid(),
'X-Content-Type': 'text/plain',
'X-Timestamp': utils.Timestamp(1).internal,
'X-Trans-Id': '123',
'Referer': 'PUT http://localhost/sda1/0/a/c/o',
'X-Backend-Storage-Policy-Index': int(policy),
'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e'}),
'obj': 'o',
'account': 'a',
'container': 'c',
'op': 'PUT'})
    def test_container_update_as_greenthread(self):
        """Container updates are deferred to spawn()'d greenthreads: they
        must not run during the request, and running the captured spawn
        calls afterwards performs the async_update."""
        greenthreads = []
        saved_spawn_calls = []
        called_async_update_args = []

        def local_fake_spawn(func, *a, **kw):
            # capture the spawn call instead of running it
            saved_spawn_calls.append((func, a, kw))
            return mock.MagicMock()

        def local_fake_async_update(*a, **kw):
            # just capture the args to see that we would have called
            called_async_update_args.append([a, kw])
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'Content-Type': 'application/burrito',
                     'Content-Length': '0',
                     'X-Backend-Storage-Policy-Index': 0,
                     'X-Container-Partition': '20',
                     'X-Container-Host': '1.2.3.4:5',
                     'X-Container-Device': 'sdb1'})
        with mock.patch.object(object_server, 'spawn',
                               local_fake_spawn):
            with mock.patch.object(self.object_controller,
                                   'async_update',
                                   local_fake_async_update):
                resp = req.get_response(self.object_controller)
                # check the response is completed and successful
                self.assertEqual(resp.status_int, 201)
                # check that async_update hasn't been called
                self.assertFalse(len(called_async_update_args))
                # now do the work in greenthreads
                for func, a, kw in saved_spawn_calls:
                    gt = spawn(func, *a, **kw)
                    greenthreads.append(gt)
        # wait for the greenthreads to finish
        for gt in greenthreads:
            gt.wait()
        # check that the calls to async_update have happened
        headers_out = {'X-Size': '0',
                       'X-Content-Type': 'application/burrito',
                       'X-Timestamp': '0000012345.00000',
                       'X-Trans-Id': '-',
                       'Referer': 'PUT http://localhost/sda1/p/a/c/o',
                       'X-Backend-Storage-Policy-Index': '0',
                       'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e'}
        expected = [('PUT', 'a', 'c', 'o', '1.2.3.4:5', '20', 'sdb1',
                     headers_out, 'sda1', POLICIES[0]),
                    {'logger_thread_locals': (None, None)}]
        self.assertEqual(called_async_update_args, [expected])
def test_container_update_as_greenthread_with_timeout(self):
'''
give it one container to update (for only one greenthred)
fake the greenthred so it will raise a timeout
test that the right message is logged and the method returns None
'''
called_async_update_args = []
def local_fake_spawn(func, *a, **kw):
m = mock.MagicMock()
def wait_with_error():
raise Timeout()
m.wait = wait_with_error # because raise can't be in a lambda
return m
def local_fake_async_update(*a, **kw):
# just capture the args to see that we would have called
called_async_update_args.append([a, kw])
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '12345',
'Content-Type': 'application/burrito',
'Content-Length': '0',
'X-Backend-Storage-Policy-Index': 0,
'X-Container-Partition': '20',
'X-Container-Host': '1.2.3.4:5',
'X-Container-Device': 'sdb1'})
with mock.patch.object(object_server, 'spawn',
local_fake_spawn):
with mock.patch.object(self.object_controller,
'container_update_timeout',
1.414213562):
resp = req.get_response(self.object_controller)
# check the response is completed and successful
self.assertEqual(resp.status_int, 201)
# check that the timeout was logged
expected_logged_error = "Container update timeout (1.4142s) " \
"waiting for [('1.2.3.4:5', 'sdb1')]"
self.assertTrue(
expected_logged_error in
self.object_controller.logger.get_lines_for_level('debug'))
    def test_container_update_bad_args(self):
        """Mismatched X-Container-Host/Device counts (2 hosts, 1 device)
        skip the update entirely and log an error."""
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '123',
                     'X-Container-Host': 'chost,badhost',
                     'X-Container-Partition': 'cpartition',
                     'X-Container-Device': 'cdevice',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        with mock.patch.object(self.object_controller, 'async_update',
                               fake_async_update):
            self.object_controller.container_update(
                'PUT', 'a', 'c', 'o', req, {
                    'x-size': '0',
                    'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                    'x-content-type': 'text/plain', 'x-timestamp': '1'},
                'sda1', policy)
        # no update was attempted
        self.assertEqual(given_args, [])
        # and the mismatch was reported with both offending values
        errors = self.object_controller.logger.get_lines_for_level('error')
        self.assertEqual(len(errors), 1)
        msg = errors[0]
        self.assertTrue('Container update failed' in msg)
        self.assertTrue('different numbers of hosts and devices' in msg)
        self.assertTrue('chost,badhost' in msg)
        self.assertTrue('cdevice' in msg)
    def test_delete_at_update_on_put(self):
        # Test how delete_at_update works when issued a delete for old
        # expiration info after a new put with no new expiration info.
        """delete_at_update('DELETE', ...) targets the .expiring_objects
        account (always policy 0) with the zero-padded timestamped name."""
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '123',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        with mock.patch.object(self.object_controller, 'async_update',
                               fake_async_update):
            self.object_controller.delete_at_update(
                'DELETE', 2, 'a', 'c', 'o', req, 'sda1', policy)
        self.assertEqual(
            given_args, [
                'DELETE', '.expiring_objects', '0000000000',
                '0000000002-a/c/o', None, None, None,
                HeaderKeyDict({
                    'X-Backend-Storage-Policy-Index': 0,
                    'x-timestamp': utils.Timestamp('1').internal,
                    'x-trans-id': '123',
                    'referer': 'PUT http://localhost/v1/a/c/o'}),
                'sda1', policy])
    def test_delete_at_negative(self):
        # Test how delete_at_update works when issued a delete for old
        # expiration info after a new put with no new expiration info.
        # Test negative is reset to 0
        """A negative delete-at time (-2 here) is clamped to 0 in both the
        expirer container and the object name."""
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)
        self.object_controller.async_update = fake_async_update
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '1234', 'X-Backend-Storage-Policy-Index':
                     int(policy)})
        self.object_controller.delete_at_update(
            'DELETE', -2, 'a', 'c', 'o', req, 'sda1', policy)
        self.assertEqual(given_args, [
            'DELETE', '.expiring_objects', '0000000000', '0000000000-a/c/o',
            None, None, None,
            HeaderKeyDict({
                # the expiring objects account is always 0
                'X-Backend-Storage-Policy-Index': 0,
                'x-timestamp': utils.Timestamp('1').internal,
                'x-trans-id': '1234',
                'referer': 'PUT http://localhost/v1/a/c/o'}),
            'sda1', policy])
    def test_delete_at_cap(self):
        # Test how delete_at_update works when issued a delete for old
        # expiration info after a new put with no new expiration info.
        # Test past cap is reset to cap
        """A delete-at time beyond the 10-digit cap (12345678901) is
        clamped to 9999999999 in the object name; the expirer container
        matches utils.get_expirer_container."""
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)
        self.object_controller.async_update = fake_async_update
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '1234',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        self.object_controller.delete_at_update(
            'DELETE', 12345678901, 'a', 'c', 'o', req, 'sda1', policy)
        # pull the expirer container name out and check it separately
        expiring_obj_container = given_args.pop(2)
        expected_exp_cont = utils.get_expirer_container(
            utils.normalize_delete_at_timestamp(12345678901),
            86400, 'a', 'c', 'o')
        self.assertEqual(expiring_obj_container, expected_exp_cont)
        self.assertEqual(given_args, [
            'DELETE', '.expiring_objects', '9999999999-a/c/o',
            None, None, None,
            HeaderKeyDict({
                'X-Backend-Storage-Policy-Index': 0,
                'x-timestamp': utils.Timestamp('1').internal,
                'x-trans-id': '1234',
                'referer': 'PUT http://localhost/v1/a/c/o'}),
            'sda1', policy])
    def test_delete_at_update_put_with_info(self):
        # Keep next test,
        # test_delete_at_update_put_with_info_but_missing_container, in sync
        # with this one but just missing the X-Delete-At-Container header.
        """delete_at_update('PUT', ...) forwards the X-Delete-At-Host/
        Partition/Device info and zero-object metadata to async_update,
        pinned to policy 0."""
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)
        self.object_controller.async_update = fake_async_update
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '1234',
                     'X-Delete-At-Container': '0',
                     'X-Delete-At-Host': '127.0.0.1:1234',
                     'X-Delete-At-Partition': '3',
                     'X-Delete-At-Device': 'sdc1',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        self.object_controller.delete_at_update('PUT', 2, 'a', 'c', 'o',
                                                req, 'sda1', policy)
        self.assertEqual(
            given_args, [
                'PUT', '.expiring_objects', '0000000000', '0000000002-a/c/o',
                '127.0.0.1:1234',
                '3', 'sdc1', HeaderKeyDict({
                    # the .expiring_objects account is always policy-0
                    'X-Backend-Storage-Policy-Index': 0,
                    'x-size': '0',
                    'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                    'x-content-type': 'text/plain',
                    'x-timestamp': utils.Timestamp('1').internal,
                    'x-trans-id': '1234',
                    'referer': 'PUT http://localhost/v1/a/c/o'}),
                'sda1', policy])
    def test_delete_at_update_put_with_info_but_missing_container(self):
        # Same as previous test, test_delete_at_update_put_with_info, but just
        # missing the X-Delete-At-Container header.
        """Omitting X-Delete-At-Container produces a best-guess container
        name plus a warning log line."""
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)
        self.object_controller.async_update = fake_async_update
        # use the capturing test logger so the warning can be asserted
        self.object_controller.logger = self.logger
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '1234',
                     'X-Delete-At-Host': '127.0.0.1:1234',
                     'X-Delete-At-Partition': '3',
                     'X-Delete-At-Device': 'sdc1',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        self.object_controller.delete_at_update('PUT', 2, 'a', 'c', 'o',
                                                req, 'sda1', policy)
        self.assertEqual(
            self.logger.get_lines_for_level('warning'),
            ['X-Delete-At-Container header must be specified for expiring '
             'objects background PUT to work properly. Making best guess as '
             'to the container name for now.'])
    def test_delete_at_update_delete(self):
        """delete_at_update('DELETE', ...) from a DELETE request issues a
        policy-0 expirer-account update with no host/partition/device."""
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)
        self.object_controller.async_update = fake_async_update
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '1234',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        self.object_controller.delete_at_update('DELETE', 2, 'a', 'c', 'o',
                                                req, 'sda1', policy)
        self.assertEqual(
            given_args, [
                'DELETE', '.expiring_objects', '0000000000',
                '0000000002-a/c/o', None, None,
                None, HeaderKeyDict({
                    'X-Backend-Storage-Policy-Index': 0,
                    'x-timestamp': utils.Timestamp('1').internal,
                    'x-trans-id': '1234',
                    'referer': 'DELETE http://localhost/v1/a/c/o'}),
                'sda1', policy])
def test_delete_backend_replication(self):
    """delete_at_update must short-circuit entirely for replication.

    When the request carries X-Backend-Replication: True, no async
    update may be queued at all.
    """
    policy = random.choice(list(POLICIES))
    captured = []

    def record_async_update(*args):
        captured.extend(args)

    self.object_controller.async_update = record_async_update
    request = Request.blank(
        '/v1/a/c/o',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': 1,
                 'X-Trans-Id': '1234',
                 'X-Backend-Replication': 'True',
                 'X-Backend-Storage-Policy-Index': int(policy)})
    self.object_controller.delete_at_update(
        'DELETE', -2, 'a', 'c', 'o', request, 'sda1', policy)
    # nothing was enqueued
    self.assertEqual(captured, [])
def test_POST_calls_delete_at(self):
    # Verify POST drives delete_at_update: no call without X-Delete-At,
    # a 'PUT' row when a new X-Delete-At arrives, and a 'DELETE' row for
    # the previously recorded expiration when it is replaced.
    policy = random.choice(list(POLICIES))
    given_args = []

    def fake_delete_at_update(*args):
        # capture the arguments delete_at_update is invoked with
        given_args.extend(args)
    self.object_controller.delete_at_update = fake_delete_at_update
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream',
                 'X-Backend-Storage-Policy-Index': int(policy),
                 'X-Object-Sysmeta-Ec-Frag-Index': 2})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # PUT without X-Delete-At: hook must not fire
    self.assertEqual(given_args, [])
    sleep(.00001)
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Type': 'application/x-test',
                 'X-Backend-Storage-Policy-Index': int(policy)})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    # POST without X-Delete-At: still no call
    self.assertEqual(given_args, [])
    sleep(.00001)
    timestamp1 = normalize_timestamp(time())
    delete_at_timestamp1 = str(int(time() + 1000))
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': timestamp1,
                 'Content-Type': 'application/x-test',
                 'X-Delete-At': delete_at_timestamp1,
                 'X-Backend-Storage-Policy-Index': int(policy)})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    # given_args[5] is the request object passed through to the hook;
    # compare it to itself since we cannot construct an equal Request
    self.assertEqual(
        given_args, [
            'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
            given_args[5], 'sda1', policy])
    while given_args:
        given_args.pop()
    sleep(.00001)
    timestamp2 = normalize_timestamp(time())
    delete_at_timestamp2 = str(int(time() + 2000))
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': timestamp2,
                 'Content-Type': 'application/x-test',
                 'X-Delete-At': delete_at_timestamp2,
                 'X-Backend-Storage-Policy-Index': int(policy)})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 202)
    # new expiration is recorded and the old one is removed
    self.assertEqual(
        given_args, [
            'PUT', int(delete_at_timestamp2), 'a', 'c', 'o',
            given_args[5], 'sda1', policy,
            'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
            given_args[5], 'sda1', policy])
def test_PUT_calls_delete_at(self):
    # Verify PUT drives delete_at_update: nothing without X-Delete-At,
    # a 'PUT' row for a new expiration, and a 'DELETE' row for the
    # previous expiration when it is overwritten.
    policy = random.choice(list(POLICIES))
    given_args = []

    def fake_delete_at_update(*args):
        # capture the arguments delete_at_update is invoked with
        given_args.extend(args)
    self.object_controller.delete_at_update = fake_delete_at_update
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream',
                 'X-Backend-Storage-Policy-Index': int(policy),
                 'X-Object-Sysmeta-Ec-Frag-Index': 4})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # PUT without X-Delete-At: hook must not fire
    self.assertEqual(given_args, [])
    sleep(.00001)
    timestamp1 = normalize_timestamp(time())
    delete_at_timestamp1 = str(int(time() + 1000))
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp1,
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream',
                 'X-Delete-At': delete_at_timestamp1,
                 'X-Backend-Storage-Policy-Index': int(policy),
                 'X-Object-Sysmeta-Ec-Frag-Index': 3})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # given_args[5] is the request object passed through to the hook
    self.assertEqual(
        given_args, [
            'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
            given_args[5], 'sda1', policy])
    while given_args:
        given_args.pop()
    sleep(.00001)
    timestamp2 = normalize_timestamp(time())
    delete_at_timestamp2 = str(int(time() + 2000))
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp2,
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream',
                 'X-Delete-At': delete_at_timestamp2,
                 'X-Backend-Storage-Policy-Index': int(policy),
                 'X-Object-Sysmeta-Ec-Frag-Index': 3})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # new expiration recorded, old one removed
    self.assertEqual(
        given_args, [
            'PUT', int(delete_at_timestamp2), 'a', 'c', 'o',
            given_args[5], 'sda1', policy,
            'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
            given_args[5], 'sda1', policy])
def test_GET_but_expired(self):
    # GET succeeds while X-Delete-At lies in the future and turns 404
    # (but still reports X-Backend-Timestamp) once the expiration passes.
    test_time = time() + 10000
    delete_at_timestamp = int(test_time + 100)
    # container names are bucketed by the configured divisor
    delete_at_container = str(
        delete_at_timestamp /
        self.object_controller.expiring_objects_container_divisor *
        self.object_controller.expiring_objects_container_divisor)
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
                 'X-Delete-At': str(delete_at_timestamp),
                 'X-Delete-At-Container': delete_at_container,
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream'})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
        headers={'X-Timestamp': normalize_timestamp(test_time)})
    resp = req.get_response(self.object_controller)
    # not expired yet
    self.assertEqual(resp.status_int, 200)
    orig_time = object_server.time.time
    try:
        # freeze the server clock just before a one-second expiry
        t = time()
        object_server.time.time = lambda: t
        delete_at_timestamp = int(t + 1)
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        put_timestamp = normalize_timestamp(test_time - 1000)
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': put_timestamp,
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'X-Timestamp': normalize_timestamp(test_time)})
        resp = req.get_response(self.object_controller)
        # still readable at the frozen time
        self.assertEqual(resp.status_int, 200)
    finally:
        object_server.time.time = orig_time
    orig_time = object_server.time.time
    try:
        # advance the server clock past the expiry
        t = time() + 2
        object_server.time.time = lambda: t
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'X-Timestamp': normalize_timestamp(t)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        # the 404 still exposes the backend timestamp of the expired data
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         utils.Timestamp(put_timestamp))
    finally:
        object_server.time.time = orig_time
def test_HEAD_but_expired(self):
    # HEAD mirrors the GET behavior: 200 before the X-Delete-At time,
    # 404 with X-Backend-Timestamp afterwards.
    test_time = time() + 10000
    delete_at_timestamp = int(test_time + 100)
    # container names are bucketed by the configured divisor
    delete_at_container = str(
        delete_at_timestamp /
        self.object_controller.expiring_objects_container_divisor *
        self.object_controller.expiring_objects_container_divisor)
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
                 'X-Delete-At': str(delete_at_timestamp),
                 'X-Delete-At-Container': delete_at_container,
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream'})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'HEAD'},
        headers={'X-Timestamp': normalize_timestamp(test_time)})
    resp = req.get_response(self.object_controller)
    # not expired yet
    self.assertEqual(resp.status_int, 200)
    orig_time = object_server.time.time
    try:
        # freeze the server clock just before a one-second expiry
        t = time()
        delete_at_timestamp = int(t + 1)
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        object_server.time.time = lambda: t
        put_timestamp = normalize_timestamp(test_time - 1000)
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': put_timestamp,
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'HEAD'},
            headers={'X-Timestamp': normalize_timestamp(test_time)})
        resp = req.get_response(self.object_controller)
        # still visible at the frozen time
        self.assertEqual(resp.status_int, 200)
    finally:
        object_server.time.time = orig_time
    orig_time = object_server.time.time
    try:
        # advance the server clock past the expiry
        t = time() + 2
        object_server.time.time = lambda: t
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'HEAD'},
            headers={'X-Timestamp': normalize_timestamp(time())})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        # the 404 still exposes the backend timestamp of the expired data
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         utils.Timestamp(put_timestamp))
    finally:
        object_server.time.time = orig_time
def test_POST_but_expired(self):
    # POST succeeds against a live object but returns 404 once the
    # object's X-Delete-At has passed.
    test_time = time() + 10000
    delete_at_timestamp = int(test_time + 100)
    # container names are bucketed by the configured divisor
    delete_at_container = str(
        delete_at_timestamp /
        self.object_controller.expiring_objects_container_divisor *
        self.object_controller.expiring_objects_container_divisor)
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
                 'X-Delete-At': str(delete_at_timestamp),
                 'X-Delete-At-Container': delete_at_container,
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream'})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 1500)})
    resp = req.get_response(self.object_controller)
    # object is live, POST is accepted
    self.assertEqual(resp.status_int, 202)
    # overwrite with an object that expires one second from now
    delete_at_timestamp = int(time() + 1)
    delete_at_container = str(
        delete_at_timestamp /
        self.object_controller.expiring_objects_container_divisor *
        self.object_controller.expiring_objects_container_divisor)
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 1000),
                 'X-Delete-At': str(delete_at_timestamp),
                 'X-Delete-At-Container': delete_at_container,
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream'})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    orig_time = object_server.time.time
    try:
        # advance the server clock past the expiration
        t = time() + 2
        object_server.time.time = lambda: t
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(time())})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
    finally:
        object_server.time.time = orig_time
def test_DELETE_but_expired(self):
    """A DELETE against an already-expired object must answer 404."""
    now = time() + 10000
    expiry = int(now + 100)
    # container names are bucketed by the configured divisor
    expiry_container = str(
        expiry /
        self.object_controller.expiring_objects_container_divisor *
        self.object_controller.expiring_objects_container_divisor)
    put_req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(now - 2000),
                 'X-Delete-At': str(expiry),
                 'X-Delete-At-Container': expiry_container,
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream'})
    put_req.body = 'TEST'
    put_resp = put_req.get_response(self.object_controller)
    self.assertEqual(put_resp.status_int, 201)
    real_time = object_server.time.time
    try:
        # pin the server clock beyond the expiration time
        frozen = now + 100
        object_server.time.time = lambda: float(frozen)
        del_req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(time())})
        del_resp = del_req.get_response(self.object_controller)
        self.assertEqual(del_resp.status_int, 404)
    finally:
        object_server.time.time = real_time
def test_DELETE_if_delete_at_expired_still_deletes(self):
    # Even after an object has expired (GET 404s), a DELETE carrying a
    # matching X-If-Delete-At must still remove the on-disk .data file;
    # a mismatched value must 412 and leave the file alone.
    test_time = time() + 10
    test_timestamp = normalize_timestamp(test_time)
    delete_at_time = int(test_time + 10)
    delete_at_timestamp = str(delete_at_time)
    delete_at_container = str(
        delete_at_time /
        self.object_controller.expiring_objects_container_divisor *
        self.object_controller.expiring_objects_container_divisor)
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': test_timestamp,
                 'X-Delete-At': delete_at_timestamp,
                 'X-Delete-At-Container': delete_at_container,
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream'})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # sanity
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
        headers={'X-Timestamp': test_timestamp})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 200)
    self.assertEqual(resp.body, 'TEST')
    # locate the .data file on disk so its presence can be checked below
    objfile = os.path.join(
        self.testdir, 'sda1',
        storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                          hash_path('a', 'c', 'o')),
        utils.Timestamp(test_timestamp).internal + '.data')
    self.assertTrue(os.path.isfile(objfile))
    # move time past expiry
    with mock.patch('swift.obj.diskfile.time') as mock_time:
        mock_time.time.return_value = test_time + 100
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'X-Timestamp': test_timestamp})
        resp = req.get_response(self.object_controller)
        # request will 404
        self.assertEqual(resp.status_int, 404)
        # but file still exists
        self.assertTrue(os.path.isfile(objfile))
        # make the x-if-delete-at with some wrong bits
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': delete_at_timestamp,
                     'X-If-Delete-At': int(time() + 1)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        self.assertTrue(os.path.isfile(objfile))
        # make the x-if-delete-at with all the right bits
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': delete_at_timestamp,
                     'X-If-Delete-At': delete_at_timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        self.assertFalse(os.path.isfile(objfile))
        # make the x-if-delete-at with all the right bits (again)
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': delete_at_timestamp,
                     'X-If-Delete-At': delete_at_timestamp})
        resp = req.get_response(self.object_controller)
        # already gone: conditional delete now fails
        self.assertEqual(resp.status_int, 412)
        self.assertFalse(os.path.isfile(objfile))
        # make the x-if-delete-at for some not found
        req = Request.blank(
            '/sda1/p/a/c/o-not-found',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': delete_at_timestamp,
                     'X-If-Delete-At': delete_at_timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
def test_DELETE_if_delete_at(self):
    # Exercise X-If-Delete-At handling on DELETE: 412 when the value
    # does not match the stored X-Delete-At, 204 when it matches or is
    # absent, 400 when it is not an integer.
    test_time = time() + 10000
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 99),
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream'})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # plain DELETE (no X-If-Delete-At) succeeds
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'DELETE'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 98)})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 204)
    delete_at_timestamp = int(test_time - 1)
    delete_at_container = str(
        delete_at_timestamp /
        self.object_controller.expiring_objects_container_divisor *
        self.object_controller.expiring_objects_container_divisor)
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 97),
                 'X-Delete-At': str(delete_at_timestamp),
                 'X-Delete-At-Container': delete_at_container,
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream'})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # wrong X-If-Delete-At value -> 412 Precondition Failed
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'DELETE'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 95),
                 'X-If-Delete-At': str(int(test_time))})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 412)
    # without the conditional header the DELETE goes through
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'DELETE'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 95)})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 204)
    delete_at_timestamp = int(test_time - 1)
    delete_at_container = str(
        delete_at_timestamp /
        self.object_controller.expiring_objects_container_divisor *
        self.object_controller.expiring_objects_container_divisor)
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 94),
                 'X-Delete-At': str(delete_at_timestamp),
                 'X-Delete-At-Container': delete_at_container,
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream'})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # wrong value again -> 412
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 92),
                 'X-If-Delete-At': str(int(test_time))})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 412)
    # matching value -> 204
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 92),
                 'X-If-Delete-At': delete_at_timestamp})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 204)
    # non-integer value -> 400 Bad Request
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
        headers={'X-Timestamp': normalize_timestamp(test_time - 92),
                 'X-If-Delete-At': 'abc'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 400)
def test_DELETE_calls_delete_at(self):
    # A DELETE of an object with a pending X-Delete-At must enqueue a
    # matching 'DELETE' row via delete_at_update.
    given_args = []

    def fake_delete_at_update(*args):
        # capture the arguments delete_at_update is invoked with
        given_args.extend(args)
    self.object_controller.delete_at_update = fake_delete_at_update
    timestamp1 = normalize_timestamp(time())
    delete_at_timestamp1 = int(time() + 1000)
    delete_at_container1 = str(
        delete_at_timestamp1 /
        self.object_controller.expiring_objects_container_divisor *
        self.object_controller.expiring_objects_container_divisor)
    req = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': timestamp1,
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream',
                 'X-Delete-At': str(delete_at_timestamp1),
                 'X-Delete-At-Container': delete_at_container1})
    req.body = 'TEST'
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 201)
    # PUT records the expiration (given_args[5] is the request object)
    self.assertEqual(given_args, [
        'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
        given_args[5], 'sda1', POLICIES[0]])
    while given_args:
        given_args.pop()
    sleep(.00001)
    timestamp2 = normalize_timestamp(time())
    req = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'DELETE'},
        headers={'X-Timestamp': timestamp2,
                 'Content-Type': 'application/octet-stream'})
    resp = req.get_response(self.object_controller)
    self.assertEqual(resp.status_int, 204)
    # DELETE removes the previously recorded expiration
    self.assertEqual(given_args, [
        'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
        given_args[5], 'sda1', POLICIES[0]])
def test_PUT_delete_at_in_past(self):
    """A PUT whose X-Delete-At already lies in the past must 400."""
    request = Request.blank(
        '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'X-Delete-At': str(int(time() - 1)),
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream'})
    request.body = 'TEST'
    response = request.get_response(self.object_controller)
    self.assertEqual(response.status_int, 400)
    # the rejection message names the offending header
    self.assertTrue('X-Delete-At in past' in response.body)
def test_POST_delete_at_in_past(self):
    """A POST carrying an already-past X-Delete-At must 400."""
    create = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'X-Timestamp': normalize_timestamp(time()),
                 'Content-Length': '4',
                 'Content-Type': 'application/octet-stream'})
    create.body = 'TEST'
    create_resp = create.get_response(self.object_controller)
    self.assertEqual(create_resp.status_int, 201)
    update = Request.blank(
        '/sda1/p/a/c/o',
        environ={'REQUEST_METHOD': 'POST'},
        headers={'X-Timestamp': normalize_timestamp(time() + 1),
                 'X-Delete-At': str(int(time() - 1))})
    update_resp = update.get_response(self.object_controller)
    self.assertEqual(update_resp.status_int, 400)
    # the rejection message names the offending header
    self.assertTrue('X-Delete-At in past' in update_resp.body)
def test_REPLICATE_works(self):
    # REPLICATE should return the suffix hashes from _get_hashes,
    # pickled, in the response body.

    def fake_get_hashes(*args, **kwargs):
        return 0, {1: 2}

    def my_tpool_execute(func, *args, **kwargs):
        # run the function inline instead of on a real thread pool
        return func(*args, **kwargs)

    was_get_hashes = diskfile.DiskFileManager._get_hashes
    was_tpool_exe = tpool.execute
    try:
        diskfile.DiskFileManager._get_hashes = fake_get_hashes
        tpool.execute = my_tpool_execute
        req = Request.blank('/sda1/p/suff',
                            environ={'REQUEST_METHOD': 'REPLICATE'},
                            headers={})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        p_data = pickle.loads(resp.body)
        self.assertEqual(p_data, {1: 2})
    finally:
        # always restore the patched globals
        tpool.execute = was_tpool_exe
        diskfile.DiskFileManager._get_hashes = was_get_hashes
def test_REPLICATE_timeout(self):
    # A Timeout raised while hashing must propagate out of REPLICATE.

    def fake_get_hashes(*args, **kwargs):
        raise Timeout()

    def my_tpool_execute(func, *args, **kwargs):
        # run the function inline instead of on a real thread pool
        return func(*args, **kwargs)

    was_get_hashes = diskfile.DiskFileManager._get_hashes
    was_tpool_exe = tpool.execute
    try:
        diskfile.DiskFileManager._get_hashes = fake_get_hashes
        tpool.execute = my_tpool_execute
        req = Request.blank('/sda1/p/suff',
                            environ={'REQUEST_METHOD': 'REPLICATE'},
                            headers={})
        self.assertRaises(Timeout, self.object_controller.REPLICATE, req)
    finally:
        # always restore the patched globals
        tpool.execute = was_tpool_exe
        diskfile.DiskFileManager._get_hashes = was_get_hashes
def test_REPLICATE_insufficient_storage(self):
    """REPLICATE answers 507 when the device fails the mount check."""
    self.object_controller = object_server.ObjectController(
        {'devices': self.testdir, 'mount_check': 'true'},
        logger=debug_logger())
    self.object_controller.bytes_per_sync = 1

    def unmounted(*args, **kwargs):
        # pretend every device is unmounted
        return False

    with mock.patch("swift.obj.diskfile.check_mount", unmounted):
        request = Request.blank('/sda1/p/suff',
                                environ={'REQUEST_METHOD': 'REPLICATE'},
                                headers={})
        response = request.get_response(self.object_controller)
        self.assertEqual(response.status_int, 507)
def test_SSYNC_can_be_called(self):
    """The SSYNC verb is routed by the controller and answers 200."""
    request = Request.blank('/sda1/0',
                            environ={'REQUEST_METHOD': 'SSYNC'},
                            headers={})
    response = request.get_response(self.object_controller)
    self.assertEqual(response.status_int, 200)
def test_PUT_with_full_drive(self):
    # When fallocate raises ENOSPC the server must answer 507 at the
    # 100-continue stage, before ever reading the request body.

    class IgnoredBody(object):
        # request-body stub that records whether it was ever read

        def __init__(self):
            self.read_called = False

        def read(self, size=-1):
            if not self.read_called:
                self.read_called = True
                return 'VERIFY'
            return ''

    def fake_fallocate(fd, size):
        # simulate a full drive
        raise OSError(errno.ENOSPC, os.strerror(errno.ENOSPC))

    orig_fallocate = diskfile.fallocate
    try:
        diskfile.fallocate = fake_fallocate
        timestamp = normalize_timestamp(time())
        body_reader = IgnoredBody()
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT',
                     'wsgi.input': body_reader},
            headers={'X-Timestamp': timestamp,
                     'Content-Length': '6',
                     'Content-Type': 'application/octet-stream',
                     'Expect': '100-continue'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 507)
        # the 507 was sent before the body was consumed
        self.assertFalse(body_reader.read_called)
    finally:
        diskfile.fallocate = orig_fallocate
def test_global_conf_callback_does_nothing(self):
    """With no replication settings, only the default semaphore appears."""
    app_conf = {}
    shared_conf = {}
    object_server.global_conf_callback(app_conf, shared_conf)
    self.assertEqual(app_conf, {})
    self.assertEqual(shared_conf.keys(), ['replication_semaphore'])
    sema = shared_conf['replication_semaphore'][0]
    try:
        value = sema.get_value()
    except NotImplementedError:
        # On some operating systems (at a minimum, OS X) it's not possible
        # to introspect the value of a semaphore
        raise SkipTest
    else:
        # default replication concurrency
        self.assertEqual(value, 4)
def test_global_conf_callback_replication_semaphore(self):
    """replication_concurrency sizes the shared BoundedSemaphore."""
    app_conf = {'replication_concurrency': 123}
    shared_conf = {}
    with mock.patch.object(
            object_server.multiprocessing, 'BoundedSemaphore',
            return_value='test1') as mocked_Semaphore:
        object_server.global_conf_callback(app_conf, shared_conf)
    # the preloaded conf is left untouched
    self.assertEqual(app_conf, {'replication_concurrency': 123})
    self.assertEqual(shared_conf, {'replication_semaphore': ['test1']})
    mocked_Semaphore.assert_called_once_with(123)
def test_handling_of_replication_semaphore_config(self):
    """ObjectController picks up replication_semaphore from its conf."""
    conf = {'devices': self.testdir, 'mount_check': 'false'}
    # absent from conf -> attribute is None
    self.assertTrue(
        object_server.ObjectController(conf).replication_semaphore is None)
    # present -> first element of the list is used
    conf['replication_semaphore'] = ['sema']
    self.assertEqual(
        object_server.ObjectController(conf).replication_semaphore, 'sema')
def test_serv_reserv(self):
    """replication_server flag is parsed from the configuration."""
    conf = {'devices': self.testdir, 'mount_check': 'false'}
    # unset -> None
    self.assertEqual(
        object_server.ObjectController(conf).replication_server, None)
    truthy_settings = [True, '1', 'True', 'true']
    falsy_settings = [False, 0, '0', 'False', 'false', 'test_string']
    for setting in truthy_settings:
        conf['replication_server'] = setting
        self.assertTrue(
            object_server.ObjectController(conf).replication_server)
    for setting in falsy_settings:
        conf['replication_server'] = setting
        self.assertFalse(
            object_server.ObjectController(conf).replication_server)
def test_list_allowed_methods(self):
    """Replication verbs carry the replication marker; object verbs don't."""
    for verb in ('DELETE', 'PUT', 'HEAD', 'GET', 'POST'):
        handler = getattr(self.object_controller, verb)
        self.assertFalse(hasattr(handler, 'replication'))
    for verb in ('REPLICATE', 'SSYNC'):
        handler = getattr(self.object_controller, verb)
        self.assertEqual(handler.replication, True)
def test_correct_allowed_method(self):
    # Test correct work for allowed method using
    # swift.obj.server.ObjectController.__call__
    inbuf = WsgiBytesIO()
    errbuf = StringIO()
    outbuf = StringIO()
    self.object_controller = object_server.app_factory(
        {'devices': self.testdir, 'mount_check': 'false',
         'replication_server': 'false'})

    def start_response(*args):
        # Sends args to outbuf
        outbuf.writelines(args)

    method = 'PUT'
    env = {'REQUEST_METHOD': method,
           'SCRIPT_NAME': '',
           'PATH_INFO': '/sda1/p/a/c/o',
           'SERVER_NAME': '127.0.0.1',
           'SERVER_PORT': '8080',
           'SERVER_PROTOCOL': 'HTTP/1.0',
           'CONTENT_LENGTH': '0',
           'wsgi.version': (1, 0),
           'wsgi.url_scheme': 'http',
           'wsgi.input': inbuf,
           'wsgi.errors': errbuf,
           'wsgi.multithread': False,
           'wsgi.multiprocess': False,
           'wsgi.run_once': False}
    method_res = mock.MagicMock()
    # public() marks the stub as an exposed verb so __call__ dispatches
    # to it instead of answering 405
    mock_method = public(lambda x:
                         mock.MagicMock(return_value=method_res))
    with mock.patch.object(self.object_controller, method,
                           new=mock_method):
        response = self.object_controller(env, start_response)
        self.assertEqual(response, method_res)
def test_not_allowed_method(self):
    # Test correct work for NOT allowed method using
    # swift.obj.server.ObjectController.__call__
    inbuf = WsgiBytesIO()
    errbuf = StringIO()
    outbuf = StringIO()
    self.object_controller = object_server.ObjectController(
        {'devices': self.testdir, 'mount_check': 'false',
         'replication_server': 'false'}, logger=self.logger)

    def start_response(*args):
        # Sends args to outbuf
        outbuf.writelines(args)

    method = 'PUT'
    env = {'REQUEST_METHOD': method,
           'SCRIPT_NAME': '',
           'PATH_INFO': '/sda1/p/a/c/o',
           'SERVER_NAME': '127.0.0.1',
           'SERVER_PORT': '8080',
           'SERVER_PROTOCOL': 'HTTP/1.0',
           'CONTENT_LENGTH': '0',
           'wsgi.version': (1, 0),
           'wsgi.url_scheme': 'http',
           'wsgi.input': inbuf,
           'wsgi.errors': errbuf,
           'wsgi.multithread': False,
           'wsgi.multiprocess': False,
           'wsgi.run_once': False}
    answer = ['<html><h1>Method Not Allowed</h1><p>The method is not '
              'allowed for this resource.</p></html>']
    # mark the PUT handler as replication-only; with
    # replication_server=false the request must be refused with a 405
    mock_method = replication(public(lambda x: mock.MagicMock()))
    with mock.patch.object(self.object_controller, method,
                           new=mock_method):
        mock_method.replication = True
        # pin clock and pid so the logged access line is deterministic
        with mock.patch('time.gmtime',
                        mock.MagicMock(side_effect=[gmtime(10001.0)])):
            with mock.patch('time.time',
                            mock.MagicMock(side_effect=[10000.0,
                                                        10001.0])):
                with mock.patch('os.getpid',
                                mock.MagicMock(return_value=1234)):
                    response = self.object_controller.__call__(
                        env, start_response)
                    self.assertEqual(response, answer)
                    self.assertEqual(
                        self.logger.get_lines_for_level('info'),
                        ['None - - [01/Jan/1970:02:46:41 +0000] "PUT'
                         ' /sda1/p/a/c/o" 405 - "-" "-" "-" 1.0000 "-"'
                         ' 1234 -'])
def test_call_incorrect_replication_method(self):
    # With replication_server=true, every ordinary object verb must be
    # refused with a 405 (only REPLICATE/SSYNC would be allowed).
    inbuf = StringIO()
    errbuf = StringIO()
    outbuf = StringIO()
    self.object_controller = object_server.ObjectController(
        {'devices': self.testdir, 'mount_check': 'false',
         'replication_server': 'true'}, logger=FakeLogger())

    def start_response(*args):
        """Sends args to outbuf"""
        outbuf.writelines(args)

    obj_methods = ['DELETE', 'PUT', 'HEAD', 'GET', 'POST', 'OPTIONS']
    for method in obj_methods:
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        self.object_controller(env, start_response)
        self.assertEqual(errbuf.getvalue(), '')
        # start_response received a '405 ...' status line
        self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_not_utf8_and_not_logging_requests(self):
    # A request path that is not valid UTF-8 (or contains NULL) must be
    # rejected before dispatch, and with log_requests disabled nothing
    # may be logged.
    inbuf = WsgiBytesIO()
    errbuf = StringIO()
    outbuf = StringIO()
    # Fix: the controller previously received a throwaway FakeLogger()
    # while the final assertion inspected self.logger, so the "nothing
    # logged" check could never fail.  Give the controller the same
    # logger the assertion reads.
    self.object_controller = object_server.ObjectController(
        {'devices': self.testdir, 'mount_check': 'false',
         'replication_server': 'false', 'log_requests': 'false'},
        logger=self.logger)

    def start_response(*args):
        # Sends args to outbuf
        outbuf.writelines(args)

    method = 'PUT'
    # PATH_INFO contains a NULL byte -> precondition failure
    env = {'REQUEST_METHOD': method,
           'SCRIPT_NAME': '',
           'PATH_INFO': '/sda1/p/a/c/\x00%20/%',
           'SERVER_NAME': '127.0.0.1',
           'SERVER_PORT': '8080',
           'SERVER_PROTOCOL': 'HTTP/1.0',
           'CONTENT_LENGTH': '0',
           'wsgi.version': (1, 0),
           'wsgi.url_scheme': 'http',
           'wsgi.input': inbuf,
           'wsgi.errors': errbuf,
           'wsgi.multithread': False,
           'wsgi.multiprocess': False,
           'wsgi.run_once': False}
    answer = ['Invalid UTF8 or contains NULL']
    mock_method = public(lambda x: mock.MagicMock())
    with mock.patch.object(self.object_controller, method,
                           new=mock_method):
        response = self.object_controller.__call__(env, start_response)
        self.assertEqual(response, answer)
        # log_requests=false: no access line was emitted
        self.assertEqual(self.logger.get_lines_for_level('info'), [])
def test__call__returns_500(self):
    # An unhandled exception inside a handler must surface as a 500
    # whose body is the traceback, with the error logged but no access
    # line (log_requests is disabled).
    inbuf = WsgiBytesIO()
    errbuf = StringIO()
    outbuf = StringIO()
    self.logger = debug_logger('test')
    self.object_controller = object_server.ObjectController(
        {'devices': self.testdir, 'mount_check': 'false',
         'replication_server': 'false', 'log_requests': 'false'},
        logger=self.logger)

    def start_response(*args):
        # Sends args to outbuf
        outbuf.writelines(args)

    method = 'PUT'
    env = {'REQUEST_METHOD': method,
           'SCRIPT_NAME': '',
           'PATH_INFO': '/sda1/p/a/c/o',
           'SERVER_NAME': '127.0.0.1',
           'SERVER_PORT': '8080',
           'SERVER_PROTOCOL': 'HTTP/1.0',
           'CONTENT_LENGTH': '0',
           'wsgi.version': (1, 0),
           'wsgi.url_scheme': 'http',
           'wsgi.input': inbuf,
           'wsgi.errors': errbuf,
           'wsgi.multithread': False,
           'wsgi.multiprocess': False,
           'wsgi.run_once': False}

    @public
    def mock_put_method(*args, **kwargs):
        # handler that always blows up
        raise Exception()

    with mock.patch.object(self.object_controller, method,
                           new=mock_put_method):
        response = self.object_controller.__call__(env, start_response)
        self.assertTrue(response[0].startswith(
            'Traceback (most recent call last):'))
        self.assertEqual(self.logger.get_lines_for_level('error'), [
            'ERROR __call__ error with %(method)s %(path)s : ' % {
                'method': 'PUT', 'path': '/sda1/p/a/c/o'},
        ])
        # log_requests=false: no access line was emitted
        self.assertEqual(self.logger.get_lines_for_level('info'), [])
    def test_PUT_slow(self):
        """With 'slow' configured, the server sleeps to pad each request up
        to the configured minimum duration."""
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false',
             'slow': '10'},
            logger=self.logger)

        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)

        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        mock_method = public(lambda x: mock.MagicMock())
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            # The request "takes" 1s (10000.0 -> 10001.0), so with slow=10
            # the server must pad with sleep(10 - 1) == sleep(9).
            with mock.patch('time.time',
                            mock.MagicMock(side_effect=[10000.0,
                                                        10001.0])):
                with mock.patch('swift.obj.server.sleep',
                                mock.MagicMock()) as ms:
                    self.object_controller.__call__(env, start_response)
                    ms.assert_called_with(9)
                    self.assertEqual(
                        self.logger.get_lines_for_level('info'),
                        [])
    def test_log_line_format(self):
        """Pin the exact access-log line: client IP, CLF timestamp, request
        line, status, transaction time and pid."""
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'HEAD', 'REMOTE_ADDR': '1.2.3.4'})
        self.object_controller.logger = self.logger
        # Freeze gmtime/time/getpid so the log line is fully deterministic;
        # time() is consumed 10000.0 -> 10002.0, hence the "2.0000" below.
        with mock.patch(
                'time.gmtime', mock.MagicMock(side_effect=[gmtime(10001.0)])):
            with mock.patch(
                    'time.time',
                    mock.MagicMock(side_effect=[10000.0, 10001.0, 10002.0])):
                with mock.patch(
                        'os.getpid', mock.MagicMock(return_value=1234)):
                    req.get_response(self.object_controller)
        self.assertEqual(
            self.logger.get_lines_for_level('info'),
            ['1.2.3.4 - - [01/Jan/1970:02:46:41 +0000] "HEAD /sda1/p/a/c/o" '
             '404 - "-" "-" "-" 2.0000 "-" 1234 -'])
    @patch_policies([StoragePolicy(0, 'zero', True),
                     StoragePolicy(1, 'one', False)])
    def test_dynamic_datadir(self):
        """A PUT lands in the objects-<index> datadir for its policy; with
        no policy-index header it falls back to the policy-0 'objects' dir."""
        # update router post patch
        self.object_controller._diskfile_router = diskfile.DiskFileRouter(
            self.conf, self.object_controller.logger)
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test',
                                     'Foo': 'fooheader',
                                     'Baz': 'bazheader',
                                     'X-Backend-Storage-Policy-Index': 1,
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        object_dir = self.testdir + "/sda1/objects-1"
        self.assertFalse(os.path.isdir(object_dir))
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        self.assertTrue(os.path.isdir(object_dir))
        # make sure no idx in header uses policy 0 data_dir
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test',
                                     'Foo': 'fooheader',
                                     'Baz': 'bazheader',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        object_dir = self.testdir + "/sda1/objects"
        self.assertFalse(os.path.isdir(object_dir))
        with mock.patch.object(POLICIES, 'get_by_index',
                               lambda _: True):
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            self.assertTrue(os.path.isdir(object_dir))
    def test_storage_policy_index_is_validated(self):
        """Every verb succeeds for each configured policy index; an index
        beyond the configured policies returns 503 and creates no datadir."""
        # sanity check that index for existing policy is ok
        ts = (utils.Timestamp(t).internal for t in
              itertools.count(int(time())))
        methods = ('PUT', 'POST', 'GET', 'HEAD', 'REPLICATE', 'DELETE')
        valid_indices = sorted([int(policy) for policy in POLICIES])
        for index in valid_indices:
            object_dir = self.testdir + "/sda1/objects"
            if index > 0:
                object_dir = "%s-%s" % (object_dir, index)
            self.assertFalse(os.path.isdir(object_dir))
            for method in methods:
                headers = {
                    'X-Timestamp': next(ts),
                    'Content-Type': 'application/x-test',
                    'X-Backend-Storage-Policy-Index': index}
                # EC policies require a frag index on PUT
                if POLICIES[index].policy_type == EC_POLICY:
                    headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
                req = Request.blank(
                    '/sda1/p/a/c/o',
                    environ={'REQUEST_METHOD': method},
                    headers=headers)
                req.body = 'VERIFY'
                resp = req.get_response(self.object_controller)
                self.assertTrue(is_success(resp.status_int),
                                '%s method failed: %r' % (method, resp.status))
        # index for non-existent policy should return 503
        index = valid_indices[-1] + 1
        for method in methods:
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': method},
                                headers={
                                    'X-Timestamp': next(ts),
                                    'Content-Type': 'application/x-test',
                                    'X-Backend-Storage-Policy-Index': index})
            req.body = 'VERIFY'
            object_dir = self.testdir + "/sda1/objects-%s" % index
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 503)
            self.assertFalse(os.path.isdir(object_dir))
    def test_race_doesnt_quarantine(self):
        """A PUT racing a DELETE (injected between the DELETE's listdir and
        read_metadata) must not trigger quarantine; the PUT's data wins."""
        existing_timestamp = normalize_timestamp(time())
        delete_timestamp = normalize_timestamp(time() + 1)
        put_timestamp = normalize_timestamp(time() + 2)
        # make a .ts
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': existing_timestamp})
        req.get_response(self.object_controller)
        # force a PUT between the listdir and read_metadata of a DELETE
        put_once = [False]
        orig_listdir = os.listdir

        def mock_listdir(path):
            # First listdir call sneaks in a full PUT before returning,
            # simulating the race.
            listing = orig_listdir(path)
            if not put_once[0]:
                put_once[0] = True
                req = Request.blank(
                    '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                    headers={'X-Timestamp': put_timestamp,
                             'Content-Length': '9',
                             'Content-Type': 'application/octet-stream'})
                req.body = 'some data'
                resp = req.get_response(self.object_controller)
                self.assertEqual(resp.status_int, 201)
            return listing

        with mock.patch('os.listdir', mock_listdir):
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
                headers={'X-Timestamp': delete_timestamp})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
        # nothing was quarantined and the racing PUT's object survives
        qdir = os.path.join(self.testdir, 'sda1', 'quarantined')
        self.assertFalse(os.path.exists(qdir))
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['X-Timestamp'], put_timestamp)
    def test_multiphase_put_draining(self):
        # We want to ensure that we read the whole response body even if
        # it's multipart MIME and there's document parts that we don't
        # expect or understand. This'll help save our bacon if we ever jam
        # more stuff in there.
        in_a_timeout = [False]

        # inherit from BaseException so we get a stack trace when the test
        # fails instead of just a 500
        class NotInATimeout(BaseException):
            pass

        class FakeTimeout(BaseException):
            def __enter__(self):
                in_a_timeout[0] = True

            def __exit__(self, typ, value, tb):
                in_a_timeout[0] = False

        class PickyWsgiBytesIO(WsgiBytesIO):
            # Any read outside a ChunkReadTimeout context blows up, proving
            # the server only consumes input under its timeout guard.
            def read(self, *a, **kw):
                if not in_a_timeout[0]:
                    raise NotInATimeout()
                return WsgiBytesIO.read(self, *a, **kw)

            def readline(self, *a, **kw):
                if not in_a_timeout[0]:
                    raise NotInATimeout()
                return WsgiBytesIO.readline(self, *a, **kw)

        test_data = 'obj data'
        footer_meta = {
            "X-Object-Sysmeta-Ec-Frag-Index": "7",
            "Etag": md5(test_data).hexdigest(),
        }
        footer_json = json.dumps(footer_meta)
        footer_meta_cksum = md5(footer_json).hexdigest()
        # body + footer + two extra MIME documents the server should drain
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
            "X-Document: object metadata",
            "Content-MD5: " + footer_meta_cksum,
            "",
            footer_json,
            "--boundary123",
            "X-Document: we got cleverer",
            "",
            "stuff stuff meaningless stuuuuuuuuuuff",
            "--boundary123",
            "X-Document: we got even cleverer; can you believe it?",
            "Waneshaft: ambifacient lunar",
            "Casing: malleable logarithmic",
            "",
            "potato potato potato potato potato potato potato",
            "--boundary123--"
        ))
        if six.PY3:
            test_doc = test_doc.encode('utf-8')

        # phase1 - PUT request with object metadata in footer and
        # multiphase commit conversation
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Storage-Policy-Index': '1',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Metadata-Footer': 'yes',
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
        }
        wsgi_input = PickyWsgiBytesIO(test_doc)
        req = Request.blank(
            "/sda1/0/a/c/o",
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': wsgi_input},
            headers=headers)
        app = object_server.ObjectController(self.conf, logger=self.logger)
        with mock.patch('swift.obj.server.ChunkReadTimeout', FakeTimeout):
            resp = req.get_response(app)
            self.assertEqual(resp.status_int, 201)  # sanity check
        in_a_timeout[0] = True  # so we can check without an exception
        self.assertEqual(wsgi_input.read(), '')  # we read all the bytes
@patch_policies(test_policies)
class TestObjectServer(unittest.TestCase):
    def setUp(self):
        """Start a real object server on an ephemeral port over a tempdir."""
        # dirs
        self.tmpdir = tempfile.mkdtemp()
        self.tempdir = os.path.join(self.tmpdir, 'tmp_test_obj_server')
        self.devices = os.path.join(self.tempdir, 'srv/node')
        for device in ('sda1', 'sdb1'):
            os.makedirs(os.path.join(self.devices, device))
        self.conf = {
            'devices': self.devices,
            'swift_dir': self.tempdir,
            'mount_check': 'false',
        }
        self.logger = debug_logger('test-object-server')
        self.app = object_server.ObjectController(
            self.conf, logger=self.logger)
        # bind to port 0 -> kernel picks a free port; remember it for tests
        sock = listen(('127.0.0.1', 0))
        self.server = spawn(wsgi.server, sock, self.app, utils.NullLogger())
        self.port = sock.getsockname()[1]
    def tearDown(self):
        # Remove the temp tree created by setUp (devices, swift_dir, ...).
        rmtree(self.tmpdir)
def test_not_found(self):
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'GET', '/a/c/o')
resp = conn.getresponse()
self.assertEqual(resp.status, 404)
resp.read()
resp.close()
def test_expect_on_put(self):
test_body = 'test'
headers = {
'Expect': '100-continue',
'Content-Length': len(test_body),
'X-Timestamp': utils.Timestamp(time()).internal,
}
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 100)
conn.send(test_body)
resp = conn.getresponse()
self.assertEqual(resp.status, 201)
resp.read()
resp.close()
def test_expect_on_put_footer(self):
test_body = 'test'
headers = {
'Expect': '100-continue',
'Content-Length': len(test_body),
'X-Timestamp': utils.Timestamp(time()).internal,
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
}
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 100)
headers = HeaderKeyDict(resp.getheaders())
self.assertEqual(headers['X-Obj-Metadata-Footer'], 'yes')
resp.close()
def test_expect_on_put_conflict(self):
test_body = 'test'
put_timestamp = utils.Timestamp(time())
headers = {
'Expect': '100-continue',
'Content-Length': len(test_body),
'X-Timestamp': put_timestamp.internal,
}
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 100)
conn.send(test_body)
resp = conn.getresponse()
self.assertEqual(resp.status, 201)
resp.read()
resp.close()
# and again with same timestamp
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 409)
headers = HeaderKeyDict(resp.getheaders())
self.assertEqual(headers['X-Backend-Timestamp'], put_timestamp)
resp.read()
resp.close()
def test_multiphase_put_no_mime_boundary(self):
test_data = 'obj data'
put_timestamp = utils.Timestamp(time()).internal
headers = {
'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp,
'Transfer-Encoding': 'chunked',
'Expect': '100-continue',
'X-Backend-Obj-Content-Length': len(test_data),
'X-Backend-Obj-Multiphase-Commit': 'yes',
}
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 400)
resp.read()
resp.close()
    def test_expect_on_multiphase_put_diconnect(self):
        """A client that disconnects right after the first MIME boundary of
        a multiphase PUT should produce a 499 info log line and no errors."""
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Obj-Content-Length': 0,
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 100)
        headers = HeaderKeyDict(resp.getheaders())
        self.assertEqual(headers['X-Obj-Multiphase-Commit'], 'yes')
        conn.send('c\r\n--boundary123\r\n')
        # disconnect client
        conn.sock.fd._sock.close()
        # yield the hub a couple of times so the server greenthread can
        # notice the closed socket and finish logging
        for i in range(2):
            sleep(0)
        self.assertFalse(self.logger.get_lines_for_level('error'))
        for line in self.logger.get_lines_for_level('info'):
            self.assertIn(' 499 ', line)
def find_files(self):
found_files = defaultdict(list)
for root, dirs, files in os.walk(self.devices):
for filename in files:
_name, ext = os.path.splitext(filename)
file_path = os.path.join(root, filename)
found_files[ext].append(file_path)
return found_files
@contextmanager
def _check_multiphase_put_commit_handling(self,
test_doc=None,
headers=None,
finish_body=True):
"""
This helper will setup a multiphase chunked PUT request and yield at
the context at the commit phase (after getting the second expect-100
continue response.
It can setup a resonable stub request, but you can over-ride some
characteristics of the request via kwargs.
:param test_doc: first part of the mime conversation before the object
server will send the 100-continue, this includes the
object body
:param headers: headers to send along with the initial request; some
object-metadata (e.g. X-Backend-Obj-Content-Length)
is generally expected tomatch the test_doc)
:param finish_body: boolean, if true send "0\r\n\r\n" after test_doc
and wait for 100-continue before yeilding context
"""
test_data = 'obj data'
footer_meta = {
"X-Object-Sysmeta-Ec-Frag-Index": "2",
"Etag": md5(test_data).hexdigest(),
}
footer_json = json.dumps(footer_meta)
footer_meta_cksum = md5(footer_json).hexdigest()
test_doc = test_doc or "\r\n".join((
"--boundary123",
"X-Document: object body",
"",
test_data,
"--boundary123",
"X-Document: object metadata",
"Content-MD5: " + footer_meta_cksum,
"",
footer_json,
"--boundary123",
))
# phase1 - PUT request with object metadata in footer and
# multiphase commit conversation
put_timestamp = utils.Timestamp(time())
headers = headers or {
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
'Expect': '100-continue',
'X-Backend-Storage-Policy-Index': '1',
'X-Backend-Obj-Content-Length': len(test_data),
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
'X-Backend-Obj-Multiphase-Commit': 'yes',
}
put_timestamp = utils.Timestamp(headers.setdefault(
'X-Timestamp', utils.Timestamp(time()).internal))
container_update = \
'swift.obj.server.ObjectController.container_update'
with mock.patch(container_update) as _container_update:
conn = bufferedhttp.http_connect(
'127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 100)
expect_headers = HeaderKeyDict(resp.getheaders())
to_send = "%x\r\n%s\r\n" % (len(test_doc), test_doc)
conn.send(to_send)
if finish_body:
conn.send("0\r\n\r\n")
# verify 100-continue response to mark end of phase1
resp = conn.getexpect()
self.assertEqual(resp.status, 100)
# yield relevant context for test
yield {
'conn': conn,
'expect_headers': expect_headers,
'put_timestamp': put_timestamp,
'mock_container_update': _container_update,
}
# give the object server a little time to trampoline enough to
# recognize request has finished, or socket has closed or whatever
sleep(0.1)
    def test_multiphase_put_client_disconnect_right_before_commit(self):
        """Disconnect between phase1 and the commit doc: .data is written
        but no .durable appears and no container update is made."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # just bail straight out
            conn.sock.fd._sock.close()
        put_timestamp = context['put_timestamp']
        _container_update = context['mock_container_update']
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # the object data was written, but never made durable
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but .durable isn't
        self.assertEqual(found_files['.durable'], [])
        # And no container update
        self.assertFalse(_container_update.called)
    def test_multiphase_put_client_disconnect_in_the_middle_of_commit(self):
        """Disconnect part-way through the commit document: .data exists
        but no .durable is written and no container update happens."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # start commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            # but don't send the complete commit body (note the [:-1])
            to_send = "%x\r\n%s" % \
                (len(commit_confirmation_doc), commit_confirmation_doc[:-1])
            conn.send(to_send)
            # and then bail out
            conn.sock.fd._sock.close()
        put_timestamp = context['put_timestamp']
        _container_update = context['mock_container_update']
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # the object data was written, but never made durable
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but .durable isn't
        self.assertEqual(found_files['.durable'], [])
        # And no container update
        self.assertFalse(_container_update.called)
    def test_multiphase_put_no_metadata_replicated(self):
        """Replicated-policy multiphase PUT (no footer, no frag index): a
        plain .data file is written, no .durable, and the container is
        updated."""
        test_data = 'obj data'
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
        ))
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        with self._check_multiphase_put_commit_handling(
                test_doc=test_doc, headers=headers) as context:
            expect_headers = context['expect_headers']
            self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
            # N.B. no X-Obj-Metadata-Footer header
            self.assertNotIn('X-Obj-Metadata-Footer', expect_headers)
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to make end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            resp.close()
        # verify successful object data file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # replicated objects do not have a .durable file
        self.assertEqual(found_files['.durable'], [])
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_metadata_footer(self):
        """Happy-path EC multiphase PUT: footer accepted, commit answered
        with 201, frag #2 .data plus .durable on disk, container updated."""
        with self._check_multiphase_put_commit_handling() as context:
            expect_headers = context['expect_headers']
            self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
            self.assertEqual(expect_headers['X-Obj-Metadata-Footer'], 'yes')
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to make end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            resp.close()
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # .durable file is there
        self.assertEqual(len(found_files['.durable']), 1)
        durable_file = found_files['.durable'][0]
        self.assertEqual("%s.durable" % put_timestamp.internal,
                         os.path.basename(durable_file))
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_metadata_footer_disconnect(self):
        """Disconnect while the footer document is still incomplete: no
        files are left on disk and no container update is made."""
        test_data = 'obj data'
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
        ))
        # eventlet.wsgi won't return < network_chunk_size from a chunked read
        self.app.network_chunk_size = 16
        with self._check_multiphase_put_commit_handling(
                test_doc=test_doc, finish_body=False) as context:
            conn = context['conn']
            # make footer doc
            footer_meta = {
                "X-Object-Sysmeta-Ec-Frag-Index": "2",
                "Etag": md5(test_data).hexdigest(),
            }
            footer_json = json.dumps(footer_meta)
            footer_meta_cksum = md5(footer_json).hexdigest()
            # send most of the footer doc
            footer_doc = "\r\n".join((
                "X-Document: object metadata",
                "Content-MD5: " + footer_meta_cksum,
                "",
                footer_json,
            ))
            # but don't send final boundary nor last chunk
            to_send = "%x\r\n%s\r\n" % \
                (len(footer_doc), footer_doc)
            conn.send(to_send)
            # and then bail out
            conn.sock.fd._sock.close()
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # no artifacts left on disk
        found_files = self.find_files()
        self.assertEqual(len(found_files['.data']), 0)
        self.assertEqual(len(found_files['.durable']), 0)
        # ... and no container update
        _container_update = context['mock_container_update']
        self.assertFalse(_container_update.called)
    def test_multiphase_put_ec_fragment_in_headers_no_footers(self):
        """EC multiphase PUT with the frag index sent in the request
        headers instead of a MIME footer: commit still succeeds and frag
        #2 .data plus .durable are written."""
        test_data = 'obj data'
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
        ))
        # phase1 - PUT request with multiphase commit conversation
        # no object metadata in footer
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            # normally the frag index gets sent in the MIME footer (which this
            # test doesn't have, see `test_multiphase_put_metadata_footer`),
            # but the proxy *could* send the frag index in the headers and
            # this test verifies that would work.
            'X-Object-Sysmeta-Ec-Frag-Index': '2',
            'X-Backend-Storage-Policy-Index': '1',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        with self._check_multiphase_put_commit_handling(
                test_doc=test_doc, headers=headers) as context:
            expect_headers = context['expect_headers']
            self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
            # N.B. no X-Obj-Metadata-Footer header
            self.assertNotIn('X-Obj-Metadata-Footer', expect_headers)
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to make end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            resp.close()
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # .durable file is there
        self.assertEqual(len(found_files['.durable']), 1)
        durable_file = found_files['.durable'][0]
        self.assertEqual("%s.durable" % put_timestamp.internal,
                         os.path.basename(durable_file))
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_bad_commit_message(self):
        """A malformed commit document yields a 500; the .data file stays
        but no .durable is written and the container is not updated."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "junkjunk",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            resp = conn.getresponse()
            self.assertEqual(resp.status, 500)
            resp.read()
            resp.close()
        put_timestamp = context['put_timestamp']
        _container_update = context['mock_container_update']
        # verify that durable file was NOT created
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but .durable isn't
        self.assertEqual(found_files['.durable'], [])
        # And no container update
        self.assertFalse(_container_update.called)
    def test_multiphase_put_drains_extra_commit_junk(self):
        """Extra MIME documents after the commit confirmation are drained;
        the PUT succeeds and the connection stays usable for a follow-up
        request."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123",
                "X-Document: we got cleverer",
                "",
                "stuff stuff meaningless stuuuuuuuuuuff",
                "--boundary123",
                "X-Document: we got even cleverer; can you believe it?",
                "Waneshaft: ambifacient lunar",
                "Casing: malleable logarithmic",
                "",
                "potato potato potato potato potato potato potato",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to make end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            # make another request to validate the HTTP protocol state
            conn.putrequest('GET', '/sda1/0/a/c/o')
            conn.putheader('X-Backend-Storage-Policy-Index', '1')
            conn.endheaders()
            resp = conn.getresponse()
            self.assertEqual(resp.status, 200)
            resp.read()
            resp.close()
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # .durable file is there
        self.assertEqual(len(found_files['.durable']), 1)
        durable_file = found_files['.durable'][0]
        self.assertEqual("%s.durable" % put_timestamp.internal,
                         os.path.basename(durable_file))
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_drains_extra_commit_junk_disconnect(self):
        """Disconnect while the server is draining post-commit junk: the
        commit already happened, so .data and .durable exist, but the
        response was never delivered (499) and no container update runs."""
        commit_confirmation_doc = "\r\n".join((
            "X-Document: put commit",
            "",
            "commit_confirmation",
            "--boundary123",
            "X-Document: we got cleverer",
            "",
            "stuff stuff meaningless stuuuuuuuuuuff",
            "--boundary123",
            "X-Document: we got even cleverer; can you believe it?",
            "Waneshaft: ambifacient lunar",
            "Casing: malleable logarithmic",
            "",
            "potato potato potato potato potato potato potato",
        ))
        # eventlet.wsgi won't return < network_chunk_size from a chunked read
        self.app.network_chunk_size = 16
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # send commit confirmation and some other stuff
            # but don't send final boundary or last chunk
            to_send = "%x\r\n%s\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # and then bail out
            conn.sock.fd._sock.close()
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # ... and .durable is there
        self.assertEqual(len(found_files['.durable']), 1)
        durable_file = found_files['.durable'][0]
        self.assertEqual("%s.durable" % put_timestamp.internal,
                         os.path.basename(durable_file))
        # but no container update
        self.assertFalse(context['mock_container_update'].called)
@patch_policies
class TestZeroCopy(unittest.TestCase):
"""Test the object server's zero-copy functionality"""
def _system_can_zero_copy(self):
if not splice.available:
return False
try:
utils.get_md5_socket()
except IOError:
return False
return True
    def setUp(self):
        """Start a splice-enabled object server and open an HTTP connection
        to it; skip entirely when the platform can't zero-copy."""
        if not self._system_can_zero_copy():
            raise SkipTest("zero-copy support is missing")
        self.testdir = mkdtemp(suffix="obj_server_zero_copy")
        mkdirs(os.path.join(self.testdir, 'sda1', 'tmp'))
        conf = {'devices': self.testdir,
                'mount_check': 'false',
                'splice': 'yes',
                'disk_chunk_size': '4096'}
        self.object_controller = object_server.ObjectController(
            conf, logger=debug_logger())
        self.df_mgr = diskfile.DiskFileManager(
            conf, self.object_controller.logger)
        # ephemeral port; the greenthread serves requests for the tests
        listener = listen(('localhost', 0))
        port = listener.getsockname()[1]
        self.wsgi_greenlet = spawn(
            wsgi.server, listener, self.object_controller, NullLogger())
        self.http_conn = httplib.HTTPConnection('127.0.0.1', port)
        self.http_conn.connect()
    def tearDown(self):
        """Tear down for testing swift.obj.server.ObjectController"""
        self.wsgi_greenlet.kill()
        rmtree(self.testdir)
def test_GET(self):
url_path = '/sda1/2100/a/c/o'
self.http_conn.request('PUT', url_path, 'obj contents',
{'X-Timestamp': '127082564.24709'})
response = self.http_conn.getresponse()
self.assertEqual(response.status, 201)
response.read()
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200)
contents = response.read()
self.assertEqual(contents, 'obj contents')
def test_GET_big(self):
# Test with a large-ish object to make sure we handle full socket
# buffers correctly.
obj_contents = 'A' * 4 * 1024 * 1024 # 4 MiB
url_path = '/sda1/2100/a/c/o'
self.http_conn.request('PUT', url_path, obj_contents,
{'X-Timestamp': '1402600322.52126'})
response = self.http_conn.getresponse()
self.assertEqual(response.status, 201)
response.read()
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200)
contents = response.read()
self.assertEqual(contents, obj_contents)
    def test_quarantine(self):
        """A GET that reads corrupted on-disk data serves the (bad) bytes
        once, then quarantines the object so the next GET is a 404."""
        obj_hash = hash_path('a', 'c', 'o')
        url_path = '/sda1/2100/a/c/o'
        ts = '1402601849.47475'
        self.http_conn.request('PUT', url_path, 'obj contents',
                               {'X-Timestamp': ts})
        response = self.http_conn.getresponse()
        self.assertEqual(response.status, 201)
        response.read()
        # go goof up the file on disk
        fname = os.path.join(self.testdir, 'sda1', 'objects', '2100',
                             obj_hash[-3:], obj_hash, ts + '.data')
        with open(fname, 'rb+') as fh:
            fh.write('XYZ')
        self.http_conn.request('GET', url_path)
        response = self.http_conn.getresponse()
        self.assertEqual(response.status, 200)
        contents = response.read()
        # first three bytes were overwritten in place above
        self.assertEqual(contents, 'XYZ contents')
        self.http_conn.request('GET', url_path)
        response = self.http_conn.getresponse()
        # it was quarantined by the previous request
        self.assertEqual(response.status, 404)
        response.read()
def test_quarantine_on_well_formed_zero_byte_file(self):
    # Make sure we work around an oddity in Linux's hash sockets
    path = '/sda1/2100/a/c/o'
    ts = '1402700497.71333'
    conn = self.http_conn
    conn.request(
        'PUT', path, '',
        {'X-Timestamp': ts, 'Content-Length': '0'})
    resp = conn.getresponse()
    self.assertEqual(resp.status, 201)
    resp.read()
    # Two consecutive GETs: the zero-byte object must still be there the
    # second time, i.e. it must not have been quarantined by the first read.
    for _ in range(2):
        conn.request('GET', path)
        resp = conn.getresponse()
        self.assertEqual(resp.status, 200)  # still there
        self.assertEqual(resp.read(), '')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "78df73f238da1de6d51642e25aa075f9",
"timestamp": "",
"source": "github",
"line_count": 6140,
"max_line_length": 79,
"avg_line_length": 44.10732899022801,
"alnum_prop": 0.5234603185153184,
"repo_name": "mjwtom/swift",
"id": "351c98642b3faab4796130df11ccc52efe23258d",
"size": "271432",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/unit/obj/test_server.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6824999"
},
{
"name": "Shell",
"bytes": "2278"
}
],
"symlink_target": ""
} |
from tests import BaseTestCase
class StaticDataManagerTestCase(BaseTestCase):
    """Tests for the static-data endpoints of the Riot API wrapper.

    HTTP traffic is replayed from recorded cassettes via ``self.vcr()``,
    so the expected counts/names below are pinned to those recordings.
    """

    def setUp(self):
        # Imported inside setUp so collecting this module does not require
        # riotpy to be importable.
        from riotpy import RiotApi
        from riotpy.constants import Region
        self.api = RiotApi(api_key=self.get_test_api_key(), region=Region.BR)

    def test_get_champions(self):
        """Full champion list matches the recorded response size."""
        with self.vcr():
            champions = self.api.static_data.get_champions()
        self.assertEqual(len(champions), 126)

    def test_get_champion(self):
        """Single champion lookup by numeric id."""
        with self.vcr():
            mordekaiser = self.api.static_data.get_champion(82)
        self.assertEqual(mordekaiser.name, 'Mordekaiser')

    def test_get_items(self):
        """Full item list matches the recorded response size."""
        with self.vcr():
            items = self.api.static_data.get_items()
        self.assertEqual(len(items), 287)

    def test_get_item(self):
        """Single item lookup; name is localized (pt-BR, region BR)."""
        with self.vcr():
            giants_belt = self.api.static_data.get_item(1011)
        self.assertEqual(giants_belt.name, u'Cinto do Gigante')
| {
"content_hash": "e14042311d2c119761526a6b95136273",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 77,
"avg_line_length": 28.575757575757574,
"alnum_prop": 0.6362672322375398,
"repo_name": "jarussi/riotpy",
"id": "e3516e702aee3dedd29f6f4d02370f20beadf4a1",
"size": "959",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_static_data.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1165"
},
{
"name": "Python",
"bytes": "43513"
}
],
"symlink_target": ""
} |
"""Tests for Keras Premade WideNDeep models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.feature_column import dense_features_v2
from tensorflow.python.feature_column import feature_column_v2 as fc
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.engine import input_layer
from tensorflow.python.keras.engine import sequential
from tensorflow.python.keras.engine import training
from tensorflow.python.keras.layers import core
from tensorflow.python.keras.optimizer_v2 import gradient_descent
from tensorflow.python.keras.premade import linear
from tensorflow.python.keras.premade import wide_deep
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class WideDeepModelTest(keras_parameterized.TestCase):
  """Functional tests for the premade ``wide_deep.WideDeepModel``."""

  def test_wide_deep_model(self):
    """Compiles with per-branch optimizers, fits, and ends up built."""
    linear_model = linear.LinearModel(units=1)
    dnn_model = sequential.Sequential([core.Dense(units=1, input_dim=3)])
    wide_deep_model = wide_deep.WideDeepModel(linear_model, dnn_model)
    linear_inp = np.random.uniform(low=-5, high=5, size=(64, 2))
    dnn_inp = np.random.uniform(low=-5, high=5, size=(64, 3))
    inputs = [linear_inp, dnn_inp]
    output = .3 * linear_inp[:, 0] + .2 * dnn_inp[:, 1]
    # One optimizer string per sub-model: 'sgd' for linear, 'adam' for dnn.
    wide_deep_model.compile(
        optimizer=['sgd', 'adam'],
        loss='mse',
        metrics=[],
        run_eagerly=testing_utils.should_run_eagerly())
    wide_deep_model.fit(inputs, output, epochs=5)
    self.assertTrue(wide_deep_model.built)

  def test_wide_deep_model_backprop(self):
    """After one epoch, each branch's kernel reflects its own SGD rate."""
    with self.cached_session():
      linear_model = linear.LinearModel(units=1, kernel_initializer='zeros')
      dnn_model = sequential.Sequential(
          [core.Dense(units=1, kernel_initializer='zeros')])
      wide_deep_model = wide_deep.WideDeepModel(linear_model, dnn_model)
      linear_inp = np.array([1.])
      dnn_inp = np.array([1.])
      inputs = [linear_inp, dnn_inp]
      output = linear_inp + 2 * dnn_inp
      linear_opt = gradient_descent.SGD(learning_rate=.1)
      dnn_opt = gradient_descent.SGD(learning_rate=.3)
      wide_deep_model.compile(
          optimizer=[linear_opt, dnn_opt],
          loss='mse',
          metrics=[],
          run_eagerly=testing_utils.should_run_eagerly())
      self.evaluate(variables.global_variables_initializer())
      wide_deep_model.fit(inputs, output, epochs=1)
      # Expected kernels (0.6 and 1.8) presumably follow from one SGD step
      # with the respective learning rates on this single sample — the exact
      # derivation is not shown here; values are pinned to observed behavior.
      self.assertAllClose(
          [[0.6]],
          self.evaluate(wide_deep_model.linear_model.dense_layers[0].kernel))
      self.assertAllClose([[1.8]],
                          self.evaluate(
                              wide_deep_model.dnn_model.layers[0].kernel))

  def test_wide_deep_model_with_single_input(self):
    """A single shared input feeds both the linear and the DNN branch."""
    linear_model = linear.LinearModel(units=1)
    dnn_model = sequential.Sequential([core.Dense(units=1, input_dim=3)])
    wide_deep_model = wide_deep.WideDeepModel(linear_model, dnn_model)
    inputs = np.random.uniform(low=-5, high=5, size=(64, 3))
    output = .3 * inputs[:, 0]
    wide_deep_model.compile(
        optimizer=['sgd', 'adam'],
        loss='mse',
        metrics=[],
        run_eagerly=testing_utils.should_run_eagerly())
    wide_deep_model.fit(inputs, output, epochs=5)

  def test_wide_deep_model_with_multi_outputs(self):
    """Sub-models with two outputs each: outputs are summed pairwise."""
    with context.eager_mode():
      inp = input_layer.Input(shape=(1,), name='linear')
      l = linear.LinearModel(units=2, use_bias=False)(inp)
      l1, l2 = array_ops.split(l, num_or_size_splits=2, axis=1)
      linear_model = training.Model(inp, [l1, l2])
      linear_model.set_weights([np.asarray([[0.5, 0.3]])])
      h = core.Dense(units=2, use_bias=False)(inp)
      h1, h2 = array_ops.split(h, num_or_size_splits=2, axis=1)
      dnn_model = training.Model(inp, [h1, h2])
      dnn_model.set_weights([np.asarray([[0.1, -0.5]])])
      wide_deep_model = wide_deep.WideDeepModel(linear_model, dnn_model)
      inp_np = np.asarray([[1.]])
      out1, out2 = wide_deep_model(inp_np)
      # output should be (0.5 + 0.1), and (0.3 - 0.5)
      self.assertAllClose([[0.6]], out1)
      self.assertAllClose([[-0.2]], out2)
      # With an activation, it is applied to each combined output.
      wide_deep_model = wide_deep.WideDeepModel(
          linear_model, dnn_model, activation='relu')
      out1, out2 = wide_deep_model(inp_np)
      # output should be relu((0.5 + 0.1)), and relu((0.3 - 0.5))
      self.assertAllClose([[0.6]], out1)
      self.assertAllClose([[0.]], out2)

  def test_wide_deep_model_with_single_optimizer(self):
    """A single optimizer string is accepted for both branches."""
    linear_model = linear.LinearModel(units=1)
    dnn_model = sequential.Sequential([core.Dense(units=1, input_dim=3)])
    wide_deep_model = wide_deep.WideDeepModel(linear_model, dnn_model)
    linear_inp = np.random.uniform(low=-5, high=5, size=(64, 2))
    dnn_inp = np.random.uniform(low=-5, high=5, size=(64, 3))
    inputs = [linear_inp, dnn_inp]
    output = .3 * linear_inp[:, 0] + .2 * dnn_inp[:, 1]
    wide_deep_model.compile(
        optimizer='sgd',
        loss='mse',
        metrics=[],
        run_eagerly=testing_utils.should_run_eagerly())
    wide_deep_model.fit(inputs, output, epochs=5)
    self.assertTrue(wide_deep_model.built)

  def test_wide_deep_model_as_layer(self):
    """The model composes as a layer inside a larger functional model."""
    linear_model = linear.LinearModel(units=1)
    dnn_model = sequential.Sequential([core.Dense(units=1)])
    linear_input = input_layer.Input(shape=(3,), name='linear')
    dnn_input = input_layer.Input(shape=(5,), name='dnn')
    wide_deep_model = wide_deep.WideDeepModel(linear_model, dnn_model)
    wide_deep_output = wide_deep_model((linear_input, dnn_input))
    input_b = input_layer.Input(shape=(1,), name='b')
    output_b = core.Dense(units=1)(input_b)
    model = training.Model(
        inputs=[linear_input, dnn_input, input_b],
        outputs=[wide_deep_output + output_b])
    linear_input_np = np.random.uniform(low=-5, high=5, size=(64, 3))
    dnn_input_np = np.random.uniform(low=-5, high=5, size=(64, 5))
    input_b_np = np.random.uniform(low=-5, high=5, size=(64,))
    output_np = linear_input_np[:, 0] + .2 * dnn_input_np[:, 1] + input_b_np
    model.compile(
        optimizer='sgd',
        loss='mse',
        metrics=[],
        run_eagerly=testing_utils.should_run_eagerly())
    model.fit([linear_input_np, dnn_input_np, input_b_np], output_np, epochs=5)

  def test_wide_deep_model_with_sub_model_trained(self):
    """Training works after sub-models of the same shape were pre-trained."""
    linear_model = linear.LinearModel(units=1)
    dnn_model = sequential.Sequential([core.Dense(units=1, input_dim=3)])
    # NOTE: the WideDeepModel is built from *fresh* sub-model instances, not
    # the two trained below; those are trained independently first.
    wide_deep_model = wide_deep.WideDeepModel(
        linear.LinearModel(units=1),
        sequential.Sequential([core.Dense(units=1, input_dim=3)]))
    linear_inp = np.random.uniform(low=-5, high=5, size=(64, 2))
    dnn_inp = np.random.uniform(low=-5, high=5, size=(64, 3))
    inputs = [linear_inp, dnn_inp]
    output = .3 * linear_inp[:, 0] + .2 * dnn_inp[:, 1]
    linear_model.compile(
        optimizer='sgd',
        loss='mse',
        metrics=[],
        run_eagerly=testing_utils.should_run_eagerly())
    dnn_model.compile(
        optimizer='adam',
        loss='mse',
        metrics=[],
        run_eagerly=testing_utils.should_run_eagerly())
    linear_model.fit(linear_inp, output, epochs=50)
    dnn_model.fit(dnn_inp, output, epochs=50)
    wide_deep_model.compile(
        optimizer=['sgd', 'adam'],
        loss='mse',
        metrics=[],
        run_eagerly=testing_utils.should_run_eagerly())
    wide_deep_model.fit(inputs, output, epochs=50)

  # This test is an example for cases where linear and dnn model accepts
  # same raw input and same transformed inputs, i.e., the raw input is
  # categorical, and both linear and dnn model accept one hot encoding.
  def test_wide_deep_model_with_single_feature_column(self):
    vocab_list = ['alpha', 'beta', 'gamma']
    vocab_val = [0.4, 0.6, 0.9]
    data = np.random.choice(vocab_list, size=256)
    y = np.zeros_like(data, dtype=np.float32)
    # Targets: per-vocab value plus a little noise so the fit is non-trivial.
    for vocab, val in zip(vocab_list, vocab_val):
      indices = np.where(data == vocab)
      y[indices] = val + np.random.uniform(
          low=-0.01, high=0.01, size=indices[0].shape)
    cat_column = fc.categorical_column_with_vocabulary_list(
        key='symbol', vocabulary_list=vocab_list)
    ind_column = fc.indicator_column(cat_column)
    dense_feature_layer = dense_features_v2.DenseFeatures([ind_column])
    linear_model = linear.LinearModel(
        use_bias=False, kernel_initializer='zeros')
    dnn_model = sequential.Sequential([core.Dense(units=1)])
    wide_deep_model = wide_deep.WideDeepModel(linear_model, dnn_model)
    # One shared feature layer feeds both branches.
    combined = sequential.Sequential([dense_feature_layer, wide_deep_model])
    opt = gradient_descent.SGD(learning_rate=0.1)
    combined.compile(
        opt,
        'mse', [],
        run_eagerly=testing_utils.should_run_eagerly())
    combined.fit(x={'symbol': data}, y=y, batch_size=32, epochs=10)

  # This test is an example for cases where linear and dnn model accepts
  # same raw input but different transformed inputs, i.e,. the raw input is
  # categorical, and linear model accepts one hot encoding, while dnn model
  # accepts embedding encoding.
  def test_wide_deep_model_with_two_feature_columns(self):
    vocab_list = ['alpha', 'beta', 'gamma']
    vocab_val = [0.4, 0.6, 0.9]
    data = np.random.choice(vocab_list, size=256)
    y = np.zeros_like(data, dtype=np.float32)
    for vocab, val in zip(vocab_list, vocab_val):
      indices = np.where(data == vocab)
      y[indices] = val + np.random.uniform(
          low=-0.01, high=0.01, size=indices[0].shape)
    cat_column = fc.categorical_column_with_vocabulary_list(
        key='symbol', vocabulary_list=vocab_list)
    ind_column = fc.indicator_column(cat_column)
    emb_column = fc.embedding_column(cat_column, dimension=5)
    # Linear branch: one-hot; DNN branch: embedding — separate feature layers.
    linear_feature_layer = dense_features_v2.DenseFeatures([ind_column])
    linear_model = linear.LinearModel(
        use_bias=False, kernel_initializer='zeros')
    combined_linear = sequential.Sequential(
        [linear_feature_layer, linear_model])
    dnn_model = sequential.Sequential([core.Dense(units=1)])
    dnn_feature_layer = dense_features_v2.DenseFeatures([emb_column])
    combined_dnn = sequential.Sequential([dnn_feature_layer, dnn_model])
    wide_deep_model = wide_deep.WideDeepModel(combined_linear, combined_dnn)
    opt = gradient_descent.SGD(learning_rate=0.1)
    wide_deep_model.compile(
        opt,
        'mse', [],
        run_eagerly=testing_utils.should_run_eagerly())
    wide_deep_model.fit(x={'symbol': data}, y=y, batch_size=32, epochs=10)

  def test_config(self):
    """get_config/from_config round-trips the sub-model architectures."""
    linear_model = linear.LinearModel(units=1)
    dnn_model = sequential.Sequential([core.Dense(units=1, input_dim=3)])
    wide_deep_model = wide_deep.WideDeepModel(linear_model, dnn_model)
    config = wide_deep_model.get_config()
    cloned_wide_deep_model = wide_deep.WideDeepModel.from_config(config)
    self.assertEqual(linear_model.units,
                     cloned_wide_deep_model.linear_model.units)
    self.assertEqual(dnn_model.layers[0].units,
                     cloned_wide_deep_model.dnn_model.layers[0].units)

  def test_config_with_custom_objects(self):
    """Custom activation functions survive a config round-trip."""
    def my_activation(x):
      return x

    linear_model = linear.LinearModel(units=1)
    dnn_model = sequential.Sequential([core.Dense(units=1, input_dim=3)])
    wide_deep_model = wide_deep.WideDeepModel(
        linear_model, dnn_model, activation=my_activation)
    config = wide_deep_model.get_config()
    cloned_wide_deep_model = wide_deep.WideDeepModel.from_config(
        config, custom_objects={'my_activation': my_activation})
    self.assertEqual(cloned_wide_deep_model.activation, my_activation)
# Allow running this test module directly.
if __name__ == '__main__':
  test.main()
| {
"content_hash": "0a8b44fe41ccb08769f77cab5a2e06fc",
"timestamp": "",
"source": "github",
"line_count": 265,
"max_line_length": 79,
"avg_line_length": 44.864150943396226,
"alnum_prop": 0.6618723189502902,
"repo_name": "xzturn/tensorflow",
"id": "12d569331bf6d7efc8c557e20870ce725d5e355c",
"size": "12578",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/premade/wide_deep_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "31572"
},
{
"name": "Batchfile",
"bytes": "55269"
},
{
"name": "C",
"bytes": "905803"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "82703929"
},
{
"name": "CMake",
"bytes": "6967"
},
{
"name": "Dockerfile",
"bytes": "113964"
},
{
"name": "Go",
"bytes": "1871773"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "1048075"
},
{
"name": "Jupyter Notebook",
"bytes": "550861"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "2139315"
},
{
"name": "Makefile",
"bytes": "66796"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "319649"
},
{
"name": "PHP",
"bytes": "4236"
},
{
"name": "Pascal",
"bytes": "318"
},
{
"name": "Pawn",
"bytes": "20422"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "38461380"
},
{
"name": "RobotFramework",
"bytes": "1779"
},
{
"name": "Roff",
"bytes": "2705"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "SWIG",
"bytes": "6846"
},
{
"name": "Shell",
"bytes": "696334"
},
{
"name": "Smarty",
"bytes": "35725"
},
{
"name": "Starlark",
"bytes": "3678649"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
import os
import sys
import operator
import time
import logging
import enso.config
from enso.messages import displayMessage
def cmd_enso(ensoapi, cmd):
    """ Enso system command """
    # Dispatch on the requested sub-command; unrecognized values are ignored.
    if cmd == 'about':
        displayMessage(enso.config.ABOUT_BOX_XML)
    elif cmd == "commands":
        ensoapi.display_message("Enso commands", "enso")
    elif cmd == 'quit':
        enso.stop()
    # NOTE(review): 'help' is advertised in valid_args below but has no
    # branch here — confirm whether that is intentional.
cmd_enso.valid_args = ['about', 'help', 'quit', 'commands']
# vim:set tabstop=4 shiftwidth=4 expandtab:
| {
"content_hash": "1408839699c77df8ffe618b6c841f31a",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 59,
"avg_line_length": 23.5,
"alnum_prop": 0.6382978723404256,
"repo_name": "tartakynov/enso",
"id": "92e1f7eaa328869b105529e0c48a1a417765ba89",
"size": "517",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/commands/enso.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "9734342"
},
{
"name": "C++",
"bytes": "403798"
},
{
"name": "JavaScript",
"bytes": "3338"
},
{
"name": "Objective-C",
"bytes": "15094"
},
{
"name": "Python",
"bytes": "765642"
},
{
"name": "Shell",
"bytes": "235"
}
],
"symlink_target": ""
} |
import random
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc
from sqlalchemy.orm import joinedload
from neutron.common import constants as q_const
from neutron.common import utils as n_utils
from neutron.db import agents_db
from neutron.db import l3_agentschedulers_db as l3agent_sch_db
from neutron.db import model_base
from neutron.db import models_v2
from neutron.openstack.common import log as logging
from neutron.plugins.ml2 import db as ml2_db
LOG = logging.getLogger(__name__)
class CentralizedSnatL3AgentBinding(model_base.BASEV2):
    """Represents binding between Neutron Centralized SNAT and L3 agents."""

    __tablename__ = "csnat_l3_agent_bindings"

    # Router being bound; rows are removed with the router (ON DELETE CASCADE).
    router_id = sa.Column(sa.String(36),
                          sa.ForeignKey("routers.id", ondelete='CASCADE'),
                          primary_key=True)
    # L3 agent hosting the centralized SNAT; cascades with the agent row.
    l3_agent_id = sa.Column(sa.String(36),
                            sa.ForeignKey("agents.id", ondelete='CASCADE'),
                            nullable=False)
    host_id = sa.Column(sa.String(255))
    # Port acting as the SNAT gateway port for this router.
    csnat_gw_port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'))
    l3_agent = orm.relationship(agents_db.Agent)
    csnat_gw_port = orm.relationship(models_v2.Port)
class L3_DVRsch_db_mixin(l3agent_sch_db.L3AgentSchedulerDbMixin):
    """Mixin class for L3 DVR scheduler.

    DVR currently supports the following use cases:

    - East/West (E/W) traffic between VMs: this is handled in a
      distributed manner across Compute Nodes without a centralized element.
      This includes E/W traffic between VMs on the same Compute Node.
    - North/South traffic for Floating IPs (FIP N/S): this is supported on the
      distributed routers on Compute Nodes without any centralized element.
    - North/South traffic for SNAT (SNAT N/S): this is supported via a
      centralized element that handles the SNAT traffic.

    To support these use cases, DVR routers rely on an L3 agent that runs on a
    central node (also known as Network Node or Service Node), as well as, L3
    agents that run individually on each Compute Node of an OpenStack cloud.

    Each L3 agent creates namespaces to route traffic according to the use
    cases outlined above. The mechanism adopted for creating and managing
    these namespaces is via (Router, Agent) binding and Scheduling in general.

    The main difference between distributed routers and centralized ones is
    that in the distributed case, multiple bindings will exist, one for each
    of the agents participating in the routed topology for the specific router.

    These bindings are created in the following circumstances:

    - A subnet is added to a router via router-interface-add, and that subnet
      has running VM's deployed in it. A binding will be created between the
      router and any L3 agent whose Compute Node is hosting the VM(s).
    - An external gateway is set to a router via router-gateway-set. A binding
      will be created between the router and the L3 agent running centrally
      on the Network Node.

    Therefore, any time a router operation occurs (create, update or delete),
    scheduling will determine whether the router needs to be associated to an
    L3 agent, just like a regular centralized router, with the difference that,
    in the distributed case, the bindings required are established based on
    the state of the router and the Compute Nodes.
    """

    def dvr_update_router_addvm(self, context, port):
        """Notify agents of the distributed router serving a new VM port."""
        ips = port['fixed_ips']
        for ip in ips:
            subnet = ip['subnet_id']
            # Find DVR interface ports on the same subnet as the VM port.
            filter_sub = {'fixed_ips': {'subnet_id': [subnet]},
                          'device_owner':
                          [q_const.DEVICE_OWNER_DVR_INTERFACE]}
            router_id = None
            ports = self._core_plugin.get_ports(context, filters=filter_sub)
            # NOTE(review): the loop variable shadows the ``port`` argument;
            # after the first subnet iteration the original port dict is gone.
            for port in ports:
                router_id = port['device_id']
                router_dict = self.get_router(context, router_id)
                if router_dict.get('distributed', False):
                    payload = {'subnet_id': subnet}
                    self.l3_rpc_notifier.routers_updated(
                        context, [router_id], None, payload)
                    # Only the first distributed router per subnet is notified.
                    break
            LOG.debug('DVR: dvr_update_router_addvm %s ', router_id)

    def get_dvr_routers_by_portid(self, context, port_id):
        """Gets the dvr routers on vmport subnets."""
        router_ids = set()
        port_dict = self._core_plugin.get_port(context, port_id)
        fixed_ips = port_dict['fixed_ips']
        for fixedip in fixed_ips:
            vm_subnet = fixedip['subnet_id']
            filter_sub = {'fixed_ips': {'subnet_id': [vm_subnet]},
                          'device_owner':
                          [q_const.DEVICE_OWNER_DVR_INTERFACE]}
            subnet_ports = self._core_plugin.get_ports(
                context, filters=filter_sub)
            # The device_id of a DVR interface port is its router's id.
            for subnet_port in subnet_ports:
                router_ids.add(subnet_port['device_id'])
        return router_ids

    def get_subnet_ids_on_router(self, context, router_id):
        """Return subnet IDs for interfaces attached to the given router."""
        subnet_ids = set()
        filter_rtr = {'device_id': [router_id]}
        int_ports = self._core_plugin.get_ports(context, filters=filter_rtr)
        for int_port in int_ports:
            int_ips = int_port['fixed_ips']
            # Only the first fixed IP's subnet is considered per port.
            int_subnet = int_ips[0]['subnet_id']
            subnet_ids.add(int_subnet)
        return subnet_ids

    def check_ports_active_on_host_and_subnet(self, context, host,
                                              port_id, subnet_id):
        """Check if there is any dvr serviceable port on the subnet_id."""
        filter_sub = {'fixed_ips': {'subnet_id': [subnet_id]}}
        ports = self._core_plugin.get_ports(context, filters=filter_sub)
        for port in ports:
            # A port other than port_id that is ACTIVE, DVR-serviceable and
            # bound to the same host keeps the namespace alive.
            if (n_utils.is_dvr_serviced(port['device_owner'])
                and port['status'] == 'ACTIVE'
                and port['binding:host_id'] == host
                and port['id'] != port_id):
                LOG.debug('DVR: Active port exists for subnet %(subnet_id)s '
                          'on host %(host)s', {'subnet_id': subnet_id,
                                               'host': host})
                return True
        return False

    def dvr_deletens_if_no_port(self, context, port_id):
        """Delete the DVR namespace if no dvr serviced port exists."""
        admin_context = context.elevated()
        router_ids = self.get_dvr_routers_by_portid(admin_context, port_id)
        port_host = ml2_db.get_port_binding_host(port_id)
        if not router_ids:
            LOG.debug('No namespaces available for this DVR port %(port)s '
                      'on host %(host)s', {'port': port_id,
                                           'host': port_host})
            return []
        removed_router_info = []
        for router_id in router_ids:
            subnet_ids = self.get_subnet_ids_on_router(admin_context,
                                                       router_id)
            # Skip routers that still serve another active port on this host.
            port_exists_on_subnet = False
            for subnet in subnet_ids:
                if self.check_ports_active_on_host_and_subnet(admin_context,
                                                              port_host,
                                                              port_id,
                                                              subnet):
                    port_exists_on_subnet = True
                    break
            if port_exists_on_subnet:
                continue
            filter_rtr = {'device_id': [router_id],
                          'device_owner':
                          [q_const.DEVICE_OWNER_DVR_INTERFACE]}
            int_ports = self._core_plugin.get_ports(
                admin_context, filters=filter_rtr)
            for prt in int_ports:
                dvr_binding = (ml2_db.
                               get_dvr_port_binding_by_host(context.session,
                                                            prt['id'],
                                                            port_host))
                if dvr_binding:
                    # unbind this port from router
                    dvr_binding['router_id'] = None
                    dvr_binding.update(dvr_binding)
            agent = self._get_agent_by_type_and_host(context,
                                                     q_const.AGENT_TYPE_L3,
                                                     port_host)
            info = {'router_id': router_id, 'host': port_host,
                    'agent_id': str(agent.id)}
            removed_router_info.append(info)
            LOG.debug('Router namespace %(router_id)s on host %(host)s '
                      'to be deleted', info)
        return removed_router_info

    def bind_snat_router(self, context, router_id, chosen_agent):
        """Bind the router to the chosen l3 agent."""
        with context.session.begin(subtransactions=True):
            binding = CentralizedSnatL3AgentBinding()
            binding.l3_agent = chosen_agent
            binding.router_id = router_id
            context.session.add(binding)
            LOG.debug('SNAT Router %(router_id)s is scheduled to L3 agent '
                      '%(agent_id)s', {'router_id': router_id,
                                       'agent_id': chosen_agent.id})

    def bind_dvr_router_servicenode(self, context, router_id,
                                    chosen_snat_agent):
        """Bind the IR router to service node if not already hosted."""
        query = (context.session.query(l3agent_sch_db.RouterL3AgentBinding).
                 filter_by(router_id=router_id))
        for bind in query:
            if bind.l3_agent_id == chosen_snat_agent.id:
                LOG.debug('Distributed Router %(router_id)s already hosted '
                          'on snat l3_agent %(snat_id)s',
                          {'router_id': router_id,
                           'snat_id': chosen_snat_agent.id})
                return
        with context.session.begin(subtransactions=True):
            binding = l3agent_sch_db.RouterL3AgentBinding()
            binding.l3_agent = chosen_snat_agent
            binding.router_id = router_id
            context.session.add(binding)
            LOG.debug('Binding the distributed router %(router_id)s to '
                      'the snat agent %(snat_id)s',
                      {'router_id': router_id,
                       'snat_id': chosen_snat_agent.id})

    def bind_snat_servicenode(self, context, router_id, snat_candidates):
        """Bind the snat router to the chosen l3 service agent."""
        # Candidate selection is random among eligible agents.
        chosen_snat_agent = random.choice(snat_candidates)
        self.bind_snat_router(context, router_id, chosen_snat_agent)
        return chosen_snat_agent

    def unbind_snat_servicenode(self, context, router_id):
        """Unbind the snat router to the chosen l3 service agent."""
        vm_ports = []
        with context.session.begin(subtransactions=True):
            query = (context.session.
                     query(CentralizedSnatL3AgentBinding).
                     filter_by(router_id=router_id))
            try:
                binding = query.one()
            except exc.NoResultFound:
                LOG.debug('no snat router binding found for %s', router_id)
                return
            host = binding.l3_agent.host
            # Keep the agent binding if any subnet of the router still has
            # VM ports on the SNAT host.
            subnet_ids = self.get_subnet_ids_on_router(context, router_id)
            for subnet in subnet_ids:
                vm_ports = (
                    self._core_plugin.get_ports_on_host_by_subnet(
                        context, host, subnet))
                if vm_ports:
                    LOG.debug('One or more ports exist on the snat enabled '
                              'l3_agent host %(host)s and router_id %(id)s',
                              {'host': host, 'id': router_id})
                    break
            agent_id = binding.l3_agent_id
            LOG.debug('Delete binding of the SNAT router %(router_id)s '
                      'from agent %(id)s', {'router_id': router_id,
                                            'id': agent_id})
            context.session.delete(binding)
            if not vm_ports:
                # NOTE(review): ``query`` is rebound here to the row count
                # returned by delete(); the value is never used.
                query = (context.session.
                         query(l3agent_sch_db.RouterL3AgentBinding).
                         filter_by(router_id=router_id,
                                   l3_agent_id=agent_id).
                         delete(synchronize_session=False))
        self.l3_rpc_notifier.router_removed_from_agent(
            context, router_id, host)
        LOG.debug('Removed binding for router %(router_id)s and '
                  'agent %(id)s', {'router_id': router_id, 'id': agent_id})

    def get_snat_bindings(self, context, router_ids):
        """ Retrieves the dvr snat bindings for a router."""
        if not router_ids:
            return []
        query = context.session.query(CentralizedSnatL3AgentBinding)
        # Eager-load the agent relationship to avoid per-row lookups.
        query = query.options(joinedload('l3_agent')).filter(
            CentralizedSnatL3AgentBinding.router_id.in_(router_ids))
        return query.all()

    def schedule_snat_router(self, context, router_id, sync_router):
        """Schedule the snat router on l3 service agent."""
        active_l3_agents = self.get_l3_agents(context, active=True)
        if not active_l3_agents:
            LOG.warn(_('No active L3 agents found for SNAT'))
            return
        snat_candidates = self.get_snat_candidates(sync_router,
                                                   active_l3_agents)
        if snat_candidates:
            chosen_agent = self.bind_snat_servicenode(
                context, router_id, snat_candidates)
            self.bind_dvr_router_servicenode(
                context, router_id, chosen_agent)
            return chosen_agent
| {
"content_hash": "1a46b4c52aa1cbef54293fa5927d8acd",
"timestamp": "",
"source": "github",
"line_count": 292,
"max_line_length": 79,
"avg_line_length": 47.94178082191781,
"alnum_prop": 0.5598971355096792,
"repo_name": "redhat-openstack/neutron",
"id": "10a7e256e41603d6d6456d8d9142247828d963db",
"size": "14666",
"binary": false,
"copies": "1",
"ref": "refs/heads/f22-patches",
"path": "neutron/db/l3_dvrscheduler_db.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1043"
},
{
"name": "Python",
"bytes": "10433756"
},
{
"name": "Shell",
"bytes": "11069"
}
],
"symlink_target": ""
} |
"""
"""
__import__('pkg_resources').declare_namespace(__name__)
#EOF
| {
"content_hash": "aab976c88b1f4a61aedca532050e84a9",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 55,
"avg_line_length": 11.833333333333334,
"alnum_prop": 0.5774647887323944,
"repo_name": "samjy/sloot.object",
"id": "361de315d7d7d579fe2576991e7cff133100a512",
"size": "118",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sloot/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18093"
}
],
"symlink_target": ""
} |
import django_filters.rest_framework as filters
from .models import Monster, SkillEffect, Skill, LeaderSkill, ScalingStat
class MonsterFilter(filters.FilterSet):
    """FilterSet for the Monster API endpoint."""

    # Case-insensitive prefix match, implemented in filter_name() below.
    name = filters.CharFilter(method='filter_name')
    element = filters.MultipleChoiceFilter(choices=Monster.ELEMENT_CHOICES)
    archetype = filters.MultipleChoiceFilter(choices=Monster.ARCHETYPE_CHOICES)
    awaken_level = filters.MultipleChoiceFilter(choices=Monster.AWAKEN_CHOICES)
    # Filters that reach through the related LeaderSkill record.
    leader_skill_attribute = filters.MultipleChoiceFilter(field_name='leader_skill__attribute', choices=LeaderSkill.ATTRIBUTE_CHOICES)
    leader_skill_area = filters.MultipleChoiceFilter(field_name='leader_skill__area', choices=LeaderSkill.AREA_CHOICES)
    order_by = filters.OrderingFilter(fields=[
        'name', 'element', 'base_stars', 'natural_stars', 'archetype', 'com2us_id', 'family_id',
        'raw_hp', 'raw_attack', 'raw_defense',
        'base_hp', 'base_attack', 'base_defense',
        'max_lvl_hp', 'max_lvl_attack', 'max_lvl_defense',
        'speed', 'crit_rate', 'crit_damage', 'resistance', 'accuracy',
    ])

    class Meta:
        model = Monster
        fields = {
            'id': ['in'],
            'com2us_id': ['exact'],
            'family_id': ['exact'],
            'base_stars': ['exact', 'lte', 'gte'],
            'natural_stars': ['exact', 'lte', 'gte'],
            'obtainable': ['exact'],
            'fusion_food': ['exact'],
            'homunculus': ['exact'],
        }

    def filter_name(self, queryset, name, value):
        """Prefix-match on monster name; empty value leaves queryset as-is."""
        if value:
            return queryset.filter(name__istartswith=value)
        else:
            return queryset
class SkillFilter(filters.FilterSet):
    """FilterSet for the Skill API endpoint."""

    name = filters.CharFilter(method='filter_name')
    description = filters.CharFilter(method='filter_description')
    # conjoined=True: a skill must scale with *all* selected stats.
    scaling_stats__pk = filters.ModelMultipleChoiceFilter(queryset=ScalingStat.objects.all(), to_field_name='pk', conjoined=True)
    # Toggle only; interpreted inside filter_effects() — see filter_effects_logic().
    effects_logic = filters.BooleanFilter(method='filter_effects_logic')
    effect__pk = filters.ModelMultipleChoiceFilter(queryset=SkillEffect.objects.all(), method='filter_effects')
    used_on = filters.NumberFilter(method='filter_used_on')

    class Meta:
        model = Skill
        fields = {
            'id': ['in'],
            'name': ['exact'],
            'com2us_id': ['exact'],
            'slot': ['exact'],
            'cooltime': ['exact', 'isnull', 'gte', 'lte', 'gt', 'lt'],
            'hits': ['exact', 'isnull', 'gte', 'lte', 'gt', 'lt'],
            'aoe': ['exact'],
            'passive': ['exact'],
            'max_level': ['exact', 'gte', 'lte', 'gt', 'lt'],
        }

    def filter_name(self, queryset, name, value):
        """Case-insensitive prefix match on skill name."""
        return queryset.filter(name__istartswith=value)

    def filter_description(self, queryset, name, value):
        """Case-insensitive substring match on skill description."""
        return queryset.filter(description__icontains=value)

    def filter_effects(self, queryset, name, value):
        """Filter by skill effects, combined with the stat-scaling selection.

        ``effects_logic`` chooses between two modes (see inline comments).
        """
        old_filtering = self.form.cleaned_data.get('effects_logic', False)
        stat_scaling = self.form.cleaned_data.get('scaling_stats__pk', [])

        if old_filtering:
            # Filter if any skill on the monster has the designated fields
            for effect in value:
                queryset = queryset.filter(effect=effect)

            for pk in stat_scaling:
                queryset = queryset.filter(scaling_stats=pk)

            return queryset.distinct()
        else:
            # Filter effects based on effects of each individual skill. This ensures a monster will not show up unless it has
            # the desired effects on the same skill rather than across any skills.
            skills = Skill.objects.all()

            for effect in value:
                skills = skills.filter(effect=effect)

            for pk in stat_scaling:
                skills = skills.filter(scaling_stats=pk)

            return queryset.filter(pk__in=skills).distinct()

    def filter_used_on(self, queryset, name, value):
        """Restrict to skills belonging to the given monster primary key."""
        return queryset.filter(monster__pk=value)

    def filter_effects_logic(self, queryset, name, value):
        # This field is just used to alter the logic of skill effect filter
        return queryset
| {
"content_hash": "a4557f99cd9444d2edaa3f33c176443c",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 134,
"avg_line_length": 41.64,
"alnum_prop": 0.6162343900096061,
"repo_name": "PeteAndersen/swarfarm",
"id": "a0d79765d07f7490fdd6c18cb7e6abd10966d3de",
"size": "4164",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bestiary/api_filters.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "31891"
},
{
"name": "HTML",
"bytes": "352588"
},
{
"name": "JavaScript",
"bytes": "79075"
},
{
"name": "Python",
"bytes": "982216"
},
{
"name": "Shell",
"bytes": "3403"
}
],
"symlink_target": ""
} |
from .. import HopliteAppTestCase
from hoplite.api import create_app
class HopliteApiTestCase(HopliteAppTestCase):
    """Base test case whose Flask app is built against the bundled test jobs."""

    def _create_app(self):
        # Configure the Hoplite API app with the 'hoplite.test_jobs'
        # job package so API tests run against known fixture jobs.
        return create_app('hoplite.test_jobs')
| {
"content_hash": "d6aafff4a250c95e39b9feaea5a1ef5c",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 46,
"avg_line_length": 27.285714285714285,
"alnum_prop": 0.7539267015706806,
"repo_name": "ni/hoplite",
"id": "37e128cec7dc8cdc9c7c236c463d1ee63812f262",
"size": "191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/api/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "367"
},
{
"name": "Python",
"bytes": "238787"
}
],
"symlink_target": ""
} |
from .circuits import *
from .providers import *
| {
"content_hash": "e446be9861af487588eda69908ff1810",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 24,
"avg_line_length": 24.5,
"alnum_prop": 0.7551020408163265,
"repo_name": "digitalocean/netbox",
"id": "7bbaf75d367e288c6d58764a6f3f6ee78621d25b",
"size": "49",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "netbox/circuits/models/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "189339"
},
{
"name": "HTML",
"bytes": "570800"
},
{
"name": "JavaScript",
"bytes": "326125"
},
{
"name": "Python",
"bytes": "1815170"
},
{
"name": "Shell",
"bytes": "2786"
}
],
"symlink_target": ""
} |
import ldap as pyldap
import ldap.sasl as sasl
import ldap.modlist
import re
from datetime import datetime, date
from copy import deepcopy
class CSHLDAP:
    """Convenience wrapper around python-ldap for the CSH directory.

    Three bind styles are supported:
      * ``app=True``    -- simple bind as an application account under ``bind``
      * ``simple=True`` -- simple bind as a user account under ``base``
      * default         -- SASL/GSSAPI (Kerberos) bind; requires a valid
                           ticket obtained via ``kinit``
    """

    def __init__(self, user, password, host='ldaps://ldap.csh.rit.edu:636',
                 base='ou=Users,dc=csh,dc=rit,dc=edu',
                 bind='ou=Apps,dc=csh,dc=rit,dc=edu', app=False, simple=False):
        self.host = host
        self.base = base
        self.users = 'ou=Users,dc=csh,dc=rit,dc=edu'
        self.groups = 'ou=Groups,dc=csh,dc=rit,dc=edu'
        self.committees = 'ou=Committees,dc=csh,dc=rit,dc=edu'
        self.ldap = pyldap.initialize(host)
        self.ldap.set_option(pyldap.OPT_X_TLS_DEMAND, True)
        self.ldap.set_option(pyldap.OPT_DEBUG_LEVEL, 255)
        if app:
            self.ldap.simple_bind('cn=' + user + ',' + bind, password)
        elif simple:
            self.ldap.simple_bind('uid=' + user + ',' + base, password)
        else:
            try:
                auth = sasl.gssapi("")
                self.ldap.sasl_interactive_bind_s("", auth)
                self.ldap.set_option(pyldap.OPT_DEBUG_LEVEL, 0)
            # fixed: legacy ``except pyldap.LDAPError, e`` comma syntax,
            # which is a syntax error on Python 3
            except pyldap.LDAPError as e:
                print('Are you sure you\'ve run kinit?')
                print(e)

    def members(self, uid="*", objects=False):
        """ members() issues an ldap query for all users, and returns a dict
            for each matching entry. This can be quite slow, and takes roughly
            3s to complete. You may optionally restrict the scope by specifying
            a uid, which is roughly equivalent to a search(uid='foo')
        """
        # fixed: the uid argument was previously ignored (hard-coded '*'),
        # so the documented scope restriction never took effect
        entries = self.search(uid=uid)
        if objects:
            return self.memberObjects(entries)
        result = []
        for entry in entries:
            result.append(entry[1])
        return result

    def member(self, user, objects=False):
        """ Returns a user as a dict of attributes
            (or None when no entry matches the uid).
        """
        try:
            member = self.search(uid=user, objects=objects)[0]
        except IndexError:
            return None
        if objects:
            return member
        return member[1]

    def eboard(self, objects=False):
        """ Returns a list of eboard members formatted as a search,
            inserts an extra ['committee'] attribute
        """
        # self.committees used as base because that's where eboard
        # info is kept
        committees = self.search(base=self.committees, cn='*')
        directors = []
        for committee in committees:
            for head in committee[1]['head']:
                director = self.search(dn=head)[0]
                director[1]['committee'] = committee[1]['cn'][0]
                directors.append(director)
        if objects:
            return self.memberObjects(directors)
        return directors

    def group(self, group_cn, objects=False):
        """ Returns the members of the given group as search results
            (empty list when the group does not exist).
        """
        members = self.search(base=self.groups, cn=group_cn)
        if len(members) == 0:
            return members
        else:
            member_dns = members[0][1]['member']
            members = []
            for member_dn in member_dns:
                members.append(self.search(dn=member_dn)[0])
            if objects:
                return self.memberObjects(members)
            return members

    def getGroups(self, member_dn):
        """ Returns the list of group cn's the given member dn belongs to. """
        searchResult = self.search(base=self.groups, member=member_dn)
        if len(searchResult) == 0:
            return []
        groupList = []
        for group in searchResult:
            groupList.append(group[1]['cn'][0])
        return groupList

    def drinkAdmins(self, objects=False):
        """ Returns a list of drink admins uids
        """
        admins = self.group('drink', objects=objects)
        return admins

    def rtps(self, objects=False):
        """ Returns the members of the rtp group. """
        rtps = self.group('rtp', objects=objects)
        return rtps

    def trimResult(self, result):
        # Drop the dn from each (dn, attrs) pair, keeping only the attrs.
        return [x[1] for x in result]

    def search(self, base=False, trim=False, objects=False, **kwargs):
        """ Returns matching entries for search in ldap
            structured as [(dn, {attributes})]
            UNLESS searching by dn, in which case the first match
            is returned
        """
        scope = pyldap.SCOPE_SUBTREE
        if not base:
            base = self.users

        filterstr = ''
        for key, value in kwargs.iteritems():
            filterstr += '({0}={1})'.format(key, value)
            if key == 'dn':
                # dn is not a filterable attribute; search the entry itself
                filterstr = '(objectClass=*)'
                base = value
                scope = pyldap.SCOPE_BASE
                break
        if len(kwargs) > 1:
            filterstr = '(&' + filterstr + ')'

        # fixed: previously always passed SCOPE_SUBTREE here, so the
        # SCOPE_BASE chosen for dn lookups above was silently ignored
        result = self.ldap.search_s(base, scope, filterstr, ['*', '+'])

        if base == self.users:
            for member in result:
                groups = self.getGroups(member[0])
                member[1]['groups'] = groups
                if 'eboard' in member[1]['groups']:
                    member[1]['committee'] = self.search(base=self.committees, \
                            head=member[0])[0][1]['cn'][0]
        if objects:
            return self.memberObjects(result)
        finalResult = self.trimResult(result) if trim else result
        return finalResult

    def modify(self, uid, base=False, **kwargs):
        """ Modifies the given attributes of the member with the given uid,
            restricted to attributes the entry already has.
        """
        if not base:
            base = self.users
        # fixed: the dn previously hard-coded the Users OU, ignoring ``base``
        dn = 'uid=' + uid + ',' + base
        old_attrs = self.member(uid)
        new_attrs = deepcopy(old_attrs)
        for field, value in kwargs.iteritems():
            if field in old_attrs:
                new_attrs[field] = [str(value)]
        modlist = pyldap.modlist.modifyModlist(old_attrs, new_attrs)
        self.ldap.modify_s(dn, modlist)

    def memberObjects(self, searchResults):
        """ Wraps raw (dn, attrs) search results into Member objects. """
        results = []
        for result in searchResults:
            newMember = Member(result, ldap=self)
            results.append(newMember)
        return results
class Member(object):
    def __init__(self, member, ldap=None):
        """ Creates and returns a member object from which LDAP fields
            are accessible as properties. If you supply an LDAP connection,
            the object will use that connection to reload its data and
            modify its fields if you choose.
        """
        # Internal bookkeeping attributes that must NOT go through the
        # LDAP write-through logic in __setattr__.
        self.specialFields = ("memberDict", "ldap")
        # ``member`` is an ldap search result tuple: (dn, {attributes}).
        if len(member) < 2:
            self.memberDict = {}
        else:
            self.memberDict = member[1]
        self.ldap = ldap

    def __getattr__(self, attribute):
        """ Accesses the internal dictionary representation of
            a member and returns whatever data type it represents.
        """
        # NOTE: __getattr__ is only invoked for attributes not found via
        # normal lookup, so real instance attributes are unaffected.
        if (attribute == "specialFields" or
            attribute in self.specialFields):
            return object.__getattribute__(self, attribute)
        try:
            # Grab the object at that key. It will be a list,
            # if it exists.
            attributes = self.memberDict[attribute]

            # If we do get a list, and it only
            # contains one thing, just return that
            # one thing.
            if len(attributes) == 1:
                attribute = attributes[0]

                # If it's a digit, convert it to an int and return.
                if attribute.isdigit():
                    attribute = int(attribute)

                # Return the attribute.
                return attribute

            # Return the list.
            return attributes
        # If there was an error (i.e. that member doesn't have that
        # key in their LDAP store), then return None. We couldn't get it.
        except (KeyError, IndexError):
            return None

    def __setattr__(self, attribute, value):
        """ When setting an attribute with 'member.field = "value"',
            access the internal ldap connection from the constructor
            and modify that parameter.
        """
        if (attribute == "specialFields" or
            attribute in self.specialFields):
            return object.__setattr__(self, attribute, value)
        if attribute in ("memberDict", "ldap"):
            object.__setattr__(self, attribute, value)
            return
        # Without a connection we silently drop the write rather than
        # desynchronizing the local dict from the directory.
        if not self.ldap:
            return
        # Write-through: push the change to LDAP first, then mirror it
        # in the local dictionary.
        kwargs = {attribute: value}
        self.ldap.modify(uid=self.uid, **kwargs)
        self.memberDict[attribute] = value

    def fields(self):
        """ Returns all of the keys in the internal dictionary.
        """
        return self.memberDict.keys()

    def isActive(self):
        """ Is the user active?
        """
        # ``active`` comes from the LDAP entry via __getattr__ (None when
        # absent, which bool() maps to False).
        return bool(self.active)

    def isAlumni(self):
        """ Is the user an alumnus/a?
        """
        return bool(self.alumni)

    def isDrinkAdmin(self):
        """ Is the user a drink admin?
        """
        return bool(self.drinkAdmin)

    def isOnFloor(self):
        """ Is the user on floor?
        """
        return bool(self.onfloor)

    def isEboard(self):
        """ Is the user on Eboard?
        """
        return 'eboard' in self.groups

    def isRTP(self):
        """ Is the user an RTP?
        """
        return 'rtp' in self.groups

    def isBirthday(self):
        """ Is it the user's birthday today?
        """
        if not self.birthday:
            return False
        birthday = self.birthdate()
        today = date.today()
        # Compare month/day only; the year is irrelevant for birthdays.
        return (birthday.month == today.month and
                birthday.day == today.day)

    def birthdate(self):
        """ Converts the user's birthday (if it exists) to a datetime.date
            object that can easily be compared with other dates.
        """
        if not self.birthday:
            return None
        return dateFromLDAPTimestamp(self.birthday)

    def joindate(self):
        """ Converts the user's join date (if it exists) to a datetime.date
            object that can easily be compared with other dates.
        """
        if not self.memberSince:
            return None
        joined = self.memberSince
        return dateFromLDAPTimestamp(joined)

    def age(self):
        """ Returns the user's age, determined by their birthdate()
            (-1 when no birthdate is stored).
        """
        if not self.birthdate():
            return -1
        # Subtract one year if this year's birthday hasn't happened yet.
        adjuster = 0
        today = date.today()
        birthday = self.birthdate()
        if today.month == birthday.month:
            if today.day < birthday.day:
                adjuster -= 1
        elif today.month < birthday.month:
            adjuster -= 1
        return (today.year - birthday.year) + adjuster

    def reload(self):
        """ If there is an LDAP connection, query it for another
            instance of this member and set its internal dictionary
            to that result.
        """
        if not self.ldap:
            return
        self.memberDict = self.ldap.member(self.uid)

    def fullName(self):
        """ Returns a reliable full name (firstName lastName) for every
            member (as of the writing of this comment.)
        """
        # Fall back progressively: both names, one name, then the uid.
        if self.givenName and self.sn:
            return "{0} {1}".format(self.givenName, self.sn)
        if self.givenName:
            return self.givenName
        if self.sn:
            return self.sn
        return self.uid

    def __str__(self):
        """ Constructs a string representation of this person, containing
            every key and value in their internal dictionary.
        """
        string = ""
        for key in self.memberDict.keys():
            thing = self.__getattr__(key)
            string += str(key) + ": " + str(thing) + "\n"
        return string
def dateFromLDAPTimestamp(timestamp):
    """ Takes an LDAP date (In the form YYYYmmdd
        with whatever is after that) and returns a
        datetime.date object, or None when the value
        cannot be parsed.
    """
    # only check the first 8 characters: YYYYmmdd
    numberOfCharacters = len("YYYYmmdd")
    timestamp = timestamp[:numberOfCharacters]
    try:
        day = datetime.strptime(timestamp, '%Y%m%d')
    except ValueError:
        # fixed: was a bare ``except:`` that swallowed every exception
        # type and fell through to an implicit None; keep the diagnostic
        # print and make the None return explicit.
        print(timestamp)
        return None
    return date(year=day.year, month=day.month, day=day.day)
| {
"content_hash": "9ba52c30c5cc1490b10e641e46c6a6a4",
"timestamp": "",
"source": "github",
"line_count": 350,
"max_line_length": 117,
"avg_line_length": 34.38,
"alnum_prop": 0.5577162802293693,
"repo_name": "stevenmirabito/csh-asterisk",
"id": "328f85bf3cc356f97d0f8501d3ae64ab93b0f432",
"size": "12092",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CSHLDAP.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "246246"
}
],
"symlink_target": ""
} |
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1IngressList(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self, api_version=None, items=None, kind=None, metadata=None):
        """
        V1beta1IngressList - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        # Maps attribute name -> declared swagger type; to_dict() relies on
        # these keys to know which attributes to serialize.
        self.swagger_types = {
            'api_version': 'str',
            'items': 'list[V1beta1Ingress]',
            'kind': 'str',
            'metadata': 'V1ListMeta'
        }

        # Maps python attribute name -> JSON key in the API definition.
        self.attribute_map = {
            'api_version': 'apiVersion',
            'items': 'items',
            'kind': 'kind',
            'metadata': 'metadata'
        }

        self._api_version = api_version
        self._items = items
        self._kind = kind
        self._metadata = metadata

    @property
    def api_version(self):
        """
        Gets the api_version of this V1beta1IngressList.
        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources

        :return: The api_version of this V1beta1IngressList.
        :rtype: str
        """
        return self._api_version

    @api_version.setter
    def api_version(self, api_version):
        """
        Sets the api_version of this V1beta1IngressList.
        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources

        :param api_version: The api_version of this V1beta1IngressList.
        :type: str
        """
        self._api_version = api_version

    @property
    def items(self):
        """
        Gets the items of this V1beta1IngressList.
        Items is the list of Ingress.

        :return: The items of this V1beta1IngressList.
        :rtype: list[V1beta1Ingress]
        """
        return self._items

    @items.setter
    def items(self, items):
        """
        Sets the items of this V1beta1IngressList.
        Items is the list of Ingress.

        :param items: The items of this V1beta1IngressList.
        :type: list[V1beta1Ingress]
        :raises ValueError: if ``items`` is None (required field).
        """
        # ``items`` is the only required field of this model.
        if items is None:
            raise ValueError("Invalid value for `items`, must not be `None`")

        self._items = items

    @property
    def kind(self):
        """
        Gets the kind of this V1beta1IngressList.
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds

        :return: The kind of this V1beta1IngressList.
        :rtype: str
        """
        return self._kind

    @kind.setter
    def kind(self, kind):
        """
        Sets the kind of this V1beta1IngressList.
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds

        :param kind: The kind of this V1beta1IngressList.
        :type: str
        """
        self._kind = kind

    @property
    def metadata(self):
        """
        Gets the metadata of this V1beta1IngressList.
        Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata

        :return: The metadata of this V1beta1IngressList.
        :rtype: V1ListMeta
        """
        return self._metadata

    @metadata.setter
    def metadata(self, metadata):
        """
        Sets the metadata of this V1beta1IngressList.
        Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata

        :param metadata: The metadata of this V1beta1IngressList.
        :type: V1ListMeta
        """
        self._metadata = metadata

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        # Recursively serialize nested swagger models (anything exposing
        # to_dict), including ones nested inside lists and dicts.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, V1beta1IngressList):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| {
"content_hash": "027cedc2d45e6df54993e25eebbc4c25",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 281,
"avg_line_length": 32.38461538461539,
"alnum_prop": 0.5946159936658749,
"repo_name": "djkonro/client-python",
"id": "e0109f95cc08fd3eaa39aa7fab56d7388aa48558",
"size": "6332",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/v1beta1_ingress_list.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6527154"
},
{
"name": "Shell",
"bytes": "16522"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the boolean ``expired`` field to
    FailedAccessAttempt. The ``models`` dict below is a frozen ORM
    snapshot generated by South — do not edit it by hand.
    """

    def forwards(self, orm):
        # Adding field 'FailedAccessAttempt.expired'
        db.add_column('cerberos_failedaccessattempt', 'expired',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'FailedAccessAttempt.expired'
        db.delete_column('cerberos_failedaccessattempt', 'expired')

    models = {
        'cerberos.failedaccessattempt': {
            'Meta': {'object_name': 'FailedAccessAttempt'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'expired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'failed_logins': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'get_data': ('django.db.models.fields.TextField', [], {}),
            'http_accept': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'db_index': 'True'}),
            'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'path_info': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'post_data': ('django.db.models.fields.TextField', [], {}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
            'user_agent': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
        },
        'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        }
    }

    complete_apps = ['cerberos']
"content_hash": "163a2cfdd89154d859ddc1e0763c8853",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 131,
"avg_line_length": 52.93617021276596,
"alnum_prop": 0.5667202572347267,
"repo_name": "AdrianRibao/cerberos",
"id": "806c85975fda9eb0a6351904617de6e8faa97ba4",
"size": "2512",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cerberos/migrations/0005_auto__add_field_failedaccessattempt_expired.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "36250"
},
{
"name": "Shell",
"bytes": "5117"
}
],
"symlink_target": ""
} |
import base64
import logging
try:
import cPickle as pickle
except:
import pickle
from django.contrib.admin.widgets import RelatedFieldWidgetWrapper
from django.contrib.admin.widgets import AdminTextInputWidget
from django.forms import widgets, TextInput, Textarea, Media
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from selectable.forms.widgets import SelectableMediaMixin, SelectableMultiWidget, \
LookupMultipleHiddenInput
from django.utils.http import urlencode
from metashare import settings
from selectable.forms.widgets import AutoCompleteWidget, AutoCompleteSelectWidget
from django.forms.util import flatatt
from django.utils.encoding import force_unicode
from django import forms
# Setup logging support. Module-level logger wired to the project-wide
# handler configured in metashare.settings.
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(settings.LOG_HANDLER)

# the maximum length (in characters) where `TextInput` widgets will still be
# used; for larger sizes we use `Textarea` widgets
_MAX_TEXT_INPUT_SIZE = 150
class DictWidget(widgets.Widget):
    """
    A widget for rendering dictionaries as represented by `DictField` form
    fields.

    By default key/value input widgets will be `TextInput` widgets. If a
    sufficiently large maximum size is specified for either of them, the input
    widgets may also become `Textarea` widgets.
    """
    class Media:
        # Widget-specific CSS/JS injected into the admin page.
        css = { 'all': ('{}css/dict_widget.css'.format(
                settings.ADMIN_MEDIA_PREFIX),) }
        js = ('{}js/dict_widget.js'.format(settings.ADMIN_MEDIA_PREFIX),)

    # templates for the names of key/value "<input/>" fields;
    # filled with (field_name, entry_index)
    _key_field_name_tpl = 'key_{}_{}'
    _val_field_name_tpl = 'val_{}_{}'

    # Subclasses may set this to a widget (class or instance) used to render
    # the key input instead of the default TextInput/Textarea.
    key_widget = None

    def __init__(self, blank=False, max_key_length=None, max_val_length=None):
        """
        Initializes a new `DictWidget` with the given maximum dictionary
        key/value sizes (in characters).

        The `blank` argument denotes whether empty dictionarys should be allowed
        or not. This is only enforced via JavaScript, though!
        """
        self.blank = blank
        self.max_key_length = max_key_length
        self.max_val_length = max_val_length
        # path to the Django template which is used to render this widget
        self._template = 'repository/editor/dict_widget.html'
        super(DictWidget, self).__init__()

    def render(self, name, value, attrs=None):
        """
        Returns this Widget rendered as HTML, as a Unicode string.

        The 'value' given is not guaranteed to be valid input, so subclass
        implementations should program defensively.
        """
        _entries = []
        # 'new_entry_tpl' is a blank entry used by the JavaScript side to
        # add rows dynamically.
        _context = { 'blank': self.blank, 'entries': _entries,
            'new_entry_tpl': self._get_dict_entry(name, '', None, None) }
        if isinstance(value, dict):
            # (we have gotten an existing Python dict)
            idx = 0
            for key, val in value.iteritems():
                _entries.append(self._get_dict_entry(name, idx, key, val, value))
                idx += 1
        elif isinstance(value, list):
            # (we have gotten a non-valid key/value pair list that was created
            # in value_from_datadict())
            idx = 0
            for entry in value:
                _entries.append(self._get_dict_entry(name, idx, entry[0],
                                                     entry[1], value))
                idx += 1
        elif not self.blank:
            # (we probably have gotten the value None, i.e., the dictionary is
            # only being created; an empty first entry is only shown if the
            # dictionary must not be blank)
            _entries.append(self._get_dict_entry(name, 0, None, None))
        # render final HTML for this widget instance
        return mark_safe(render_to_string(self._template, _context))

    def _get_dict_entry(self, field_name, idx, key, value, values_list=None):
        """
        Returns a tuple (pair) with a rendered key and value input field.

        `field_name` and `id` will be used in the names of the input fields.
        """
        _key_field_name = DictWidget._key_field_name_tpl.format(field_name, idx)
        if self.key_widget:
            # Lazily instantiate when a widget *class* was assigned.
            if isinstance(self.key_widget, type):
                self.key_widget = self.key_widget()
            rendered_key = self.key_widget.render(_key_field_name , key)
        else:
            if self.max_key_length:
                # Large max sizes get a Textarea instead of a TextInput.
                if self.max_key_length > _MAX_TEXT_INPUT_SIZE:
                    rendered_key = Textarea().render(_key_field_name, key)
                else:
                    rendered_key = \
                        TextInput(attrs={ 'maxlength': self.max_key_length }) \
                            .render(_key_field_name, key)
            else:
                rendered_key = TextInput().render(_key_field_name, key)
        _val_field_name = DictWidget._val_field_name_tpl.format(field_name, idx)
        if self.max_val_length:
            if self.max_val_length > _MAX_TEXT_INPUT_SIZE:
                rendered_val = Textarea().render(_val_field_name, value)
            else:
                rendered_val = \
                    TextInput(attrs={ 'maxlength': self.max_val_length }) \
                        .render(_val_field_name, value)
        else:
            rendered_val = TextInput().render(_val_field_name, value)
        return (rendered_key, rendered_val)

    def value_from_datadict(self, data, files, name):
        """
        Given a dictionary of data from the input form and this widget's name,
        returns the value of this widget as a list of key/value pairs (or None
        if the list would be empty).
        """
        # collect the key/value data that was provided by the user (not as a
        # dictionary, so that we can later cleanly validate the data):
        provided = []
        idx = 0
        # Entries are named key_<name>_<idx>/val_<name>_<idx> with
        # consecutive indices; stop at the first gap.
        while True:
            key_name = DictWidget._key_field_name_tpl.format(name, idx)
            val_name = DictWidget._val_field_name_tpl.format(name, idx)
            if not key_name in data or not val_name in data:
                break
            provided.append((data[key_name], data[val_name]))
            idx += 1
        if len(provided) != 0:
            return provided
        return None
class LangDictWidget(DictWidget):
    """
    A `DictWidget` which has RFC 3066 language codes as keys.
    """
    def __init__(self, *args, **kwargs):
        # Keys are rendered with an autocomplete widget for language codes.
        # NOTE(review): LangAutoCompleteWidget is presumably defined
        # elsewhere in this module — not visible here.
        self.key_widget = LangAutoCompleteWidget
        super(LangDictWidget, self).__init__(*args, **kwargs)
        # path to the Django template which is used to render this widget
        self._template = 'repository/editor/lang_dict_widget.html'

    def _get_dict_entry(self, field_name, idx, key, value, values_list=None):
        # Set the default value only if the values_list is empty.
        # This should occur if the key,value does not come from
        # the database nor from user input.
        if not values_list and not key and not value:
            # by default we (blindly) propose the ISO 639-1 language code for
            # English (as per WP7 request in issue #206)
            key = 'en'
        return super(LangDictWidget, self)._get_dict_entry(field_name, idx, key,
                                                           value)

    def _media(self):
        """
        Returns a `Media` object for this widget which is dynamically created
        from the JavaScript of `DictWidget` and CSS specific to this widget.
        """
        # pylint: disable-msg=E1101
        return Media(js = ('js/jquery-ui.min.js',
                           '{}js/pycountry.js'\
                               .format(settings.ADMIN_MEDIA_PREFIX),
                           '{}js/autocomp.js'\
                               .format(settings.ADMIN_MEDIA_PREFIX),
                           '{}js/lang_dict_widget.js'\
                               .format(settings.ADMIN_MEDIA_PREFIX),)) \
            + Media(css={'all': ('{}css/lang_dict_widget.css'.format(
                settings.ADMIN_MEDIA_PREFIX),)})

    # Expose _media() as the standard Django widget ``media`` property.
    media = property(_media)
class SubclassableRelatedFieldWidgetWrapper(RelatedFieldWidgetWrapper):
    """
    A replacement for RelatedWidgetWrapper suitable for related fields which are subclassable.

    Instead of the default 'plus' button to add a new related item in a popup window,
    this implementation shows a dropdown menu letting the user choose which subtype to create.
    """
    class Media:
        js = (
            settings.MEDIA_URL + "js/choice-type-widget.js",
        )

    def __init__(self, widget, rel, admin_site, *args, **kwargs):
        """
        Initialises this widget instance.
        """
        super(SubclassableRelatedFieldWidgetWrapper, self).__init__(widget, rel,
                                                                    admin_site, *args, **kwargs)
        # Pre-compute the subtype dropdown and the (data_type, label) pairs.
        self.subclass_select, self.subclasses = self._compute_sub_classes()

    def _compute_sub_classes(self):
        """
        Computes the choice tuple of available sub classes for this widget.
        """
        _subclasses = []
        # Instantiate the related model to enumerate its direct subclasses.
        _instance = self.rel.to()
        for _cls in _instance.__class__.__subclasses__():
            _subclass = _cls()
            data_type = _subclass.__class__.__name__.lower()
            type_name = _subclass.__schema_name__
            if type_name == "STRINGMODEL":
                # NOTE(review): for string models the display name appears to
                # be derived from the class name up to 'String_model' —
                # grounded only in this string slicing, verify against the
                # model naming convention.
                type_name = _subclass.__class__.__name__
                type_name = type_name[0:type_name.find('String_model')]
            _subclasses.append((data_type, type_name))
        # First choice is a no-op placeholder shown in the dropdown.
        _choices = [('', 'Create new ...')]
        _choices.extend(_subclasses)
        if len(_choices) == 1:
            raise AssertionError('No sub classes found for {}?'.format(
                _instance.__class__.__name__))
        _subclass_select = widgets.Select(choices=_choices)
        return _subclass_select, _subclasses

    def render(self, name, value, *args, **kwargs):
        # We are not using self.admin_site.root_path as this seems broken...
        proto_url = '/{}admin/{}'.format(settings.DJANGO_BASE,
                                         self.rel.to._meta.app_label)
        self.widget.choices = self.choices
        output = [self.widget.render(name, value, *args, **kwargs)]
        if self.can_add_related:
            output.append(' ')
            # Salvatore: changed from 'onclick' to 'onchange' because
            # on Windows browsers onclick is triggered as soon as the
            # user click on the down arrow and before he/she actually
            # selects the item from the list.
            output.append(self.subclass_select.render('subclass_select', '',
                attrs={'onchange': 'javascript:createNewSubInstance($(this), ' \
                    '"add_id_{}", "{}");'.format(name, proto_url)}))
        return mark_safe(u''.join(output))
class MultiFieldWidget(widgets.Widget):
    """
    A MultiFieldWidget allows to enter lists of data using a certain widget.

    Attributes:
    - widget: input widget, defaults to TextInput.
    - style: CSS style settings for the input widget.
    """
    class Media:
        """
        Media sub class to inject custom CSS and JavaScript code.
        """
        css = {
            'all': ('css/repository.css',)
        }
        js = ('js/multi-field-widget.js',)

    def __init__(self, widget_id, max_length=None, **kwargs):
        """
        Initialises a new MultiFieldWidget instance.

        This saves the given, required widget_id, clears the errors dictionary
        and then calls the super class constructor with any remaining args.
        Any max_length argument is used to determine the appropriate size for
        the input fields.
        """
        self.widget_id = widget_id
        self.max_length = max_length
        # Maps offending value -> validation error message (see render()).
        self.errors = {}
        super(MultiFieldWidget, self).__init__(**kwargs)

    def _render_input_widget(self, name, value, attrs):
        """
        Renders and returns the most suitable widget for inputting a single
        field in this `MultiFieldWidget`.
        """
        if self.max_length:
            # Large max sizes get a Textarea; small ones a bounded TextInput.
            if self.max_length > _MAX_TEXT_INPUT_SIZE:
                result = Textarea().render(name, value, attrs)
            else:
                result = TextInput(attrs={ 'maxlength': self.max_length }) \
                    .render(name, value, attrs)
        else:
            result = TextInput().render(name, value, attrs)
        return result

    def _render_container(self, _context):
        # Renders one entry (widget + chrome) of the list.
        return render_to_string('repository/container.html', _context)

    def _render_multifield(self, _context):
        # Renders the full widget wrapping all entry containers.
        return render_to_string('repository/multi_field_widget.html', _context)

    def render(self, name, value, attrs=None):
        """
        Renders the MultiFieldWidget with the given name and value.
        """
        LOGGER.debug('render({0}, {1} [{2}])'.format(name, value, type(value)))
        LOGGER.debug('attrs: {0} errors: {1}'.format(self.attrs, self.errors))

        # If no value is given, we set it to an empty list.
        if not value:
            value = []

        # If we get a String object instead of the expected list-typed value,
        # there has been a validation problem. This means that the value is
        # not yet converted from its serialised form into a list of values.
        if isinstance(value, basestring):
            # Try converting the String to list type.
            # SECURITY NOTE(review): this unpickles base64 data that
            # round-trips through the form POST (see value_from_datadict);
            # pickle.loads on attacker-controlled input allows arbitrary
            # code execution — consider a JSON-based serialisation instead.
            try:
                value = pickle.loads(base64.b64decode(value))
            #
            except:
                LOGGER.error('Error converting value to list!')
                value = []

        # We collect all rendered widgets inside _field_widgets.
        _field_widgets = []
        _field_attrs = {'id': 'id_{0}'.format(name), 'class': 'input',
            'style': self.attrs.get('style', 'width:250px;')}

        # Iterate over all sub values for this MultiFieldWidget instance,
        # adding an index number 0..n-1 to support container id generation.
        for _id, _value in enumerate(value):
            # Render input_widget instance as HTML.
            _field_widget = self._render_input_widget(name, _value,
                                                      _field_attrs)

            # Define context for container template rendering.
            _context = {'id': _id, 'field_widget': _field_widget,
                'widget_id': self.widget_id,
                'admin_media_prefix': settings.ADMIN_MEDIA_PREFIX,
                'field_name': name}

            # If there have been any validation errors, add the message.
            if _value in self.errors.keys():
                _context.update({'error_msg': self.errors[_value]})

            # Render container for this sub value's widget and append to list.
            _container = self._render_container(_context)
            _field_widgets.append(_container)

        # If list of values is empty, render an empty container instead.
        _id = len(value)
        if not _id:
            # Note that value='' as values is empty.
            _field_widget = self._render_input_widget(name, '', _field_attrs)
            _context = {'id': _id, 'field_widget': _field_widget,
                'widget_id': self.widget_id,
                'admin_media_prefix': settings.ADMIN_MEDIA_PREFIX,
                'field_name': name}
            _container = self._render_container(_context)
            _field_widgets.append(_container)

        _field_widget = self._render_input_widget(name, '', _field_attrs)
        _context = {'id': _id, 'field_widget': _field_widget,
            'admin_media_prefix': settings.ADMIN_MEDIA_PREFIX}

        # The JavaScript code needs an empty "template" to create new input
        # widgets dynamically; this is pre-rendered and added to the template
        # for the MultiFieldWidget instance here.
        _empty_widget = self._render_input_widget(name, '', _field_attrs)
        _context = {'empty_widget': _empty_widget,
            'field_widgets': mark_safe(u'\n'.join(_field_widgets)),
            'widget_id': self.widget_id,
            'admin_media_prefix': settings.ADMIN_MEDIA_PREFIX,
            'field_name': name}

        # Render final HTML for this MultiFieldWidget instance.
        _html = self._render_multifield(_context)
        return mark_safe(_html)

    def value_from_datadict(self, data, files, name):
        """
        Encodes the data for this MultiFieldWidget instance as base64 String.
        """
        # Empty entries are dropped; an all-empty submission yields None.
        _value = [v for v in data.getlist(name) if v]
        if not len(_value):
            return None
        return base64.b64encode(pickle.dumps(_value))

    def _has_changed(self, initial, data):
        """
        Checks whether the field values have changed. As data is already an
        pickled, base64 encoded String, we have to de-serialise it first!
        """
        _data = data
        if isinstance(data, basestring):
            # Try converting the String to list type.
            try:
                _data = pickle.loads(base64.b64decode(data))
            #
            except:
                LOGGER.error('Error converting value to list!')
                _data = []
        elif data is None:
            # Salvatore: If the user leaves the field empty assigning
            # an empty list will result in a comparison between
            # None (the value of initial) and [] (the value of _data),
            # yielding a True value as if the user changed the value
            # of the field. Check if in other cases this should really
            # be the empty list.
            #_data = []
            pass
        return initial != _data
class TestHiddenWidget(TextInput, SelectableMediaMixin):
    """Hidden text input that exposes its lookup URL and selectable options
    through data-* attributes for the JavaScript autocomplete layer."""

    def __init__(self, lookup_class, *args, **kwargs):
        self.lookup_class = lookup_class
        # Pull widget-specific options out of kwargs before delegating.
        self.allow_new = kwargs.pop('allow_new', False)
        self.qset = kwargs.pop('query_params', {})
        self.limit = kwargs.pop('limit', None)
        super(TestHiddenWidget, self).__init__(*args, **kwargs)

    def update_query_parameters(self, qs_dict):
        """Merges additional query string parameters into the lookup URL."""
        self.qset.update(qs_dict)

    def build_attrs(self, extra_attrs=None, **kwargs):
        attrs = super(TestHiddenWidget, self).build_attrs(extra_attrs, **kwargs)
        lookup_url = self.lookup_class.url()
        if self.limit and 'limit' not in self.qset:
            self.qset['limit'] = self.limit
        if self.qset:
            lookup_url = '%s?%s' % (lookup_url, urlencode(self.qset))
        attrs.update({
            u'data-selectable-url': lookup_url,
            u'data-selectable-type': 'text',
            u'data-selectable-allow-new': str(self.allow_new).lower(),
            u'type': 'hidden',
        })
        return attrs

    def render(self, name, value, attrs=None):
        rendered = super(TestHiddenWidget, self).render(name, value, attrs)
        return mark_safe(rendered)
class OneToManyWidget(SelectableMultiWidget, SelectableMediaMixin):
    """Multi-widget combining an autocomplete text box with a set of hidden
    inputs carrying the currently selected primary keys."""

    def __init__(self, lookup_class, *args, **kwargs):
        self.lookup_class = lookup_class
        self.limit = kwargs.pop('limit', None)
        position = kwargs.pop('position', 'bottom')
        query_params = kwargs.pop('query_params', {})
        text_attrs = {
            u'data-selectable-multiple': 'true',
            u'data-selectable-position': position,
            u'data-selectable-allow-editing': 'true'
        }
        sub_widgets = [
            TestHiddenWidget(lookup_class, allow_new=False, limit=self.limit,
                             query_params=query_params, attrs=text_attrs),
            LookupMultipleHiddenInput(lookup_class),
        ]
        super(OneToManyWidget, self).__init__(sub_widgets, *args, **kwargs)

    def value_from_datadict(self, data, files, name):
        # Only the hidden multi-input (suffix "_1") carries submitted data.
        return self.widgets[1].value_from_datadict(data, files, name + '_1')

    def render(self, name, value, attrs=None):
        # Normalise a scalar selection into a list before rendering.
        if value and not hasattr(value, '__iter__'):
            value = [value]
        # First sub-widget renders empty; the second gets the selected pks.
        return super(OneToManyWidget, self).render(name, [u'', value], attrs)

    def decompress(self, value):
        """Splitting is handled in render(); nothing to decompress here."""
        pass
class ComboWidget(AdminTextInputWidget):
    """
    Admin text input enhanced with a jQuery UI autocomplete that keeps a
    linked "id" field and "name" field in sync (see js/autocomp.js).
    """
    class Media:
        """
        Media sub class to inject custom CSS and JavaScript code.
        """
        css = {
            'all': ('{}css/themes/smoothness/jquery-ui.css'
                    .format(settings.ADMIN_MEDIA_PREFIX),
                    '{}css/combo.css'.format(settings.ADMIN_MEDIA_PREFIX))
        }
        js = ('js/jquery-ui.min.js',
              '{}js/pycountry.js'.format(settings.ADMIN_MEDIA_PREFIX),
              '{}js/autocomp.js'.format(settings.ADMIN_MEDIA_PREFIX),)

    def __init__(self, field_type=None, attrs=None):
        """
        :param field_type: either 'id' or 'name'; selects which side of the
            id/name pair this widget instance represents.
        :param attrs: widget attributes; must contain 'id_field' and
            'name_field' keys naming the two linked form fields.
        """
        self.field_type = field_type
        # Bug fix: guard against attrs=None *before* popping from it; the
        # original popped first and crashed with an AttributeError.
        if not attrs:
            attrs = {}
        self.id_field = attrs.pop('id_field')
        self.name_field = attrs.pop('name_field')
        super(ComboWidget, self).__init__(attrs)

    def render(self, name, value, attrs=None):
        val = super(ComboWidget, self).render(name, value, attrs)
        # Bug fix: also guard attrs=None ('id' in None raised TypeError).
        if attrs and 'id' in attrs:
            id1 = attrs['id']
            # Bug fix: js_script was unbound (NameError) when field_type was
            # neither 'id' nor 'name'; default to appending nothing.
            js_script = u''
            if self.field_type == 'id':
                linked_to = attrs['id'].replace(self.id_field, self.name_field)
                js_script = u'<script>autocomp_single("id", "{0}", "{1}");</script>'.format(id1, linked_to)
            elif self.field_type == 'name':
                linked_to = attrs['id'].replace(self.name_field, self.id_field)
                js_script = u'<script>autocomp_single("name", "{0}", "{1}");</script>'.format(id1, linked_to)
            val = val + js_script
        return mark_safe(val)
class MultiComboWidget(MultiFieldWidget):
    """
    MultiFieldWidget variant whose sub inputs are autocomplete combo boxes
    linking an "id" field and a "name" field (see js/autocomp.js).
    """
    class Media:
        """
        Media sub class to inject custom CSS and JavaScript code.
        """
        css = {
            'all': ('{}css/themes/smoothness/jquery-ui.css'
                    .format(settings.ADMIN_MEDIA_PREFIX),
                    '{}css/combo.css'.format(settings.ADMIN_MEDIA_PREFIX))
        }
        js = ('js/jquery-ui.min.js',
              '{}js/pycountry.js'.format(settings.ADMIN_MEDIA_PREFIX),
              '{}js/autocomp.js'.format(settings.ADMIN_MEDIA_PREFIX),)

    def __init__(self, field_type=None, attrs=None, widget_id=None, max_length=None, **kwargs):
        """
        :param field_type: either 'id' or 'name'; selects which side of the
            id/name pair this widget instance represents.
        :param attrs: must contain 'id_field' and 'name_field' keys naming
            the two linked form fields.  NOTE(review): attrs=None crashes on
            pop() below -- callers apparently always pass a dict; confirm.
        """
        self.field_type = field_type
        self.id_field = attrs.pop('id_field')
        self.name_field = attrs.pop('name_field')
        super(MultiComboWidget, self).__init__(widget_id, max_length, **kwargs)

    def _add_autocomp_context(self, _context):
        """Injects the autocomplete flag and the linked field name into the
        template context.  DRY helper: this logic was duplicated verbatim in
        _render_container() and _render_multifield()."""
        if self.field_type == 'name':
            _context.update({'autocomp_name': True})
            linked_field_name = _context['field_name']
            linked_field_name = linked_field_name.replace(self.name_field, self.id_field)
            _context.update({'linked_field_name': linked_field_name})
        elif self.field_type == 'id':
            _context.update({'autocomp_id': True})
            linked_field_name = _context['field_name']
            linked_field_name = linked_field_name.replace(self.id_field, self.name_field)
            _context.update({'linked_field_name': linked_field_name})

    def _render_container(self, _context):
        self._add_autocomp_context(_context)
        return super(MultiComboWidget, self)._render_container(_context)

    def _render_multifield(self, _context):
        self._add_autocomp_context(_context)
        return super(MultiComboWidget, self)._render_multifield(_context)
class LangAutoCompleteWidget(widgets.Widget):
    """Text input with jQuery UI language autocompletion attached, plus a
    span that displays the resolved language name."""

    class Media:
        js = ('js/jquery-ui.min.js',
              '{}js/pycountry.js'.format(settings.ADMIN_MEDIA_PREFIX),
              '{}js/autocomp.js'.format(settings.ADMIN_MEDIA_PREFIX),)
        css = {}

    def __init__(self, attrs=None):
        super(LangAutoCompleteWidget, self).__init__(attrs)

    def render(self, name, value, attrs=None):
        # NOTE(review): `value` is interpolated unescaped and the result is
        # mark_safe()d -- confirm values can never contain quotes or HTML,
        # otherwise this needs escaping.
        parts = [
            u'<input type="text" class="lang_autocomplete" name="{0}" value="{1}"/>'.format(name, value or u''),
            u'</br><span class="lang_name" for="{0}"/>'.format(name),
        ]
        return mark_safe(u''.join(parts))
class AutoCompleteSelectMultipleEditWidget(SelectableMultiWidget, SelectableMediaMixin):
    """Multiple-selection autocomplete widget whose selected items stay
    editable through the repository editor base URL."""

    def __init__(self, lookup_class, *args, **kwargs):
        self.lookup_class = lookup_class
        # Callers may substitute their own hidden-input widget class.
        self.lookup_widget = kwargs.pop('lookup_widget', None) or LookupMultipleHiddenInput
        self.limit = kwargs.pop('limit', None)
        position = kwargs.pop('position', 'bottom')
        more_attrs = kwargs.pop('attrs', None)
        query_params = kwargs.pop('query_params', {})
        proto_url = '/{}editor/repository/'.format(settings.DJANGO_BASE)
        attrs = {
            u'data-selectable-multiple': 'true',
            u'data-selectable-position': position,
            u'data-selectable-allow-editing': 'true',
            u'data-selectable-base-url': proto_url,
            u'data-selectable-throbber-img': '{0}img/admin/throbber_16.gif'.format(settings.ADMIN_MEDIA_PREFIX),
            u'data-selectable-use-state-error': 'false',
        }
        if more_attrs:
            attrs.update(more_attrs)
        sub_widgets = [
            AutoCompleteWidget(lookup_class, allow_new=False, limit=self.limit,
                               query_params=query_params, attrs=attrs),
            self.lookup_widget(lookup_class),
        ]
        super(AutoCompleteSelectMultipleEditWidget, self).__init__(
            sub_widgets, *args, **kwargs)

    def value_from_datadict(self, data, files, name):
        # Submitted values live in the hidden multi-input (suffix "_1").
        return self.widgets[1].value_from_datadict(data, files, name + '_1')

    def render(self, name, value, attrs=None):
        # Normalise a scalar selection into a list before rendering.
        if value and not hasattr(value, '__iter__'):
            value = [value]
        return super(AutoCompleteSelectMultipleEditWidget, self).render(
            name, [u'', value], attrs)

    def decompress(self, value):
        """Splitting is handled in render(); nothing to decompress here."""
        pass

    # Copied from django.contrib.admin.widgets.ManyToManyRawIdWidget class
    def _has_changed(self, initial, data):
        initial = initial or []
        data = data or []
        if len(initial) != len(data):
            return True
        return any(force_unicode(pk1) != force_unicode(pk2)
                   for pk1, pk2 in zip(initial, data))
class AutoCompleteSelectMultipleSubClsWidget(AutoCompleteSelectMultipleEditWidget):
    """Edit-widget variant for subclassable models: flags the autocomplete
    input as subclassable and renders per-item metadata through
    LookupMultipleHiddenInputMS."""

    def __init__(self, lookup_class, *args, **kwargs):
        # NOTE: any caller-supplied 'attrs'/'lookup_widget' are overridden.
        kwargs['attrs'] = {u'data-selectable-is-subclassable': 'true',}
        kwargs['lookup_widget'] = LookupMultipleHiddenInputMS
        super(AutoCompleteSelectMultipleSubClsWidget, self).__init__(
            lookup_class, *args, **kwargs)
class LookupMultipleHiddenInputMS(LookupMultipleHiddenInput):
    """Hidden multi-input that, per selected item, additionally exposes its
    display value (`title`) and its concrete subclass name (`model-class`)
    as attributes for the JavaScript layer."""
    def render(self, name, value, attrs=None, choices=()):
        lookup = self.lookup_class()
        if value is None:
            value = []
        final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
        id_ = final_attrs.get('id', None)
        inputs = []
        model = getattr(self.lookup_class, 'model', None)
        for index, val in enumerate(value):
            item = None
            # `value` may hold either model instances or raw primary keys;
            # normalise instances to their pk for the input's value attr.
            if model and isinstance(val, model):
                item = val
                val = lookup.get_item_id(item)
            input_attrs = dict(value=force_unicode(val), **final_attrs)
            if id_:
                # An ID attribute was given. Add a numeric index as a suffix
                # so that the inputs don't all have the same ID attribute.
                input_attrs['id'] = '%s_%s' % (id_, index)
            if val:
                # Fetch the item lazily when only its pk was supplied.
                item = item or lookup.get_item(val)
                item_cls = item.as_subclass().__class__.__name__.lower()
                input_attrs['title'] = lookup.get_item_value(item)
                input_attrs['model-class'] = item_cls
            inputs.append(u'<input%s />' % flatatt(input_attrs))
        return mark_safe(u'\n'.join(inputs))
class AutoCompleteSelectSingleWidget(AutoCompleteSelectWidget):
    """Single-selection autocomplete widget with custom throbber and error
    handling attributes on its text input."""

    def __init__(self, lookup_class, *args, **kwargs):
        self.lookup_class = lookup_class
        self.allow_new = kwargs.pop('allow_new', False)
        self.limit = kwargs.pop('limit', None)
        query_params = kwargs.pop('query_params', {})
        text_attrs = {
            u'data-selectable-throbber-img': '{0}img/admin/throbber_16.gif'.format(settings.ADMIN_MEDIA_PREFIX),
            u'data-selectable-use-state-error': 'false',
        }
        sub_widgets = [
            AutoCompleteWidget(lookup_class, allow_new=self.allow_new,
                               limit=self.limit, query_params=query_params,
                               attrs=text_attrs),
            forms.HiddenInput(attrs={u'data-selectable-type': 'hidden'}),
        ]
        # Deliberately skip AutoCompleteSelectWidget.__init__: it would not
        # let us pass custom attributes to the AutoCompleteWidget, so we call
        # the grandparent __init__ directly with our own widget list.
        # pylint: disable-msg=E1003
        super(AutoCompleteSelectWidget, self).__init__(
            sub_widgets, *args, **kwargs)
| {
"content_hash": "762e1c98b73f38b40ff60127111091d4",
"timestamp": "",
"source": "github",
"line_count": 719,
"max_line_length": 112,
"avg_line_length": 42.013908205841446,
"alnum_prop": 0.5831898834745762,
"repo_name": "JuliBakagianni/CEF-ELRC",
"id": "ce69f7a19f03ea0b6af96f59aa0f4f62e9929d64",
"size": "30208",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metashare/repository/editor/widgets.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "7362"
},
{
"name": "C",
"bytes": "321"
},
{
"name": "C++",
"bytes": "112277"
},
{
"name": "CSS",
"bytes": "201968"
},
{
"name": "HTML",
"bytes": "3056700"
},
{
"name": "Java",
"bytes": "12780"
},
{
"name": "JavaScript",
"bytes": "347862"
},
{
"name": "Makefile",
"bytes": "6778"
},
{
"name": "Python",
"bytes": "10052542"
},
{
"name": "Shell",
"bytes": "111376"
},
{
"name": "XSLT",
"bytes": "473763"
}
],
"symlink_target": ""
} |
import time, subprocess, platform, threading, queue, os
from collections import deque
from housepy import osc, util, log, config, process
# Record our pid in ./run so an external supervisor can manage the process.
process.secure_pid(os.path.abspath(os.path.join(os.path.dirname(__file__), "run")))
log.info("--> controller is %s" % config['controller'])
class Health(threading.Thread):
    """Daemon thread that periodically reports this box's status via OSC.

    Other threads push status strings into `health.queue`; every
    config['health_rate'] seconds the most recently queued status (or a
    derived idle status) is sent to the controller on port 23232.
    """

    def __init__(self):
        threading.Thread.__init__(self)
        self.daemon = True
        self.sender = osc.Sender(config['controller'], 23232)
        self.queue = queue.Queue()
        self.start()

    def run(self):
        loaded = False
        while True:
            status = None
            # Drain the queue, keeping only the most recent status report.
            while True:
                try:
                    status = self.queue.get_nowait()
                    # Idiom fix: direct boolean instead of `True if ... else False`.
                    loaded = status == 'loaded'
                except queue.Empty:
                    break
            if status is None:
                # Nothing reported since the last tick: report idle state.
                status = "ready" if not loaded else "loaded"
            self.sender.send("/health", [config['name'], status])
            time.sleep(config['health_rate'])

health = Health()
class Player(threading.Thread):
    """Daemon thread that plays queued sound files one at a time.

    Sound filenames (relative to ./snd) are pushed into `player.queue`;
    playback is delegated to the platform's command-line audio player, and
    'playing' health reports are emitted while the subprocess runs.
    """

    def __init__(self):
        threading.Thread.__init__(self)
        self.daemon = True
        self.queue = queue.Queue()
        self.process = None
        self.start()

    @staticmethod
    def _sound_command(sound):
        """Return the subprocess argv used to play `sound`.

        Bug fix: on unsupported platforms the original left the player
        binary name unbound and died with a NameError; raise an explicit
        error instead (still caught and logged by run()).
        """
        if platform.system() == "Darwin":
            bn = "afplay"
        elif platform.system() == "Linux":
            bn = "mpg123" if sound.endswith("mp3") else "aplay"
        else:
            raise Exception("Unsupported platform for audio playback")
        path = os.path.abspath(os.path.join(os.path.dirname(__file__), "snd", sound))
        return [bn, path]

    def run(self):
        while True:
            sound = self.queue.get()
            try:
                self.process = subprocess.Popen(self._sound_command(sound))
                health.queue.put('playing')
                while True:
                    # log.debug(self.process.poll())
                    if self.process.poll() is None:
                        # Still playing: keep reporting and poll again later.
                        health.queue.put('playing')
                        time.sleep(config['health_rate'])
                    else:
                        self.process = None
                        break
            except Exception as e:
                log.error(log.exc(e))
                health.queue.put('playing failed')

    def stop(self):
        # this is not strictly thread-safe, is it? could poll and terminate collide?
        try:
            if self.process is not None:
                self.process.terminate()
            health.queue.put('stopped')
        except Exception as e:
            log.error(log.exc(e))
            health.queue.put('stop failed')

player = Player()
timers = []

def message_handler(ip, address, data):
    """Handle incoming OSC messages.

    /cue expects `data` as alternating [delay, sound, delay, sound, ...]
    pairs; each pair schedules `sound` to be queued for playback after
    `delay` seconds, replacing any previously loaded cues.  /stop aborts
    playback and cancels all pending cues.
    """
    if address == '/cue':
        for timer in timers: # clears currently loaded cues
            timer.cancel()
        try:
            # Even-indexed entries are delays, odd-indexed ones sound names
            # (slicing replaces the original enumerate-mod-2 filtering).
            ts = [float(d) for d in data[0::2]]
            ns = list(data[1::2])
            for delay, sound in zip(ts, ns):
                timer = threading.Timer(delay, player.queue.put, (sound,))
                health.queue.put('loaded')
                timers.append(timer)
                timer.start()
        except Exception as e:
            log.error(log.exc(e))
            health.queue.put('failed')
    elif address == '/stop':
        player.stop()
        for timer in timers:
            timer.cancel()
    else:
        # Typo fix in the logged message ("comand" -> "command").
        log.error("Unknown command (%s)" % address)
    return
osc.Receiver(5280, message_handler, blocking=True)
| {
"content_hash": "e92a77ec2b43ed43fb0fbaf830c36e23",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 93,
"avg_line_length": 33.41121495327103,
"alnum_prop": 0.4993006993006993,
"repo_name": "brianhouse/dcue",
"id": "5867494e3e049012c13d7a068433908e0f12e113",
"size": "3599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "syncbox.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7132"
}
],
"symlink_target": ""
} |
"""WatchRule object."""
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from heat.db import api as db_api
from heat.objects import fields as heat_fields
from heat.objects import stack
from heat.objects import watch_data
class WatchRule(base.VersionedObject, base.VersionedObjectDictCompat):
    """Versioned object wrapping a watch_rule database record."""

    fields = {
        'id': fields.IntegerField(),
        'name': fields.StringField(nullable=True),
        'rule': heat_fields.JsonField(nullable=True),
        'state': fields.StringField(nullable=True),
        'last_evaluated': fields.DateTimeField(nullable=True),
        'stack_id': fields.StringField(),
        'stack': fields.ObjectField(stack.Stack),
        'watch_data': fields.ListOfObjectsField(watch_data.WatchData),
        'created_at': fields.DateTimeField(read_only=True),
        'updated_at': fields.DateTimeField(nullable=True),
    }

    @staticmethod
    def _from_db_object(context, rule, db_rule):
        """Populate `rule` from a database row, resolving related objects."""
        for name in rule.fields:
            if name == 'stack':
                rule[name] = stack.Stack._from_db_object(
                    context, stack.Stack(), db_rule[name])
            elif name == 'watch_data':
                rule[name] = watch_data.WatchData.get_all_by_watch_rule_id(
                    context, db_rule['id'])
            else:
                rule[name] = db_rule[name]
        rule._context = context
        rule.obj_reset_changes()
        return rule

    @classmethod
    def get_by_id(cls, context, rule_id):
        return cls._from_db_object(
            context, cls(), db_api.watch_rule_get(context, rule_id))

    @classmethod
    def get_by_name(cls, context, watch_rule_name):
        return cls._from_db_object(
            context, cls(),
            db_api.watch_rule_get_by_name(context, watch_rule_name))

    @classmethod
    def get_all(cls, context):
        records = db_api.watch_rule_get_all(context)
        return [cls._from_db_object(context, cls(), r) for r in records]

    @classmethod
    def get_all_by_stack(cls, context, stack_id):
        records = db_api.watch_rule_get_all_by_stack(context, stack_id)
        return [cls._from_db_object(context, cls(), r) for r in records]

    @classmethod
    def update_by_id(cls, context, watch_id, values):
        db_api.watch_rule_update(context, watch_id, values)

    @classmethod
    def create(cls, context, values):
        db_rule = db_api.watch_rule_create(context, values)
        return cls._from_db_object(context, cls(), db_rule)

    @classmethod
    def delete(cls, context, watch_id):
        db_api.watch_rule_delete(context, watch_id)
| {
"content_hash": "ddb27be87125a2d658f37c5192abec41",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 77,
"avg_line_length": 36.729729729729726,
"alnum_prop": 0.6070640176600441,
"repo_name": "pratikmallya/heat",
"id": "cf71e3723995a1712e759b88c6772a0276e3320b",
"size": "3290",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "heat/objects/watch_rule.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6929579"
},
{
"name": "Shell",
"bytes": "33092"
}
],
"symlink_target": ""
} |
import cfp_common
import nltk
import sys
class CfpCommonV1(cfp_common.CfpCommon):
    """Version 1 grammar configuration for CFP (call-for-papers) generation.

    Provides the weight tables, recursion/newline markers and the bit
    allocation strategy used with the v1 grammars (cfp_header.cfg /
    cfp_body.cfg).
    """
    # top-level section -> (list weight, list nonterminal for that section)
    list_weights = {nltk.Nonterminal("CFP_TOPIC_SECTION"):
                    (1,nltk.Nonterminal("CFP_TOPIC_LIST")),
                    nltk.Nonterminal("LOC_SECTION"):
                    (.5,nltk.Nonterminal("LOC_LIST")),
                    nltk.Nonterminal("ORGS_SECTION"):
                    (1,nltk.Nonterminal("ORGS_LIST")),
                    nltk.Nonterminal("STEER_SECTION"):
                    (1,nltk.Nonterminal("STEER_LIST")),
                    nltk.Nonterminal("KEYNOTE_SECTION"):
                    (7,nltk.Nonterminal("KEYNOTE_LIST")),
                    nltk.Nonterminal("PC_SECTION"):
                    (5,nltk.Nonterminal("PC_LIST"))}
    # Nonterminals that expand recursively, i.e. produce variable-length
    # lists (returned by list_recursive_terms()).
    recursive_terms = [nltk.Nonterminal("CFP_TOPIC_LIST"),
                       nltk.Nonterminal("PROF_LIST_PAREN"),
                       nltk.Nonterminal("PROF_LIST_COMMA"),
                       nltk.Nonterminal("PROF_LIST_DASH"),
                       nltk.Nonterminal("LOC_LIST"),
                       nltk.Nonterminal("KEYNOTE_LIST_DASH")]
    # Nonterminal -> number of newlines to append after its expansion
    # (returned by append_newlines()).
    newline_terms = {nltk.Nonterminal("CFP_GREETING"):1,
                     nltk.Nonterminal("CFP_TOPIC_HEADER"):1,
                     nltk.Nonterminal("CFP_TOPIC_LIST_ITEM"):1,
                     nltk.Nonterminal("PROF_LIST_PAREN_ITEM"):1,
                     nltk.Nonterminal("PROF_LIST_COMMA_ITEM"):1,
                     nltk.Nonterminal("PROF_LIST_DASH_ITEM"):1,
                     nltk.Nonterminal("KEYNOTE_ITEM_DASH"):1,
                     nltk.Nonterminal("ORGS_HEADER"):1,
                     nltk.Nonterminal("PC_HEADER"):1,
                     nltk.Nonterminal("STEER_HEADER"):1,
                     nltk.Nonterminal("KEYNOTE_HEADER"):1,
                     nltk.Nonterminal("LOC_HEADER"):1,
                     nltk.Nonterminal("LOC_PLACE_ITEM"):1,
                     nltk.Nonterminal("LOC_UNIV_ITEM"):1,
                     nltk.Nonterminal("DATE_HEADER"):1,
                     nltk.Nonterminal("SUBSTITUTE_DATE_NL"):1,
                     nltk.Nonterminal("DATE_TYPE_1_NL"):1,
                     nltk.Nonterminal("DATE_TYPE_2_NL"):1,
                     nltk.Nonterminal("DATE_TYPE_3_NL"):1,
                     nltk.Nonterminal("DATE_TYPE_4_NL"):1,
                     nltk.Nonterminal("CFP_INTRO_SECTION"):1,
                     nltk.Nonterminal("CFP_SCOPE_SECTION"):1,
                     nltk.Nonterminal("CFP_SUBMIT_SECTION"):1,
                     nltk.Nonterminal("SPACE_NEWLINE"):1}
    # Nonterminal -> whether the "last" alternative should be chosen
    # (returned by choose_last_or_nots()).
    last_or_not_terms = {nltk.Nonterminal("SUBMIT_CLOSING"):False}
    @staticmethod
    def version():
        # Grammar version implemented by this configuration class.
        return 1
    def chars_to_remove_a_space_before(self):
        # Regex character class: punctuation not to be preceded by a space.
        return '.,:;\?\)\!'
    def chars_to_remove_a_space_after(self):
        # Regex character class: characters not to be followed by a space.
        return '\('
    def list_recursive_terms(self):
        return CfpCommonV1.recursive_terms
    def append_newlines(self):
        return CfpCommonV1.newline_terms
    def choose_last_or_nots(self):
        return CfpCommonV1.last_or_not_terms
    def calc_list_bits(self, msg_len, body_prod):
        """Distributes msg_len bits among the lists actually used in
        body_prod, proportionally to their configured weights."""
        # we only care about lists that are actually used in the body
        used_lists = {w[1]: w[0] for l,w in self.list_weights.iteritems()
                      if l in body_prod.rhs()}
        total_weight = sum(used_lists.values())
        # we'll get most of our entropy from lists, but we should make
        # sure that the bits are spread out among the lists as much as
        # possible. So given a set of lists, each with weight w (total
        # weight of W), and a number of bits remaining = B, make sure
        # B*w/W bits are used up in this list. Multiply by some fraction
        # since other parts of the message will use some bits too.
        fraction_in_lists = 0.85
        list_bits = {}
        for l,w in used_lists.iteritems():
            list_bits[l] = int(msg_len*fraction_in_lists*w/total_weight)
        return list_bits
    def header_cfg_filename(self):
        return "cfp_header.cfg"
    def body_cfg_filename(self):
        return "cfp_body.cfg"
cfp_common.CfpCommon.register_common(CfpCommonV1)
| {
"content_hash": "d7302cf17aabb78a302dfa92b8f02495",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 76,
"avg_line_length": 44.01020408163265,
"alnum_prop": 0.5365175052167864,
"repo_name": "strib/scipher",
"id": "143141cd45bfdd1f50baf6dac37c3aef174e8e5d",
"size": "4448",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cfp_common_v1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45512"
},
{
"name": "Shell",
"bytes": "609"
}
],
"symlink_target": ""
} |
"""
OpenMDAO Wrapper for PDCYL.
PDCYL is a structural esimation code that was pulled from ACSYNT.
"""
# pylint: disable-msg=E0611,F0401
from openmdao.lib.components.api import ExternalCode
from openmdao.main.api import FileMetadata
from openmdao.main.datatypes.api import Bool, Enum, Float, Int, Str
from openmdao.util.filewrap import FileParser
class PdcylComp(ExternalCode):
""" OpenMDAO component wrapper for PDCYL. """
icalc = Bool(False, iotype='in', desc='Print switch. Set to True for verbose output.')
title = Str("PDCYl Component", iotype='in', desc='Title of the analysis')
# Wing geometry
# --------------------
wsweep = Float(iotype='in', units='deg', desc='Wing sweep referenced to the leading edge')
war = Float(iotype='in', desc='Wing Aspect Ratio')
wtaper = Float(iotype='in', desc=' Wing taper ratio')
wtcroot = Float(iotype='in', desc=' Wing thickness-to-cord at root')
wtctip = Float(iotype='in', desc=' Wing thickness-to-cord at tip')
warea = Float(iotype='in', units='ft**2', desc='Wing planform area')
# Material properties
# --------------------
ps = Float(iotype='in', desc='Plasticity factor')
tmgw = Float(iotype='in', units='inch', desc='Min. gage thickness for the wing')
effw = Float(iotype='in', desc='Buckling efficiency of the web')
effc = Float(iotype='in', desc='Buckling efficiency of the covers')
esw = Float(iotype='in', units='psi', desc="Young's Modulus for wing material")
fcsw = Float(iotype='in', units='psi', desc='Ult. compressive strength of wing')
dsw = Float(iotype='in', units='lb/inch**3', desc=' Density of the wing material')
kdew = Float(iotype='in', desc="Knock-down factor for Young's Modulus")
kdfw = Float(iotype='in', desc='Knock-down factor for Ultimate strength')
# Geometric parameters
# --------------------
istama = Enum([1, 2], iotype='in', desc=' 1 - Position of wing is unknown; 2 - position is known')
cs1 = Float(iotype='in', desc='Position of structural wing box from leading edge as percent of root chord')
cs2 = Float(iotype='in', desc=' Position of structural wing box from trailing edge as percent of root chord')
uwwg = Float(iotype='in', units='lb/ft**2', desc=' Wing Weight / Wing Area of baseline aircraft ')
xwloc1 = Float(iotype='in', desc=' Location of wing as a percentage of body length')
# Structural Concept
# --------------------
claqr = Float(iotype='in', desc='Ratio of body lift to wing lift')
ifuel = Enum([1, 2], iotype='in', desc=' 1 - No fuel is stored in the wing; 2 - Fuel is stored in the wing')
cwman = Float(iotype='in', desc='Design maneuver load factor')
cf = Float(iotype='in', desc="Shanley's const. for frame bending")
# Tails
# --------------------
itail = Enum([1, 2], iotype='in',
desc=' 1 - Control surfaces mounted on tail; 2 - Control surfaces mounted on wing')
uwt = Float(iotype='in', units='lb/ft**2', desc='(Htail Weight + Vtail Weight) / Htail Area of baseline aircraft')
clrt = Float(iotype='in', desc=' Location of tail as a percentage of body length')
harea = Float(iotype='in', units='ft**2', desc=' Location of tail as a percentage of body length')
# Fuselage geometry
# --------------------
frn = Float(iotype='in', desc='Fineness ratio of the nose section (length/diameter)')
frab = Float(iotype='in', desc='Fineness ratio of the after-body section (length/diameter)')
bodl = Float(iotype='in', units='ft', desc='Length of the fuselage ')
bdmax = Float(iotype='in', units='ft', desc='Maximum diameter of fuselage')
# These vars are listed in the pdcyl code, but they are never read in. Not sure
# what that's all about.
# vbod = Float(iotype='in', units='ft**3', desc='Fuselage total volume ')
# volnose = Float(iotype='in', units='ft**3', desc='Nose Volume')
# voltail = Float(iotype='in', units='ft**3', desc='Tail volume ')
# Structural Concept
# --------------------
ckf = Float(iotype='in', desc='Frame stiffness coefficient')
ec = Float(iotype='in', desc='Power in approximation equation for buckling stability')
kgc = Float(iotype='in', desc='Buckling coefficient for component general buckling of stiffener web panel')
kgw = Float(iotype='in', desc='Buckling coefficient for component local buckling of web panel')
# KCONT(12) ! Structural Geometry Concept Top/Bottom
# KCONB(12) ! 2 - Simply stiffened shell, frames, sized for minimum weight in buckling
# ! 3 - Z-stiffened shell, frames, best buckling
# ! 4 - Z-stiffened shell, frames, buckling-minimum gage compromise
# ! 5 - Z-stiffened shell, frames, buckling-pressure compromise
# ! 6 - Truss-core sandwich, frames, best buckling
# ! 8 - Truss-core sandwich, no frames, best buckling
# ! 9 - Truss-core sandwich, no frames, buckling-min. gage-pressure compromise
kcont = Enum([2, 3, 4, 5, 6, 8, 9], iotype='in', desc='Structural Geometry Concept Top')
kconb = Enum([2, 3, 4, 5, 6, 8, 9], iotype='in', desc='Structural Geometry Concept Bottom')
# Material properties
# -------------------
ftst = Float(iotype='in', desc="Tensile Strength on Top")
ftsb = Float(iotype='in', desc="Tensile Strength on Bottom")
fcst = Float(iotype='in', desc="Compressive Strength on Top")
fcsb = Float(iotype='in', desc="Compressive Strength on Bottom")
est = Float(iotype='in', desc="Young's Modulus for the shells Top")
esb = Float(iotype='in', desc="Young's Modulus for the shells Bottom")
eft = Float(iotype='in', desc="Young's Modulus for the frames Top")
efb = Float(iotype='in', desc="Young's Modulus for the frames Bottom")
dst = Float(iotype='in', desc="Density of shell material on Top")
dsb = Float(iotype='in', desc="Density of shell material on Bottom")
dft = Float(iotype='in', desc="Density of frame material on Top")
dfb = Float(iotype='in', desc="Density of frame material on Bottom")
tmgt = Float(iotype='in', desc="Minimum gage thickness Top")
tmgb = Float(iotype='in', desc="Minimum gage thickness Bottom")
kde = Float(iotype='in', desc="Knock-down factor for modulus")
kdf = Float(iotype='in', desc="Knock-down factor for strength")
# Geometric parameters
# --------------------
clbr1 = Float(iotype='in', desc='Fuselage break point as a fraction of total fuselage length')
icyl = Enum([1, 0], iotype='in',
desc=' 1 - modeled with a mid-body cylinder, 0 - use two power-law bodies back to back')
# Engines
# --------------------
neng = Int(iotype='in', desc=' Total number of engines')
nengwing = Int(iotype='in', desc=' Number of engines on wing')
wfp = Float(iotype='in', desc='(Engine Weight * NENG) / WGTO')
clrw1 = Float(iotype='in', desc=' Location of first engine pair. Input 0 for centerline engine.')
clrw2 = Float(iotype='in', desc=' Location of second engine pair. measured from body centerline')
clrw3 = Float(iotype='in', desc=' Location of third engine pair. measured from body centerline')
# Loads
# --------------------
deslf = Float(iotype='in', desc='Design load factor')
ultlf = Float(iotype='in', desc='Ultimate load factor (usually 1.5*DESLF)')
axac = Float(iotype='in', desc='Axial acceleration')
cman = Float(iotype='in', desc=' Weight fraction at maneuver')
iload = Enum([1, 2, 3], iotype='in',
desc='1 - Analyze maneuver only; 2 - Analyze maneuver and landing only; 3 - Analyze bump, landing and maneuver')
pgt = Float(iotype='in', desc="Fuselage gage pressure on top")
pgb = Float(iotype='in', desc="Fuselage gage pressure on bottom")
wfbump = Float(iotype='in', desc=' Weight fraction at bump')
wfland = Float(iotype='in', desc=' Weight fraction at landing')
# Landing Gear
# -------------------
vsink = Float(iotype='in', units='ft/s', desc='Design sink velocity at landing ')
stroke = Float(iotype='in', units='ft', desc=' Stroke of landing gear ')
clrg1 = Float(iotype='in',
desc='Length fraction of nose landing gear measured as a fraction of total fuselage length')
clrg2 = Float(iotype='in',
desc='Length fraction of main landing gear measured as a fraction of total fuselage length')
wfgr1 = Float(iotype='in', desc='Weight fraction of nose landing gear')
wfgr2 = Float(iotype='in', desc='Weight fraction of main landing gear')
igear = Enum([1, 2], iotype='in',
desc='1 - Main landing gear located on fuselage,2 - Main landing gear located on wing')
gfrl = Float(iotype='in', desc='Ratio of force taken by nose landing gear to force taken by main gear at landing')
clrgw1 = Float(iotype='in', desc='Position of wing gear as a fraction of structural semispan')
clrgw2 = Float(iotype='in', desc='Position of second pair wing gear as a fraction of structural semispan')
# Weights
# -------------------
wgto = Float(iotype='in', units='lb', desc=' Gross takeoff weight')
wtff = Float(iotype='in', desc='Weight fraction of fuel')
cbum = Float(iotype='in', desc='Weight fraction at bump')
clan = Float(iotype='in', desc='Weight fraction at landing')
# Factors
# --------------------
ischrenk = Int(iotype='in', desc='1 - use Schrenk load distribution on wing,Else - use trapezoidal distribution')
icomnd = Enum([1, 2], iotype='in',
desc='1 - print gross shell dimensions envelope,2 - print detailed shell geometry')
wgno = Float(iotype='in', desc='Nonoptimal factor for wing (including the secondary structure)')
slfmb = Float(iotype='in', desc='Static load factor for bumps')
wmis = Float(iotype='in', desc='Volume component of secondary structure')
wsur = Float(iotype='in', desc='Surface area component of secondary structure')
wcw = Float(iotype='in', desc='Factor in weight equation for nonoptimal weights')
wca = Float(iotype='in', desc='Factor in weight equation for nonoptimal weights')
nwing = Int(iotype='in', desc='Number of wing segments for analysis')
# Outputs
# --------------------
wfuselaget = Float(iotype='out', units='lb', desc='Total fuselage weight')
wwingt = Float(iotype='out', units='lb', desc='Total wing weight')
def __init__(self):
"""Constructor for the PdcylComp component"""
super(PdcylComp, self).__init__()
# External Code public variables
self.stdin = 'PDCYL.in'
self.stdout = 'PDCYL.out'
self.stderr = 'PDCYL.err'
self.command = ['PDCYL']
self.external_files = [
FileMetadata(path=self.stdin, input=True),
FileMetadata(path=self.stdout),
FileMetadata(path=self.stderr),
]
# Dictionary contains location of every numeric scalar variable
fields = {}
fields[8] = 'wsweep'
fields[9] = 'war'
fields[10] = 'wtaper'
fields[11] = 'wtcroot'
fields[12] = 'wtctip'
fields[13] = 'warea'
fields[15] = 'ps'
fields[16] = 'tmgw'
fields[17] = 'effw'
fields[18] = 'effc'
fields[19] = 'esw'
fields[20] = 'fcsw'
fields[21] = 'dsw'
fields[22] = 'kdew'
fields[23] = 'kdfw'
fields[25] = 'istama'
fields[27] = 'cs1'
fields[28] = 'cs2'
fields[29] = 'uwwg'
fields[30] = 'xwloc1'
fields[32] = 'claqr'
fields[33] = 'ifuel'
fields[35] = 'cwman'
fields[36] = 'cf'
fields[40] = 'itail'
fields[42] = 'uwt'
fields[43] = 'clrt'
fields[44] = 'harea'
fields[49] = 'frn'
fields[50] = 'frab'
fields[51] = 'bodl'
fields[52] = 'bdmax'
fields[54] = 'ckf'
fields[55] = 'ec'
fields[56] = 'kgc'
fields[57] = 'kgw'
fields[58] = 'kcont'
fields[59] = 'kconb'
fields[67] = 'ftst'
fields[68] = 'ftsb'
fields[69] = 'fcst'
fields[70] = 'fcsb'
fields[71] = 'est'
fields[72] = 'esb'
fields[73] = 'eft'
fields[74] = 'efb'
fields[75] = 'dst'
fields[76] = 'dsb'
fields[77] = 'dft'
fields[78] = 'dfb'
fields[79] = 'tmgt'
fields[80] = 'tmgb'
fields[81] = 'kde'
fields[82] = 'kdf'
fields[84] = 'clbr1'
fields[85] = 'icyl'
fields[90] = 'neng'
fields[91] = 'nengwing'
fields[92] = 'wfp'
fields[93] = 'clrw1'
fields[95] = 'clrw2'
fields[96] = 'clrw3'
fields[100] = 'deslf'
fields[101] = 'ultlf'
fields[102] = 'axac'
fields[103] = 'cman'
fields[104] = 'iload'
fields[107] = 'pgt'
fields[108] = 'pgb'
fields[109] = 'wfbump'
fields[110] = 'wfland'
fields[114] = 'vsink'
fields[115] = 'stroke'
fields[116] = 'clrg1'
fields[117] = 'clrg2'
fields[118] = 'wfgr1'
fields[119] = 'wfgr2'
fields[120] = 'igear'
fields[122] = 'gfrl'
fields[123] = 'clrgw1'
fields[124] = 'clrgw2'
fields[129] = 'wgto'
fields[130] = 'wtff'
fields[131] = 'cbum'
fields[132] = 'clan'
fields[136] = 'ischrenk'
fields[138] = 'icomnd'
fields[140] = 'wgno'
fields[141] = 'slfmb'
fields[142] = 'wmis'
fields[143] = 'wsur'
fields[144] = 'wcw'
fields[145] = 'wca'
fields[146] = 'nwing'
self._fields = fields
    def execute(self):
        """Run PDCYL: write its input file, invoke the code, read results."""
        # Prepare the input file for PDCYL
        self.generate_input()
        # Run PDCYL via ExternalCode's execute function
        super(PdcylComp, self).execute()
        # Parse the output file from PDCYL
        self.parse_output()
def generate_input(self):
"""Creates the PDCYL custom input file."""
data = []
form = "%.15g %s\n"
# It turns out to be simple and quick to generate a new input file each
# time, rather than poking values into a template.
data.append("\n\n")
data.append(self.title)
data.append("\n\n")
if self.icalc == True:
icalc = 3
else:
icalc = 0
data.append("%d icalc print switch" % icalc)
data.append("\n\n\n")
data.append("Wing geometry:\n")
for nline in range(8, 14):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("Material properties:\n")
for nline in range(15, 24):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("Geometric properties:\n")
name = self._fields[25]
data.append(form % (self.get(name), name))
data.append("\n")
for nline in range(27, 31):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("Structural concept:\n")
for nline in range(32, 34):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n")
for nline in range(35, 37):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n\n")
data.append("Tails:\n")
name = self._fields[40]
data.append(form % (self.get(name), name))
data.append("\n")
for nline in range(42, 45):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n\n\n")
data.append("Fuselage geometry:\n")
for nline in range(49, 53):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("Structural concept:\n")
for nline in range(54, 60):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n\n\n\n\n\n")
data.append("Material properties:\n")
for nline in range(67, 83):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("Geometric parameters:\n")
for nline in range(84, 86):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n\n\n")
data.append("Engines:\n")
for nline in range(90, 94):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n")
for nline in range(95, 97):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n\n")
data.append("Loads:\n")
for nline in range(100, 105):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n\n")
for nline in range(107, 111):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n\n")
data.append("Landing gear:\n")
for nline in range(114, 121):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n\n")
for nline in range(122, 125):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n\n\n")
data.append("Weights:\n")
for nline in range(129, 133):
name = self._fields[nline]
data.append(form % (self.get(name), name))
data.append("\n\n")
data.append("Factors:\n")
name = self._fields[136]
data.append(form % (self.get(name), name))
data.append("\n")
name = self._fields[138]
data.append(form % (self.get(name), name))
data.append("\n")
for nline in range(140, 147):
name = self._fields[nline]
data.append(form % (self.get(name), name))
outfile = open(self.stdin, 'w')
outfile.writelines(data)
outfile.close()
    def parse_output(self):
        """Parse the PDCYL output file and extract the total wing and
        fuselage structural weights into the output traits."""
        infile = FileParser()
        infile.set_file(self.stdout)
        # Each value follows its key phrase in the PDCYL report.
        self.wwingt = infile.transfer_keyvar("Total Wing Structural Weight", 1)
        self.wfuselaget = infile.transfer_keyvar("Fuselage Total Structural Weight", 1)
def load_model(self, filename):
"""Reads in an existing PDCYL input file and populates the variable
tree with its values."""
infile = FileParser()
infile.set_file(filename)
# Title is a string
self.title = infile.transfer_line(2)
# Print flag becomes a Bool
if infile.transfer_var(4, 1) == 3:
self.icalc = True
else:
self.icalc = False
# Named variables in dictionary
for key, val in self._fields.iteritems():
self.set(val, infile.transfer_var(key, 1))
if __name__ == "__main__": # pragma: no cover
my_comp = PdcylComp()
my_comp.run()
| {
"content_hash": "122696d75413032c449bc291cf540a43",
"timestamp": "",
"source": "github",
"line_count": 473,
"max_line_length": 129,
"avg_line_length": 41.0676532769556,
"alnum_prop": 0.5801801801801801,
"repo_name": "DailyActie/Surrogate-Model",
"id": "48085cecca0bc06adce27b5cb1d3540e92db776c",
"size": "19425",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "01-codes/OpenMDAO-Framework-dev/contrib/pdcyl/pdcyl_comp.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "345"
},
{
"name": "Batchfile",
"bytes": "18746"
},
{
"name": "C",
"bytes": "13004913"
},
{
"name": "C++",
"bytes": "14692003"
},
{
"name": "CMake",
"bytes": "72831"
},
{
"name": "CSS",
"bytes": "303488"
},
{
"name": "Fortran",
"bytes": "7339415"
},
{
"name": "HTML",
"bytes": "854774"
},
{
"name": "Java",
"bytes": "38854"
},
{
"name": "JavaScript",
"bytes": "2432846"
},
{
"name": "Jupyter Notebook",
"bytes": "829689"
},
{
"name": "M4",
"bytes": "1379"
},
{
"name": "Makefile",
"bytes": "48708"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "PHP",
"bytes": "93585"
},
{
"name": "Pascal",
"bytes": "1449"
},
{
"name": "Perl",
"bytes": "1152272"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "34668203"
},
{
"name": "Roff",
"bytes": "5925"
},
{
"name": "Ruby",
"bytes": "92498"
},
{
"name": "Shell",
"bytes": "94698"
},
{
"name": "TeX",
"bytes": "156540"
},
{
"name": "TypeScript",
"bytes": "41691"
}
],
"symlink_target": ""
} |
"""A basic example of authentication requests within a hug API"""
import hug
import jwt
# Several authenticators are included in hug/authentication.py. These functions
# accept a verify_user function, which can be either an included function (such
# as the basic username/password function demonstrated below), or logic of your
# own. Verification functions return an object to store in the request context
# on successful authentication. Naturally, this is a trivial demo, and a much
# more robust verification function is recommended. This is for strictly
# illustrative purposes.
# Basic (username/password) authenticator built from hug's bundled verify()
# helper.  Hard-coded credentials are for demo purposes only.
authentication = hug.authentication.basic(hug.authentication.verify("User1", "mypassword"))
@hug.get("/public")
def public_api_call():
return "Needs no authentication"
# Note that the logged in user can be accessed via a built-in directive.
# Directives can provide computed input parameters via an abstraction
# layer so as not to clutter your API functions with access to the raw
# request object.
@hug.get("/authenticated", requires=authentication)
def basic_auth_api_call(user: hug.directives.user):
return "Successfully authenticated with user: {0}".format(user)
# Here is a slightly less trivial example of how authentication might
# look in an API that uses keys.
# First, the user object stored in the context need not be a string,
# but can be any Python object.
class APIUser(object):
    """Minimal example of a rich user object carrying an id and API key."""

    def __init__(self, user_id, api_key):
        # Store both credentials on the instance in a single statement.
        self.user_id, self.api_key = user_id, api_key
def api_key_verify(api_key):
    """Return an APIUser for a valid key, or None to reject the request."""
    magic_key = "5F00832B-DE24-4CAF-9638-C10D1C642C6C"  # Obviously, this would hit your database
    if api_key != magic_key:
        # Invalid key: rejecting is signalled by returning None.
        return None
    # Success: the returned object is stored in the request context.
    return APIUser("user_foo", api_key)
# API-key authenticator wired to api_key_verify above (hug extracts the key
# from the request -- presumably its API-key header; see the hug docs).
api_key_authentication = hug.authentication.api_key(api_key_verify)
@hug.get("/key_authenticated", requires=api_key_authentication) # noqa
def basic_auth_api_call(user: hug.directives.user):
return "Successfully authenticated with user: {0}".format(user.user_id)
def token_verify(token):
    """Verify a JWT and return its decoded payload, or False when invalid.

    NOTE: the demo secret must be changed (and kept out of source control)
    in any real deployment.
    """
    secret_key = "super-secret-key-please-change"
    try:
        # PyJWT's decode() takes the *algorithms* (plural) whitelist; the
        # singular "algorithm" keyword belongs to encode() and is ignored
        # (or rejected, in newer PyJWT) by decode().
        return jwt.decode(token, secret_key, algorithms=["HS256"])
    except jwt.InvalidTokenError:
        # Covers DecodeError as well as expired/invalid-claim tokens, all
        # of which should fail authentication rather than raise a 500.
        return False
# Token (JWT) authenticator wired to token_verify above.
token_key_authentication = hug.authentication.token(token_verify)
@hug.get("/token_authenticated", requires=token_key_authentication) # noqa
def token_auth_call(user: hug.directives.user):
return "You are user: {0} with data {1}".format(user["user"], user["data"])
@hug.post("/token_generation") # noqa
def token_gen_call(username, password):
"""Authenticate and return a token"""
secret_key = "super-secret-key-please-change"
mockusername = "User2"
mockpassword = "Mypassword"
if mockpassword == password and mockusername == username: # This is an example. Don't do that.
return {
"token": jwt.encode({"user": username, "data": "mydata"}, secret_key, algorithm="HS256")
}
return "Invalid username and/or password for user: {0}".format(username)
| {
"content_hash": "a275a01ab80ca3dac9b14d1e638ee952",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 100,
"avg_line_length": 36.093023255813954,
"alnum_prop": 0.7152061855670103,
"repo_name": "timothycrosley/hug",
"id": "26d7d4d7ce92cedc9aa04acae33900619ef0d91d",
"size": "3104",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "examples/authentication.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "479"
},
{
"name": "HTML",
"bytes": "58"
},
{
"name": "Python",
"bytes": "444815"
},
{
"name": "Shell",
"bytes": "6103"
}
],
"symlink_target": ""
} |
from collections import Counter
from nltk import RegexpTokenizer
from nltk.stem.porter import PorterStemmer
from nltk.stem.snowball import RussianStemmer
from pymorphy2 import MorphAnalyzer
import artm
import numpy as np
from string import punctuation
def build_text_cleaner():
    """Build and return a callable that lowercases text and replaces
    punctuation (ASCII plus a set of common unicode marks) with spaces.
    """
    remove_chars = punctuation
    remove_chars += u'…’—。,‘¸、ー一:ا°»·!«・¦‹】【█‐‑●》《،■“•'
    translate_tab = {ord(p): u" " for p in remove_chars}

    def replace_punctuation(raw_text):
        try:
            return raw_text.lower().translate(translate_tab)
        except Exception:
            # Narrowed from a bare ``except`` so KeyboardInterrupt and
            # SystemExit are not swallowed; show the offending text and
            # re-raise for the caller to handle.
            print(raw_text.lower())
            raise

    return replace_punctuation
def get_text_processor():
    """Return a generator-producing tokenizer/normalizer for mixed
    Russian/English text.

    The returned callable cleans and tokenizes the text, then yields one
    normalized token at a time: tokens made entirely of digits, Cyrillic
    letters, or a handful of symbols are lemmatized with pymorphy2;
    everything else is Porter-stemmed.
    """
    txt_cleaner = build_text_cleaner()
    tokenizer = RegexpTokenizer('[\w\d]+', gaps=False)
    stemmer = PorterStemmer()
    #ru_stemmer = RussianStemmer()
    morph = MorphAnalyzer()
    def inner(text):
        # NOTE(review): unicode() makes this Python-2 only.
        tokens = tokenizer.tokenize(txt_cleaner(unicode(text)))
        for token in tokens:
            if all([c in u'1234567890йцукенгшщзхъэждлорпавыфячсмитьбю+-*/-_="$' for c in token]):
                # Russian/numeric token: take pymorphy2's normal form.
                # yield ru_stemmer.stem(token)
                yield morph.parse(token)[0].normal_form
            else:
                # Latin (or mixed) token: Porter stemming.
                yield stemmer.stem(token)
    return inner
def apply_box_cox(vector):
    """Smooth *vector* into a probability distribution for sampling.

    Applies a Box-Cox-like chain of transforms to the non-zero entries
    (log, normalize, invert, shift, normalize), then blends the result
    back with the original values and renormalizes.  Zero entries are
    left untouched at every step.
    """
    v = np.asarray(vector).copy()
    # The non-zero mask is recomputed after every step because a transform
    # can itself create new zeros (e.g. log(1) == 0).
    mask = v != 0
    v[mask] = np.log(v[mask])
    mask = v != 0
    v[mask] = v[mask] / v[mask].sum()
    mask = v != 0
    v[mask] = (1 - v[mask]) / (1 - v[mask]).sum()
    mask = v != 0
    v[mask] = v[mask] - v[mask].min()
    mask = v != 0
    v[mask] = v[mask] / v[mask].sum()
    # Blend with the raw vector (weight 3 on the transformed part) for more
    # stable sampling, then renormalize so the result sums to one.
    v = vector.reshape(-1) + v.reshape(-1) * 3
    v = v / v.sum()
    return v
def sample_from(vector, sample_size=442):
    """Draw a multinomial sample (a Counter of index -> count) from
    *vector* after the smoothing done by apply_box_cox().

    Uses a fixed seed, so sampling is reproducible.  On any failure the
    error is printed and None is returned implicitly -- a deliberate
    best-effort behaviour; NOTE(review): confirm callers rely on this
    before tightening it.
    """
    rnd = np.random.RandomState(seed=42)
    try:
        v = apply_box_cox(vector)
        # The 0.99999 scaling keeps probabilities strictly below 1
        # (presumably guarding multinomial's sum check -- verify); entries
        # with a zero count are dropped.  The loop's (k, v) deliberately
        # shadows the outer v only inside the generator expression.
        return Counter(dict((k, v) for k, v in enumerate(rnd.multinomial(sample_size, v.reshape(-1) * 0.99999)) if v > 0))
    except Exception as e:
        print vector.sum(), e
# Module-level registry of field names already warned about by to_vw(), so
# each missing field is reported only once per process.
once_awared = set()
def to_vw(row, key='id', fields=('tag', 'text', 'classes')):
    """Serialize one *row* (a mapping) into a Vowpal Wabbit input line.

    row: mapping with a *key* entry holding a URL (its 'https://' prefix
        is stripped to form the example tag) and, optionally, one
        Counter-like mapping per entry in *fields*.
    Returns the VW line terminated with a newline.

    Note: the default is now an immutable tuple instead of the original
    mutable list -- the list was never mutated, but the tuple removes the
    shared-mutable-default hazard.
    """
    line = '%s ' % row[key][len('https://'):]
    for field in fields:
        if field not in row:
            if field not in once_awared:
                # Warn only once per missing field name across all rows.
                print('WARNING: there is no %s field in the row %s'
                      % (field, row[key]))
                once_awared.add(field)
            continue
        if row[field] is not None and len(row[field]) > 0:
            line += '|%s ' % field
            line += '%s ' % ' '.join(
                '%s:%i' % (unicode(pair[0]).replace(':', ''), pair[-1])
                for pair in row[field].items()
                if pair[-1] > 0 and len(unicode(pair[0]).replace(':', '')) > 0)
    return '%s\n' % line
def prepare_for_artm(df, dataset_name):
    """Dump *df*'s rows to a Vowpal-Wabbit-format file and wrap it in an
    artm.BatchVectorizer.

    df: DataFrame-like object whose rows are accepted by to_vw()
        (NOTE(review): assumed from the iterrows() call -- confirm).
    dataset_name: base name used for ../data/<name>.vw and the
        ../data/<name>_batches folder.
    """
    import io
    vw_path = '../data/%s.vw' % dataset_name
    with io.open(vw_path, 'w', encoding='utf8') as outcome:
        for k, row in df.iterrows():
            outcome.write(to_vw(row))
    artm_path = '../data/%s_batches' % dataset_name
    return artm.BatchVectorizer(target_folder=artm_path, data_path=vw_path, data_format='vowpal_wabbit',
                                batch_size=1000)
| {
"content_hash": "d3d395c2d78e33bf68ab8db0b11bf42d",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 122,
"avg_line_length": 33.73737373737374,
"alnum_prop": 0.5745508982035928,
"repo_name": "menshikh-iv/image2pic",
"id": "a67bab0b2fe2f72940fc9970a4c80eeeefaa4bc2",
"size": "3475",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "artm/workspace/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "573"
},
{
"name": "HTML",
"bytes": "11580"
},
{
"name": "JavaScript",
"bytes": "1471"
},
{
"name": "Jupyter Notebook",
"bytes": "70110"
},
{
"name": "Python",
"bytes": "13629"
},
{
"name": "Shell",
"bytes": "283"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models
# Create your models here.
class Quotes(models.Model):
    """A single quotation attributed to a named person."""

    class Meta:  # include this to ensure build in IDE
        app_label = "Quotes_app"

    quoter_first_name = models.CharField(max_length=255)
    quoter_last_name = models.CharField(max_length=255)
    quote_text = models.CharField(max_length=255)

    def __str__(self):
        # Render as "<first> <last>".
        names = [self.quoter_first_name, self.quoter_last_name]
        return ' '.join(names)
| {
"content_hash": "b6892c04fcd448a7b5d2a7a6f418c6d7",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 56,
"avg_line_length": 26.894736842105264,
"alnum_prop": 0.6418786692759295,
"repo_name": "GunnerJnr/_CodeInstitute",
"id": "72482b52964ba02d9a38b4a27b0bf71c1ea71370",
"size": "535",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Stream-3/Full-Stack-Development/4.Hello-Django-Administration/4.-Wire-Up-A-Model-To-A-Template/challenge_solution/QuoteOfTheDay/Quotes_app/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "336"
},
{
"name": "CSS",
"bytes": "2545480"
},
{
"name": "HTML",
"bytes": "708226"
},
{
"name": "JavaScript",
"bytes": "1984479"
},
{
"name": "Python",
"bytes": "1727585"
},
{
"name": "Shell",
"bytes": "75780"
},
{
"name": "TSQL",
"bytes": "642"
}
],
"symlink_target": ""
} |
from django.utils.translation import ugettext
class SocialAuthBaseException(ValueError):
    """Base class for pipeline exceptions."""
    # NOTE(review): subclasses ValueError, presumably so pre-existing
    # callers catching ValueError keep working -- verify before changing.
    pass
class BackendError(SocialAuthBaseException):
    """Error raised by a backend during the auth process."""

    def __unicode__(self):
        # Translate the template first, then interpolate.  Interpolating
        # before ugettext() (as the original did) builds a dynamic msgid
        # that can never match a translation catalog entry; this also
        # matches the style of the other exceptions in this module
        # (e.g. AuthTokenError, AuthFailed).
        return ugettext('Backend error: %s') % self.message
class WrongBackend(BackendError):
    """Raised when an unknown/unsupported backend name is requested."""

    def __init__(self, backend_name):
        # Name of the authentication service that was requested.
        self.backend_name = backend_name

    def __unicode__(self):
        # Translate first, then interpolate the backend name.
        return ugettext('Incorrect authentication service "%s"') % \
            self.backend_name
class NotAllowedToDisconnect(SocialAuthBaseException):
    """User is not allowed to disconnect its social account."""
    pass


class StopPipeline(SocialAuthBaseException):
    """Stop pipeline process exception.
    Raise this exception to stop the rest of the pipeline process.
    """
    def __unicode__(self):
        return ugettext('Stop pipeline')
class AuthException(SocialAuthBaseException):
    """Auth process exception."""
    def __init__(self, backend, *args, **kwargs):
        # The backend where the error occurred, stored on the instance so
        # error handlers can see which provider failed.
        self.backend = backend
        super(AuthException, self).__init__(*args, **kwargs)
class AuthFailed(AuthException):
    """Auth process failed for some reason."""
    def __unicode__(self):
        # 'access_denied' is the error providers send when the user backs
        # out of the authorization dialog, so show a friendlier message.
        if self.message == 'access_denied':
            return ugettext('Authentication process was cancelled')
        else:
            return ugettext('Authentication failed: %s') % \
                super(AuthFailed, self).__unicode__()
class AuthCanceled(AuthException):
    """Auth process was canceled by user."""
    def __unicode__(self):
        return ugettext('Authentication process canceled')


class AuthUnknownError(AuthException):
    """Unknown auth process error."""
    def __unicode__(self):
        # Appends the underlying error text to the translated prefix.
        err = 'An unknown error happened while authenticating %s'
        return ugettext(err) % super(AuthUnknownError, self).__unicode__()


class AuthTokenError(AuthException):
    """Auth token error."""
    def __unicode__(self):
        msg = super(AuthTokenError, self).__unicode__()
        return ugettext('Token error: %s') % msg
class AuthMissingParameter(AuthException):
    """Missing parameter needed to start or complete the process."""
    def __init__(self, backend, parameter, *args, **kwargs):
        # Name of the parameter that was expected but absent.
        self.parameter = parameter
        super(AuthMissingParameter, self).__init__(backend, *args, **kwargs)

    def __unicode__(self):
        return ugettext('Missing needed parameter %s') % self.parameter
class AuthStateMissing(AuthException):
    """State parameter is incorrect."""
    # NOTE(review): the OAuth 'state' value was not found in the session
    # (inferred from the message -- confirm against the pipeline code).
    def __unicode__(self):
        return ugettext('Session value state missing.')


class AuthStateForbidden(AuthException):
    """State parameter is incorrect."""
    def __unicode__(self):
        return ugettext('Wrong state parameter given.')


class AuthAlreadyAssociated(AuthException):
    """A different user has already associated the target social account"""
    pass


class AuthTokenRevoked(AuthException):
    """User revoked the access_token in the provider."""
    def __unicode__(self):
        return ugettext('User revoke access to the token')
| {
"content_hash": "b6cb2edeb5c19859b158fbdfbb60f42f",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 76,
"avg_line_length": 30.821782178217823,
"alnum_prop": 0.6662383552842917,
"repo_name": "limdauto/django-social-auth",
"id": "0e0a5a23fdbde0d90b9c5893f6184bf589efedd3",
"size": "3113",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "social_auth/exceptions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "20595"
},
{
"name": "Makefile",
"bytes": "983"
},
{
"name": "Python",
"bytes": "366255"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
import os
import pytest
from ramlfications import parse
from ramlfications import parser as pw
from ramlfications.config import setup_config
from ramlfications.raml import RootNode, ResourceTypeNode, TraitNode
from ramlfications._helpers import load_file
from tests.base import EXAMPLES
@pytest.fixture(scope="session")
def github_raml():
raml_file = os.path.join(EXAMPLES, "github.raml")
return load_file(raml_file)
def test_parse_raml(github_raml):
    """parse_raml() on the GitHub RAML produces a RootNode."""
    config = setup_config(os.path.join(EXAMPLES, "github-config.ini"))
    assert isinstance(pw.parse_raml(github_raml, config), RootNode)
@pytest.fixture(scope="session")
def root():
raml_file = os.path.join(EXAMPLES, "github.raml")
loaded_raml_file = load_file(raml_file)
config_file = os.path.join(EXAMPLES, "github-config.ini")
config = setup_config(config_file)
return pw.create_root(loaded_raml_file, config)
def test_base_uri(root):
    """Root node exposes the API's base URI."""
    assert root.base_uri == "https://api.github.com/"


def test_protocols(root):
    """Only HTTPS is declared."""
    assert root.protocols == ["HTTPS"]


def test_docs(root):
    """github.raml has no documentation section."""
    assert root.documentation is None


def test_base_uri_params(root):
    assert root.base_uri_params is None


def test_uri_params(root):
    assert root.uri_params is None


def test_title(root):
    assert root.title == "GitHub API"


def test_version(root):
    assert root.version == "v3"


def test_schemas(root):
    assert root.schemas is None


def test_media_type(root):
    assert root.media_type == "application/json"
#####
# Security Schemes
#####
@pytest.fixture(scope="session")
def sec_schemes():
raml_file = os.path.join(EXAMPLES, "github.raml")
config = os.path.join(EXAMPLES, "github-config.ini")
api = parse(raml_file, config)
return api.security_schemes
def test_create_security_schemes(sec_schemes):
    """Both GitHub security schemes are parsed with their full settings."""
    assert len(sec_schemes) == 2

    basic = sec_schemes[0]
    assert basic.name == "basic"
    assert basic.type == "Basic Authentication"

    oauth = sec_schemes[1]
    assert oauth.name == "oauth_2_0"
    assert oauth.type == "OAuth 2.0"
    expected = ("OAuth2 is a protocol that lets external apps request "
                "authorization to private\ndetails in a user's GitHub "
                "account without getting their password. This is\npreferred "
                "over Basic Authentication because tokens can be limited to "
                "specific\ntypes of data, and can be revoked by users at "
                "any time.\n")
    assert oauth.description.raw == expected

    assert len(oauth.headers) == 1
    assert len(oauth.query_params) == 1
    assert len(oauth.responses) == 1

    auth_header = oauth.headers[0]
    assert auth_header.name == "Authorization"
    assert auth_header.type == "string"
    expected = ("Used to send a valid OAuth 2 access token. Do not use together "
                "with\nthe \"access_token\" query string parameter.\n")
    assert auth_header.description.raw == expected

    token_param = oauth.query_params[0]
    assert token_param.name == "access_token"
    assert token_param.type == "string"
    expected = ("Used to send a valid OAuth 2 access token. Do not use "
                "together with\nthe \"Authorization\" header\n")
    assert token_param.description.raw == expected

    assert oauth.responses[0].code == 404
    assert oauth.responses[0].description.raw == "Unauthorized"

    assert oauth.settings == {
        "authorizationUri": "https://github.com/login/oauth/authorize",
        "accessTokenUri": "https://github.com/login/oauth/access_token",
        "authorizationGrants": ["code"],
        "scopes": [
            "user",
            "user:email",
            "user:follow",
            "public_repo",
            "repo",
            "repo:status",
            "delete_repo",
            "notifications",
            "gist"
        ]
    }
#####
# Traits
#####
@pytest.fixture(scope="session")
def traits():
raml_file = os.path.join(EXAMPLES, "github.raml")
config = os.path.join(EXAMPLES, "github-config.ini")
api = parse(raml_file, config)
return api.traits
def test_create_traits(traits):
    """The GitHub RAML defines exactly two traits, parsed as TraitNodes."""
    assert all(isinstance(trait, TraitNode) for trait in traits)
    assert len(traits) == 2
def test_trait_historical(traits):
    """The 'historical' trait defines only a single 'since' query param."""
    historical = traits[0]
    assert historical.name == "historical"
    assert historical.form_params is None
    assert historical.uri_params is None
    assert historical.headers is None
    assert historical.body is None
    assert historical.responses is None
    assert len(historical.query_params) == 1

    since = historical.query_params[0]
    assert since.name == "since"
    assert since.type == "string"
    expected = ("Timestamp in ISO 8601 format YYYY-MM-DDTHH:MM:SSZ.\n"
                "Only gists updated at or after this time are returned.\n")
    assert since.description.raw == expected
def test_trait_filterable(traits):
    """The 'filterable' trait contributes six query parameters."""
    filterable = traits[1]
    assert filterable.name == "filterable"
    assert filterable.form_params is None
    assert filterable.uri_params is None
    assert filterable.headers is None
    assert filterable.body is None
    assert filterable.responses is None
    assert len(filterable.query_params) == 6

    filter_ = filterable.query_params[0]
    assert filter_.name == "filter"
    assert filter_.enum == [
        "assigned", "created", "mentioned", "subscribed", "all"
    ]
    assert filter_.default == "all"
    assert filter_.required
    assert filter_.type == "string"
    expected = ("Issues assigned to you / created by you / mentioning you / "
                "you're\nsubscribed to updates for / All issues the authenticated "
                "user can see\n")
    assert filter_.description.raw == expected

    state = filterable.query_params[1]
    assert state.name == "state"
    assert state.enum == ["open", "closed"]
    assert state.default == "open"
    assert state.required
    assert state.type == "string"
    assert state.description is None
    assert not hasattr(state.description, "raw")
    assert not hasattr(state.description, "html")

    label = filterable.query_params[2]
    assert label.name == "labels"
    assert label.type == "string"
    assert label.required
    expected = ("String list of comma separated Label names. Example - "
                "bug,ui,@high.")
    assert label.description.raw == expected

    sort = filterable.query_params[3]
    assert sort.name == "sort"
    assert sort.enum == ["created", "updated", "comments"]
    assert sort.default == "created"
    assert sort.required
    assert sort.type == "string"
    assert sort.description is None
    assert not hasattr(sort.description, "raw")
    assert not hasattr(sort.description, "html")

    direction = filterable.query_params[4]
    assert direction.name == "direction"
    assert direction.enum == ["asc", "desc"]
    assert direction.default == "desc"
    assert direction.required
    assert direction.description is None
    assert not hasattr(direction.description, "raw")
    assert not hasattr(direction.description, "html")

    since = filterable.query_params[5]
    assert since.name == "since"
    assert since.type == "string"
    expected = ("Optional string of a timestamp in ISO 8601 format: "
                "YYYY-MM-DDTHH:MM:SSZ.\nOnly issues updated at or after this "
                "time are returned.\n")
    assert since.description.raw == expected
#####
# Resource Types
#####
@pytest.fixture(scope="session")
def resource_types():
raml_file = os.path.join(EXAMPLES, "github.raml")
config = os.path.join(EXAMPLES, "github-config.ini")
api = parse(raml_file, config)
return api.resource_types
def test_create_resource_types(resource_types):
    """All twelve resource types are parsed into ResourceTypeNodes."""
    assert all(isinstance(rtype, ResourceTypeNode) for rtype in resource_types)
    assert len(resource_types) == 12
def test_resource_type_get_base(resource_types):
res = resource_types[0]
assert res.method == "get"
assert len(res.headers) == 6
assert len(res.responses) == 1
assert res.body is None
assert res.query_params is None
assert res.form_params is None
assert res.uri_params is None
assert res.optional
header = res.headers[0]
assert header.name == "X-GitHub-Media-Type"
assert header.type == "string"
desc = "You can check the current version of media type in responses.\n"
assert header.description.raw == desc
header = res.headers[1]
assert header.name == "Accept"
assert header.type == "string"
assert header.description.raw == "Is used to set specified media type."
header = res.headers[2]
assert header.name == "X-RateLimit-Limit"
assert header.type == "integer"
header = res.headers[3]
assert header.name == "X-RateLimit-Remaining"
assert header.type == "integer"
header = res.headers[4]
assert header.name == "X-RateLimit-Reset"
assert header.type == "integer"
header = res.headers[5]
assert header.name == "X-GitHub-Request-Id"
assert header.type == "integer"
assert res.responses[0].code == 403
desc = ("API rate limit exceeded. "
"See http://developer.github.com/v3/#rate-limiting\nfor "
"details.\n")
assert res.responses[0].description.raw == desc
def test_resource_type_post_base(resource_types):
res = resource_types[1]
assert res.method == "post"
assert len(res.headers) == 6
assert len(res.responses) == 1
assert res.body is None
assert res.query_params is None
assert res.form_params is None
assert res.uri_params is None
assert res.optional
header = res.headers[0]
assert header.name == "X-GitHub-Media-Type"
assert header.type == "string"
desc = "You can check the current version of media type in responses.\n"
assert header.description.raw == desc
header = res.headers[1]
assert header.name == "Accept"
assert header.type == "string"
assert header.description.raw == "Is used to set specified media type."
header = res.headers[2]
assert header.name == "X-RateLimit-Limit"
assert header.type == "integer"
header = res.headers[3]
assert header.name == "X-RateLimit-Remaining"
assert header.type == "integer"
header = res.headers[4]
assert header.name == "X-RateLimit-Reset"
assert header.type == "integer"
header = res.headers[5]
assert header.name == "X-GitHub-Request-Id"
assert header.type == "integer"
assert res.responses[0].code == 403
desc = ("API rate limit exceeded. "
"See http://developer.github.com/v3/#rate-limiting\nfor "
"details.\n")
assert res.responses[0].description.raw == desc
def test_resource_type_patch_base(resource_types):
res = resource_types[2]
assert res.method == "patch"
assert len(res.headers) == 6
assert len(res.responses) == 1
assert res.body is None
assert res.query_params is None
assert res.form_params is None
assert res.uri_params is None
assert res.optional
header = res.headers[0]
assert header.name == "X-GitHub-Media-Type"
assert header.type == "string"
desc = "You can check the current version of media type in responses.\n"
assert header.description.raw == desc
header = res.headers[1]
assert header.name == "Accept"
assert header.type == "string"
assert header.description.raw == "Is used to set specified media type."
header = res.headers[2]
assert header.name == "X-RateLimit-Limit"
assert header.type == "integer"
header = res.headers[3]
assert header.name == "X-RateLimit-Remaining"
assert header.type == "integer"
header = res.headers[4]
assert header.name == "X-RateLimit-Reset"
assert header.type == "integer"
header = res.headers[5]
assert header.name == "X-GitHub-Request-Id"
assert header.type == "integer"
assert res.responses[0].code == 403
desc = ("API rate limit exceeded. "
"See http://developer.github.com/v3/#rate-limiting\nfor "
"details.\n")
assert res.responses[0].description.raw == desc
def test_resource_type_put_base(resource_types):
res = resource_types[3]
assert res.method == "put"
assert len(res.headers) == 6
assert len(res.responses) == 1
assert res.body is None
assert res.query_params is None
assert res.form_params is None
assert res.uri_params is None
assert res.optional
header = res.headers[0]
assert header.name == "X-GitHub-Media-Type"
assert header.type == "string"
desc = "You can check the current version of media type in responses.\n"
assert header.description.raw == desc
header = res.headers[1]
assert header.name == "Accept"
assert header.type == "string"
assert header.description.raw == "Is used to set specified media type."
header = res.headers[2]
assert header.name == "X-RateLimit-Limit"
assert header.type == "integer"
header = res.headers[3]
assert header.name == "X-RateLimit-Remaining"
assert header.type == "integer"
header = res.headers[4]
assert header.name == "X-RateLimit-Reset"
assert header.type == "integer"
header = res.headers[5]
assert header.name == "X-GitHub-Request-Id"
assert header.type == "integer"
assert res.responses[0].code == 403
desc = ("API rate limit exceeded. "
"See http://developer.github.com/v3/#rate-limiting\nfor "
"details.\n")
assert res.responses[0].description.raw == desc
def test_resource_type_delete_base(resource_types):
    """Verify the 'delete' base resource type parsed from the RAML definition."""
    res = resource_types[4]

    assert res.method == "delete"
    assert len(res.headers) == 6
    assert len(res.responses) == 1
    assert res.body is None
    assert res.query_params is None
    assert res.form_params is None
    assert res.uri_params is None
    assert res.optional

    # Expected (name, type) pairs, in declaration order.
    expected_headers = [
        ("X-GitHub-Media-Type", "string"),
        ("Accept", "string"),
        ("X-RateLimit-Limit", "integer"),
        ("X-RateLimit-Remaining", "integer"),
        ("X-RateLimit-Reset", "integer"),
        ("X-GitHub-Request-Id", "integer"),
    ]
    for header, (exp_name, exp_type) in zip(res.headers, expected_headers):
        assert header.name == exp_name
        assert header.type == exp_type

    # Only the first two headers carry a description in the RAML source.
    media_desc = "You can check the current version of media type in responses.\n"
    assert res.headers[0].description.raw == media_desc
    assert res.headers[1].description.raw == "Is used to set specified media type."

    resp = res.responses[0]
    assert resp.code == 403
    rate_desc = ("API rate limit exceeded. "
                 "See http://developer.github.com/v3/#rate-limiting\nfor "
                 "details.\n")
    assert resp.description.raw == rate_desc
def test_resource_type_get_item(resource_types):
    """Verify the 'item' resource type (which inherits 'base') for GET."""
    res = resource_types[5]

    assert res.name == "item"
    assert res.type == "base"
    assert res.method == "get"
    assert len(res.headers) == 6
    assert len(res.responses) == 1
    assert res.body is None
    assert res.query_params is None
    assert res.form_params is None
    assert res.uri_params is None
    assert res.optional

    # Header order is not guaranteed for inherited types, so compare
    # against an alphabetically sorted expectation.
    headers = sorted(res.headers, key=lambda header: header.name)
    expected_headers = [
        ("Accept", "string"),
        ("X-GitHub-Media-Type", "string"),
        ("X-GitHub-Request-Id", "integer"),
        ("X-RateLimit-Limit", "integer"),
        ("X-RateLimit-Remaining", "integer"),
        ("X-RateLimit-Reset", "integer"),
    ]
    for header, (exp_name, exp_type) in zip(headers, expected_headers):
        assert header.name == exp_name
        assert header.type == exp_type

    # Only two headers carry a description in the RAML source.
    assert headers[0].description.raw == "Is used to set specified media type."
    media_desc = "You can check the current version of media type in responses.\n"
    assert headers[1].description.raw == media_desc

    resp = res.responses[0]
    assert resp.code == 403
    rate_desc = ("API rate limit exceeded. "
                 "See http://developer.github.com/v3/#rate-limiting\nfor "
                 "details.\n")
    assert resp.description.raw == rate_desc
| {
"content_hash": "199ee50815607c0825258bdc6d7d3b09",
"timestamp": "",
"source": "github",
"line_count": 552,
"max_line_length": 79,
"avg_line_length": 30.032608695652176,
"alnum_prop": 0.6545421643141512,
"repo_name": "jmagnusson/ramlfications",
"id": "1d9ed07fd9c82171d7971cd28dcc12bc5a0876ad",
"size": "16657",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/integration/test_github.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "252240"
},
{
"name": "RAML",
"bytes": "2777980"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.test import TestCase
from rest_framework.status import (
is_informational, is_success, is_redirect, is_client_error, is_server_error
)
class TestStatus(TestCase):
    """Boundary tests for the rest_framework.status category predicates."""

    def test_status_categories(self):
        # Each predicate must accept exactly its 100-code band and reject
        # the codes immediately outside it.
        bands = (
            (is_informational, 100, 199),
            (is_success, 200, 299),
            (is_redirect, 300, 399),
            (is_client_error, 400, 499),
            (is_server_error, 500, 599),
        )
        for predicate, lower, upper in bands:
            self.assertFalse(predicate(lower - 1))
            self.assertTrue(predicate(lower))
            self.assertTrue(predicate(upper))
            self.assertFalse(predicate(upper + 1))
| {
"content_hash": "95806d1e7d6061c8982411ad3faa54c4",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 79,
"avg_line_length": 35.15151515151515,
"alnum_prop": 0.678448275862069,
"repo_name": "MJafarMashhadi/django-rest-framework",
"id": "721a6e30b719c9e4e47c2df3653a441a970d7fd4",
"size": "1160",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/test_status.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "9872"
},
{
"name": "HTML",
"bytes": "62160"
},
{
"name": "JavaScript",
"bytes": "1769"
},
{
"name": "Python",
"bytes": "817448"
}
],
"symlink_target": ""
} |
import sys
from pyasn1.codec.der.decoder import decode as der_decode
from pyasn1.codec.der.encoder import encode as der_encode
from pyasn1_modules import pem
from pyasn1_modules import rfc5915
from pyasn1_modules import rfc5480
try:
import unittest2 as unittest
except ImportError:
import unittest
class MUDCertTestCase(unittest.TestCase):
    """Round-trip an RFC 5915 EC private key through the DER codec."""

    private_key_pem_text = """\
MIGkAgEBBDDLjzGbbLrR3T13lrrVum7WC/4Ua4Femc1RhhNVe1Q5XsArQ33kn9kx
3lOUfOcG+qagBwYFK4EEACKhZANiAAT4zZ8HL+xEDpXWkoWp5xFMTz4u4Ae1nF6z
XCYlmsEGD5vPu5hl9hDEjd1UHRgJIPoy3fJcWWeZ8FHCirICtuMgFisNscG/aTwK
yDYOFDuqz/C2jyEwqgWCRyxyohuJXtk=
"""

    def setUp(self):
        # Fresh ASN.1 spec per test so no decode state leaks between runs.
        self.asn1Spec = rfc5915.ECPrivateKey()

    def testDerCodec(self):
        """Decode the PEM body, re-encode, and compare byte-for-byte."""
        der_bytes = pem.readBase64fromText(self.private_key_pem_text)
        decoded, trailer = der_decode(der_bytes, asn1Spec=self.asn1Spec)
        assert not trailer
        assert decoded.prettyPrint()
        assert der_encode(decoded) == der_bytes
        # The named curve must be P-384 (secp384r1).
        assert decoded['parameters']['namedCurve'] == rfc5480.secp384r1
# Collect every TestCase defined in this module so the file can be run
# directly as a script.
suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])

if __name__ == '__main__':
    # NOTE: the original re-imported sys here; it is already imported at
    # the top of the file, so the redundant import has been removed.
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    sys.exit(not result.wasSuccessful())
| {
"content_hash": "8f60ade0152b034683d0dbad0d876d71",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 74,
"avg_line_length": 29.53488372093023,
"alnum_prop": 0.75748031496063,
"repo_name": "cloudera/hue",
"id": "bcb81981432edf3a9aefa922b6aa325beba57bbf",
"size": "1444",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/pyasn1-modules-0.2.6/tests/test_rfc5915.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
} |
"""
Network examples
Create all the pieces needed for a working network.
To run:
python examples/network/create.py
"""
import sys
from examples import common
from examples import connection
def create(conn, name, opts, ports_to_open=(80, 22)):
    """Create a network, subnet, router, and security group named *name*.

    Each piece is looked up first and only created if missing, so the
    function is safe to re-run. Prints each resource as it is resolved.

    :param conn: an openstacksdk connection (must expose ``conn.network``).
    :param name: name shared by the network, subnet, router, and group.
    :param opts: options object; ``opts.data`` may supply
        ``dns_nameservers`` (default ``'206.164.176.34'``) and ``cidr``
        (default ``'10.3.3.0/24'``); both keys are popped.
    :param ports_to_open: TCP ports to open on the security group.
        Default is a tuple (not a list) to avoid the shared
        mutable-default-argument pitfall.
    :returns: the network object (found or newly created).
    """
    dns_nameservers = opts.data.pop('dns_nameservers', '206.164.176.34')
    cidr = opts.data.pop('cidr', '10.3.3.0/24')

    network = conn.network.find_network(name)
    if network is None:
        network = conn.network.create_network(name=name)
    print(str(network))

    subnet = conn.network.find_subnet(name)
    if subnet is None:
        args = {
            "name": name,
            "network_id": network.id,
            "ip_version": "4",
            "dns_nameservers": [dns_nameservers],
            "cidr": cidr,
        }
        subnet = conn.network.create_subnet(**args)
    print(str(subnet))

    # The router uplinks the new subnet to the provider network "Ext-Net".
    extnet = conn.network.find_network("Ext-Net")
    router = conn.network.find_router(name)
    if router is None:
        args = {
            "name": name,
            "external_gateway_info": {"network_id": extnet.id}
        }
        router = conn.network.create_router(**args)
        conn.network.router_add_interface(router, subnet.id)
    print(str(router))

    sg = conn.network.find_security_group(name)
    if sg is None:
        sg = conn.network.create_security_group(name=name)
        for port in ports_to_open:
            conn.network.security_group_open_port(sg.id, port)
        conn.network.security_group_allow_ping(sg.id)
    print(str(sg))

    return network
def run_network(opts):
    """Open a connection and build the example network.

    Pops ``'name'`` from ``opts.data`` (default ``'netty'``) and delegates
    to :func:`create`, returning the resulting network object.
    """
    name = opts.data.pop('name', 'netty')
    conn = connection.make_connection(opts)
    # 'return' is a statement, not a function: drop the call-style parens.
    return create(conn, name, opts)
# Script entry point: parse the shared example options, run the network
# example, and exit non-zero if it fails.
if __name__ == "__main__":
    opts = common.setup()
    sys.exit(common.main(opts, run_network))
| {
"content_hash": "06b436b10a89f7ee1eb231a8ddb81be2",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 72,
"avg_line_length": 26.970149253731343,
"alnum_prop": 0.6142778085224129,
"repo_name": "dudymas/python-openstacksdk",
"id": "e948ccc8a51f10108ac3879164c846742f42f31e",
"size": "2353",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/network/create.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1062098"
},
{
"name": "Shell",
"bytes": "1383"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.