text stringlengths 4 1.02M | meta dict |
|---|---|
from django.test import TestCase
from django.contrib.auth.models import User
from .models import UserSecret
class UserRegistrationTestCase(TestCase):
    """Exercise user creation and the auto-created ``UserSecret``.

    Fix: the original used Python 2 ``print`` statements, which are
    syntax errors on Python 3 — converted to ``print()`` calls (also
    valid on Python 2).  A duplicated ``assertTrue`` was removed.
    """

    def setUp(self):
        print(">>>creating an instance of User")
        # NOTE(review): objects.create() stores the password verbatim
        # (unhashed).  Production code should use
        # User.objects.create_user(); kept here because
        # test_user_is_exists asserts on the raw value.
        User.objects.create(username='test_username',
                            password='test_password',
                            email='me@iamkel.com',
                            first_name='kel',
                            last_name='testme',)

    def test_user_is_exists(self):
        """The user created in setUp is retrievable by username."""
        test_user = User.objects.get(username="test_username")
        self.assertEqual(test_user.password, 'test_password')

    def test_user_has_secret(self):
        """A related secret with a non-empty key exists for the user."""
        test_user = User.objects.get(username="test_username")
        # presumably `secret` is the reverse relation to UserSecret —
        # confirm against the model definition.
        self.assertTrue(test_user.secret.key)
        print(">>>secret:%s" % test_user.secret.key)
        print(">>>verified:%s" % test_user.secret.is_verified)
| {
"content_hash": "d28f8db94fa41a6138a3960260495f7a",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 135,
"avg_line_length": 33.52173913043478,
"alnum_prop": 0.7405966277561609,
"repo_name": "michaelhenry/codebehind",
"id": "d81111c95618ff196719e1c2db88d08a8aabb282",
"size": "771",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "codebehind/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13562"
}
],
"symlink_target": ""
} |
from pyspark.streaming.dstream import DStream
from pyspark.streaming.util import TransformFunction
from pyspark.sql.snappy import SnappyContext
from pyspark.sql.types import StructType
class SchemaDStream(DStream):
    """A DStream whose batches carry a known SQL schema.

    Offers SQL-style manipulation of streaming data, similar to
    SchemaRDD: each batch-interval RDD is treated as a small table
    against which continuous queries are evaluated.  Parts of the
    abstraction are borrowed from
    https://github.com/Intel-bigdata/spark-streamingsql

    @param snsc
    @param queryExecution
    """

    def __init__(self, jdstream, ssc, jrdd_deserializer, schema):
        DStream.__init__(self, jdstream, ssc, jrdd_deserializer)
        self._schema = schema
        self._sqlcontext = SnappyContext(self._sc)

    def foreachDataFrame(self, func):
        """Apply ``func`` to a DataFrame built from every batch RDD."""
        def _to_frame(_, batch_rdd):
            # Wrap the raw RDD with this stream's schema before handing
            # it to the user callback.
            frame = self._sqlcontext.createDataFrame(batch_rdd, self._schema)
            func(frame)
        self.foreachRDD(_to_frame)
| {
"content_hash": "106a54e8764916c5de5ee371e357455b",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 70,
"avg_line_length": 38.03333333333333,
"alnum_prop": 0.7099035933391762,
"repo_name": "vjr/snappydata",
"id": "ef63418896005ea113769986865ff44b19126549",
"size": "1781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/pyspark/streaming/snappy/snappydstream.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "5867"
},
{
"name": "C++",
"bytes": "2104"
},
{
"name": "Java",
"bytes": "579527"
},
{
"name": "Python",
"bytes": "49161"
},
{
"name": "Scala",
"bytes": "3230053"
},
{
"name": "Shell",
"bytes": "80012"
}
],
"symlink_target": ""
} |
import sys
# Driver script: exercises sensor_update from the `functions` module.
# (The bare string below is the original author's Japanese note:
# "tests the sensor_update function of the functions module".)
"""
functionモジュールのsensor_update関数をテストする
"""
# The path appends must run before the imports below so the PFoE
# package directories are importable.
sys.path.append('../PFoE/functions')
sys.path.append('../PFoE/class_definition')
from functions import sensor_update
import class_definition
# Robot instance under test; assumes Robot(sensor=4, choice=3) matches
# the class_definition API — TODO confirm.
mouse1=class_definition.Robot(sensor=4,choice=3)
| {
"content_hash": "9d2cfbee0106b7616de4b490fa8f71e6",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 48,
"avg_line_length": 20.75,
"alnum_prop": 0.7751004016064257,
"repo_name": "kato-masahiro/particle_filter_on_episode",
"id": "9c04eb1b4b1c357db12053fd258ae859bca7b59d",
"size": "291",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_sensor_update.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "28334"
},
{
"name": "Shell",
"bytes": "818"
}
],
"symlink_target": ""
} |
# Seconds between automatic scroll steps of the per-peer "spew" pane.
SPEW_SCROLL_RATE = 1
import sys
import os
import time
import signal
import random
import socket
import hashlib
import threading
from BitTornado.Client.download_bt1 import BT1Download, defaults, \
parse_params, get_usage, get_metainfo
from BitTornado.Network.RawServer import RawServer
from BitTornado.Network.SocketHandler import UPnP_ERROR
from BitTornado.Meta.bencode import bencode
from BitTornado.Network.natpunch import UPnP_test
from BitTornado.clock import clock
from BitTornado import version
from BitTornado.Application.ConfigDir import ConfigDir
from BitTornado.Application.NumberFormats import formatIntClock, formatSize
from BitTornado.Application.PeerID import createPeerID
# curses is unavailable on the native Windows build of Python; fail
# early with a pointer at the headless client instead of crashing later.
try:
    import curses
    import curses.panel
except ImportError:
    print('Textmode GUI initialization failed, cannot proceed.')
    print()
    print('This download interface requires the standard Python module '
          '"curses", which is unfortunately not available for the native '
          'Windows port of Python. It is however available for the Cygwin '
          'port of Python, running on all Win32 systems (www.cygwin.com).')
    print()
    print('You may still use "btdownloadheadless.py" to download.')
    sys.exit(1)
class CursesDisplayer:
    """Full-screen curses front end for a single BT1Download session.

    The screen is split into a fixed label column, a field pane holding
    the download statistics, and a scrolling "spew" pane that shows
    either the error backlog or per-peer transfer details.
    """

    def __init__(self, scrwin, errlist, doneflag):
        self.scrwin = scrwin
        self.errlist = errlist
        self.doneflag = doneflag
        # Rebuild the layout whenever the terminal is resized.
        signal.signal(signal.SIGWINCH, self.winch_handler)
        self.changeflag = threading.Event()
        self.done = 0
        self.file = ''
        self.fileSize = ''
        self.activity = ''
        self.status = ''
        self.progress = ''
        self.downloadTo = ''
        self.downRate = '---'
        self.upRate = '---'
        self.shareRating = ''
        self.seedStatus = ''
        self.peerStatus = ''
        self.errors = []
        self.last_update_time = 0
        self.spew_scroll_time = 0
        self.spew_scroll_pos = 0
        self._remake_window()

    def winch_handler(self, signum, stackframe):
        # SIGWINCH handler: the terminal changed size — tear down and
        # recreate every window/panel at the new dimensions.
        self.changeflag.set()
        curses.endwin()
        self.scrwin.refresh()
        self.scrwin = curses.newwin(0, 0, 0, 0)
        self._remake_window()

    def _remake_window(self):
        """(Re)create the label, field and spew sub-windows to fit the screen."""
        self.scrh, self.scrw = self.scrwin.getmaxyx()
        self.scrpan = curses.panel.new_panel(self.scrwin)
        self.labelh, self.labelw, self.labely, self.labelx = 11, 9, 1, 2
        self.labelwin = curses.newwin(self.labelh, self.labelw,
                                      self.labely, self.labelx)
        self.labelpan = curses.panel.new_panel(self.labelwin)
        self.fieldh, self.fieldw, self.fieldy, self.fieldx = (
            self.labelh, (self.scrw - 2) - (self.labelw - 3), 1,
            self.labelw + 3)
        self.fieldwin = curses.newwin(self.fieldh, self.fieldw,
                                      self.fieldy, self.fieldx)
        # Non-blocking getch() so display() can poll for keystrokes.
        self.fieldwin.nodelay(1)
        self.fieldpan = curses.panel.new_panel(self.fieldwin)
        self.spewh, self.speww, self.spewy, self.spewx = (
            self.scrh - self.labelh - 2, self.scrw - 3, 1 + self.labelh, 2)
        self.spewwin = curses.newwin(self.spewh, self.speww,
                                     self.spewy, self.spewx)
        self.spewpan = curses.panel.new_panel(self.spewwin)
        try:
            self.scrwin.border(*map(ord, '||-- '))
        except Exception:
            # Some terminals refuse custom borders; purely cosmetic.
            pass
        self.labelwin.addstr(0, 0, 'file:')
        self.labelwin.addstr(1, 0, 'size:')
        self.labelwin.addstr(2, 0, 'dest:')
        self.labelwin.addstr(3, 0, 'progress:')
        self.labelwin.addstr(4, 0, 'status:')
        self.labelwin.addstr(5, 0, 'dl speed:')
        self.labelwin.addstr(6, 0, 'ul speed:')
        self.labelwin.addstr(7, 0, 'sharing:')
        self.labelwin.addstr(8, 0, 'seeds:')
        self.labelwin.addstr(9, 0, 'peers:')
        curses.panel.update_panels()
        curses.doupdate()
        self.changeflag.clear()

    def finished(self):
        """Mark the download as successfully completed and redraw."""
        self.done = 1
        self.activity = 'download succeeded!'
        self.downRate = '---'
        self.display(fractionDone=1)

    def failed(self):
        """Mark the download as failed and redraw."""
        self.done = 1
        self.activity = 'download failed!'
        self.downRate = '---'
        self.display()

    def error(self, errormsg):
        """Timestamp an error message, log it to both lists, redraw."""
        newerrmsg = time.strftime('[%H:%M:%S] ') + errormsg
        self.errors.append(newerrmsg)
        self.errlist.append(newerrmsg)
        self.display()

    def display(self, dpflag=threading.Event(), fractionDone=None,
                timeEst=None, downRate=None, upRate=None, activity=None,
                statistics=None, spew=None, **kwargs):
        """Refresh every pane from the latest download statistics.

        Also polls the keyboard: ^L forces a full redraw, q/Q sets the
        done flag so the main loop exits.
        NOTE(review): the mutable default `dpflag` Event is shared
        across calls that omit it — callers normally pass their own.
        """
        inchar = self.fieldwin.getch()
        if inchar == 12:  # ^L
            self._remake_window()
        elif inchar in (ord('q'), ord('Q')):
            self.doneflag.set()
        if activity is not None and not self.done:
            self.activity = activity
        elif timeEst == 0:
            self.activity = 'download complete!'
        elif timeEst is not None:
            self.activity = 'finishing in ' + formatIntClock(timeEst)
        # Throttle ordinary redraws to one per 0.1s; resize events and
        # start/finish boundaries always repaint.
        if self.changeflag.is_set() or \
                self.last_update_time + 0.1 > clock() and \
                fractionDone not in (0.0, 1.0) and \
                activity is not None:
            return
        self.last_update_time = clock()
        if fractionDone is not None:
            blocknum = int(self.fieldw * fractionDone)
            self.progress = blocknum * '#' + (self.fieldw - blocknum) * '_'
            self.status = '%s (%.1f%%)' % (self.activity, fractionDone * 100)
        else:
            self.status = self.activity
        if downRate is not None:
            self.downRate = '%.1f KB/s' % (float(downRate) / (1 << 10))
        if upRate is not None:
            self.upRate = '%.1f KB/s' % (float(upRate) / (1 << 10))
        if statistics is not None:
            # shareRating above 100 (or negative) is rendered as "oo".
            self.shareRating = '{} ({:.1f} MB up / {:.1f} MB down)'.format(
                '{:.3f}'.format(statistics.shareRating)
                if 0 <= statistics.shareRating <= 100 else 'oo',
                float(statistics.upTotal) / (1 << 20),
                float(statistics.downTotal) / (1 << 20))
            if self.done:
                seeds = '{:d} seen recently, '.format(statistics.numOldSeeds)
                copies = 'plus {:.3f} distributed copies'.format(
                    round(statistics.numCopies, 3))
            else:
                seeds = '{:d} seen now, '.format(statistics.numSeeds)
                copies = 'plus {:.3f} distributed copies'.format(
                    round(statistics.numCopies2, 3))
            self.seedStatus = seeds + copies
            self.peerStatus = '{:d} seen now, {:.1f}% done at {:.1f} kB/s' \
                ''.format(statistics.numPeers, statistics.percentDone,
                          float(statistics.torrentRate) / (1 << 10))
        self.fieldwin.erase()
        self.fieldwin.addnstr(0, 0, self.file, self.fieldw, curses.A_BOLD)
        self.fieldwin.addnstr(1, 0, self.fileSize, self.fieldw)
        self.fieldwin.addnstr(2, 0, self.downloadTo, self.fieldw)
        if self.progress:
            self.fieldwin.addnstr(3, 0, self.progress, self.fieldw,
                                  curses.A_BOLD)
        self.fieldwin.addnstr(4, 0, self.status, self.fieldw)
        self.fieldwin.addnstr(5, 0, self.downRate, self.fieldw)
        self.fieldwin.addnstr(6, 0, self.upRate, self.fieldw)
        self.fieldwin.addnstr(7, 0, self.shareRating, self.fieldw)
        self.fieldwin.addnstr(8, 0, self.seedStatus, self.fieldw)
        self.fieldwin.addnstr(9, 0, self.peerStatus, self.fieldw)
        self.spewwin.erase()
        if not spew:
            # No peer data: show the tail of the error backlog instead.
            errsize = self.spewh
            if self.errors:
                self.spewwin.addnstr(0, 0, "error(s):", self.speww,
                                     curses.A_BOLD)
                errsize = len(self.errors)
                displaysize = min(errsize, self.spewh)
                displaytop = errsize - displaysize
                for i in range(displaysize):
                    self.spewwin.addnstr(
                        i, self.labelw, self.errors[displaytop + i],
                        self.speww - self.labelw - 1, curses.A_BOLD)
        else:
            if self.errors:
                self.spewwin.addnstr(0, 0, "error:", self.speww, curses.A_BOLD)
                self.spewwin.addnstr(
                    0, self.labelw, self.errors[-1],
                    self.speww - self.labelw - 1, curses.A_BOLD)
            self.spewwin.addnstr(
                2, 0, ' # IP Upload Download '
                ' Completed Speed', self.speww, curses.A_BOLD)
            # Advance the scroll position once per SPEW_SCROLL_RATE
            # seconds when there are more peers than visible rows.
            if self.spew_scroll_time + SPEW_SCROLL_RATE < clock():
                self.spew_scroll_time = clock()
                if len(spew) > self.spewh - 5 or self.spew_scroll_pos > 0:
                    self.spew_scroll_pos += 1
            if self.spew_scroll_pos > len(spew):
                self.spew_scroll_pos = 0
            # Number the rows 1..n, append a blank separator entry, then
            # rotate the list by the scroll position.
            for i, subspew in enumerate(spew, 1):
                subspew['lineno'] = i
            spew.append({'lineno': None})
            spew = spew[self.spew_scroll_pos:] + spew[:self.spew_scroll_pos]
            for i in range(min(self.spewh - 5, len(spew))):
                if not spew[i]['lineno']:
                    continue  # the separator entry
                self.spewwin.addnstr(i + 3, 0, '%3d' % spew[i]['lineno'], 3)
                self.spewwin.addnstr(i + 3, 4,
                                     spew[i]['ip'] + spew[i]['direction'], 16)
                if spew[i]['uprate'] > 100:
                    self.spewwin.addnstr(i + 3, 20, '{:6.0f} KB/s'.format(
                        float(spew[i]['uprate']) / 1000), 11)
                self.spewwin.addnstr(i + 3, 32, '-----', 5)
                if spew[i]['uinterested']:
                    self.spewwin.addnstr(i + 3, 33, 'I', 1)
                if spew[i]['uchoked']:
                    self.spewwin.addnstr(i + 3, 35, 'C', 1)
                if spew[i]['downrate'] > 100:
                    self.spewwin.addnstr(i + 3, 38, '{:6.0f} KB/s'.format(
                        float(spew[i]['downrate']) / 1000), 11)
                self.spewwin.addnstr(i + 3, 50, '-------', 7)
                if spew[i]['dinterested']:
                    self.spewwin.addnstr(i + 3, 51, 'I', 1)
                if spew[i]['dchoked']:
                    self.spewwin.addnstr(i + 3, 53, 'C', 1)
                if spew[i]['snubbed'] == 1:
                    self.spewwin.addnstr(i + 3, 55, 'S', 1)
                self.spewwin.addnstr(i + 3, 58, '{:6.1%}'.format(
                    spew[i]['completed']), 6)
                if spew[i]['speed'] is not None:
                    self.spewwin.addnstr(i + 3, 64, '{:5.0f} KB/s'.format(
                        float(spew[i]['speed']) / 1000), 10)
        if statistics is not None:
            # Storage summary on the bottom line of the spew pane.
            self.spewwin.addnstr(
                self.spewh - 1, 0, 'downloading {:d} pieces, have {:d} '
                'fragments, {:d} of {:d} pieces completed'.format(
                    statistics.storage_active,
                    statistics.storage_dirty,
                    statistics.storage_numcomplete,
                    statistics.storage_totalpieces),
                self.speww - 1)
        curses.panel.update_panels()
        curses.doupdate()
        dpflag.set()

    def chooseFile(self, default, size, saveas, isdir):
        """Callback for BT1Download.saveAs: record and confirm the save path."""
        self.file = default
        self.fileSize = formatSize(size)
        if saveas == '':
            saveas = default
        self.downloadTo = os.path.abspath(saveas)
        return saveas
def run(scrwin, errlist, params):
    """Main download loop, executed inside curses.wrapper().

    Parses configuration, binds a listening port (retrying without
    UPnP on failure), fetches the metainfo and drives a BT1Download
    until completion, quit, or error.  The outer `while 1: ... break`
    is used as a one-shot block with early exits.
    """
    doneflag = threading.Event()
    d = CursesDisplayer(scrwin, errlist, doneflag)
    try:
        while 1:
            configdir = ConfigDir('downloadcurses')
            defaultsToIgnore = ['metafile', 'url', 'priority']
            configdir.setDefaults(defaults, defaultsToIgnore)
            configdefaults = configdir.loadConfig()
            defaults.append(
                ('save_options', 0, 'whether to save the current options as '
                 'the new default configuration (only for btdownloadcurses.py)'
                 ))
            try:
                config = parse_params(params, configdefaults)
            except ValueError as e:
                d.error('error: {}\nrun with no args for parameter '
                        'explanations'.format(e))
                break
            if not config:
                d.error(get_usage(defaults, d.fieldw, configdefaults))
                break
            if config['save_options']:
                configdir.saveConfig(config)
            configdir.deleteOldCacheData(config['expire_cache_data'])
            myid = createPeerID()
            # Seed the RNG from the (random) peer id.
            random.seed(myid)
            rawserver = RawServer(
                doneflag, config['timeout_check_interval'], config['timeout'],
                ipv6_enable=config['ipv6_enabled'], failfunc=d.failed,
                errorfunc=d.error)
            upnp_type = UPnP_test(config['upnp_nat_access'])
            # Bind a listen port; on a UPnP failure retry once with
            # UPnP disabled before giving up.
            while True:
                try:
                    listen_port = rawserver.find_and_bind(
                        config['minport'], config['maxport'], config['bind'],
                        ipv6_socket_style=config['ipv6_binds_v4'],
                        upnp=upnp_type, randomizer=config['random_port'])
                    break
                except socket.error as e:
                    if upnp_type and e == UPnP_ERROR:
                        d.error('WARNING: COULD NOT FORWARD VIA UPnP')
                        upnp_type = 0
                        continue
                    d.error("Couldn't listen - " + str(e))
                    d.failed()
                    return
            metainfo = get_metainfo(config['metafile'], config['url'], d.error)
            if not metainfo:
                break
            infohash = hashlib.sha1(bencode(metainfo['info'])).digest()
            dow = BT1Download(
                d.display, d.finished, d.error, d.error, doneflag, config,
                metainfo, infohash, myid, rawserver, listen_port, configdir)
            if not dow.saveAs(d.chooseFile):
                break
            if not dow.initFiles(old_style=True):
                break
            if not dow.startEngine():
                dow.shutdown()
                break
            dow.startRerequester()
            dow.autoStats()
            if not dow.am_I_finished():
                d.display(activity='connecting to peers')
            # Blocks until doneflag is set (quit key, finish, or error).
            rawserver.listen_forever(dow.getPortHandler())
            d.display(activity='shutting down')
            dow.shutdown()
            break
    except KeyboardInterrupt:
        # ^C to exit...
        pass
    try:
        rawserver.shutdown()
    except Exception:
        # Best-effort cleanup; rawserver may not exist on early exits.
        pass
    if not d.done:
        d.failed()
if __name__ == '__main__':
    # --version prints the BitTornado version and exits.
    if sys.argv[1:] == ['--version']:
        print(version)
        sys.exit(0)
    if len(sys.argv) <= 1:
        print("Usage: btdownloadcurses.py <global options>\n")
        print(get_usage(defaults))
        sys.exit(1)
    errlist = []
    # curses.wrapper restores the terminal even if run() raises.
    curses.wrapper(run, errlist, sys.argv[1:])
    # Errors collected during the session are replayed after the
    # curses screen is torn down so they remain visible.
    if errlist:
        print("These errors occurred during execution:")
        for error in errlist:
            print(error)
| {
"content_hash": "102ca89a0df480a7f8863a849c16bdd1",
"timestamp": "",
"source": "github",
"line_count": 387,
"max_line_length": 79,
"avg_line_length": 39.92506459948321,
"alnum_prop": 0.5310335900588958,
"repo_name": "jakesyl/BitTornado",
"id": "f82d470bac480eedf4d34eecf91b0814fab79cc8",
"size": "15548",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "btdownloadcurses.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "634"
},
{
"name": "Python",
"bytes": "510409"
}
],
"symlink_target": ""
} |
from __future__ import with_statement
import os
import sys
from datetime import datetime
from libcloud.utils.iso8601 import UTC
from libcloud.utils.py3 import httplib
from libcloud.compute.drivers.ec2 import EC2NodeDriver
from libcloud.compute.drivers.ec2 import EC2USWestNodeDriver
from libcloud.compute.drivers.ec2 import EC2USWestOregonNodeDriver
from libcloud.compute.drivers.ec2 import EC2EUNodeDriver
from libcloud.compute.drivers.ec2 import EC2APSENodeDriver
from libcloud.compute.drivers.ec2 import EC2APNENodeDriver
from libcloud.compute.drivers.ec2 import EC2APSESydneyNodeDriver
from libcloud.compute.drivers.ec2 import EC2SAEastNodeDriver
from libcloud.compute.drivers.ec2 import EC2PlacementGroup
from libcloud.compute.drivers.ec2 import NimbusNodeDriver, EucNodeDriver
from libcloud.compute.drivers.ec2 import OutscaleSASNodeDriver
from libcloud.compute.drivers.ec2 import IdempotentParamError
from libcloud.compute.drivers.ec2 import REGION_DETAILS
from libcloud.compute.drivers.ec2 import ExEC2AvailabilityZone
from libcloud.compute.drivers.ec2 import EC2NetworkSubnet
from libcloud.compute.base import Node, NodeImage, NodeSize, NodeLocation
from libcloud.compute.base import StorageVolume, VolumeSnapshot
from libcloud.compute.types import KeyPairDoesNotExistError, StorageVolumeState, \
VolumeSnapshotState
from libcloud.test import MockHttpTestCase, LibcloudTestCase
from libcloud.test.compute import TestCaseMixin
from libcloud.test.file_fixtures import ComputeFileFixtures
from libcloud.test import unittest
from libcloud.test.secrets import EC2_PARAMS
# All-zero SSH key fingerprint used by the key-pair fixtures.
null_fingerprint = '00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:' + \
                   '00:00:00:00:00'
class BaseEC2Tests(LibcloudTestCase):
    """Driver-instantiation sanity checks across all EC2 regions."""

    def test_instantiate_driver_valid_regions(self):
        # 'nimbus' is a special pseudo-region and is excluded here.
        valid_regions = [r for r in REGION_DETAILS.keys() if r != 'nimbus']
        endpoints = []
        for r in valid_regions:
            driver = EC2NodeDriver(*EC2_PARAMS, **{'region': r})
            endpoints.append(driver.connection.host)
        # Verify that each driver doesn't get the same API host endpoint
        self.assertEqual(len(endpoints),
                         len(set(endpoints)),
                         "Multiple Region Drivers were given the same API endpoint")

    def test_instantiate_driver_invalid_regions(self):
        for bad_region in ['invalid', 'nimbus']:
            try:
                EC2NodeDriver(*EC2_PARAMS, **{'region': bad_region})
            except ValueError:
                pass
            else:
                self.fail('Invalid region, but exception was not thrown')
class EC2Tests(LibcloudTestCase, TestCaseMixin):
image_name = 'ec2-public-images/fedora-8-i386-base-v1.04.manifest.xml'
region = 'us-east-1'
def setUp(self):
    """Point the EC2 driver at the mocked HTTP layer before each test."""
    EC2MockHttp.test = self
    EC2NodeDriver.connectionCls.conn_classes = (None, EC2MockHttp)
    # Mock responses are dispatched on the EC2 'Action' query parameter.
    EC2MockHttp.use_param = 'Action'
    EC2MockHttp.type = None
    self.driver = EC2NodeDriver(*EC2_PARAMS,
                                **{'region': self.region})
def test_create_node(self):
    """Basic create_node: id, name and Name tag come from the fixture."""
    image = NodeImage(id='ami-be3adfd7',
                      name=self.image_name,
                      driver=self.driver)
    size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                    driver=self.driver)
    node = self.driver.create_node(name='foo', image=image, size=size)
    self.assertEqual(node.id, 'i-2ba64342')
    self.assertEqual(node.name, 'foo')
    self.assertEqual(node.extra['tags']['Name'], 'foo')
    self.assertEqual(len(node.extra['tags']), 1)

def test_create_node_with_ex_mincount(self):
    """create_node with ex_mincount/ex_maxcount still yields one node."""
    image = NodeImage(id='ami-be3adfd7',
                      name=self.image_name,
                      driver=self.driver)
    size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                    driver=self.driver)
    node = self.driver.create_node(name='foo', image=image, size=size,
                                   ex_mincount=1, ex_maxcount=10)
    self.assertEqual(node.id, 'i-2ba64342')
    self.assertEqual(node.name, 'foo')
    self.assertEqual(node.extra['tags']['Name'], 'foo')
    self.assertEqual(len(node.extra['tags']), 1)

def test_create_node_with_ex_assign_public_ip(self):
    """Public-IP assignment path; request shape checked by the mock."""
    # assertions are done in _create_ex_assign_public_ip_RunInstances
    EC2MockHttp.type = 'create_ex_assign_public_ip'
    image = NodeImage(id='ami-11111111',
                      name=self.image_name,
                      driver=self.driver)
    size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                    driver=self.driver)
    subnet = EC2NetworkSubnet('subnet-11111111', "test_subnet", "pending")
    self.driver.create_node(
        name='foo',
        image=image,
        size=size,
        ex_subnet=subnet,
        ex_security_group_ids=[
            'sg-11111111'
        ],
        ex_assign_public_ip=True,
    )

def test_create_node_with_ex_terminate_on_shutdown(self):
    """Terminate-on-shutdown flag; request shape checked by the mock."""
    EC2MockHttp.type = 'create_ex_terminate_on_shutdown'
    image = NodeImage(id='ami-be3adfd7',
                      name=self.image_name,
                      driver=self.driver)
    size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                    driver=self.driver)
    # The important part about the test is asserted inside
    # EC2MockHttp._create_ex_terminate_on_shutdown
    self.driver.create_node(name='foo', image=image, size=size, ex_terminate_on_shutdown=True)

def test_create_node_with_metadata(self):
    """ex_metadata entries become tags alongside the Name tag."""
    image = NodeImage(id='ami-be3adfd7',
                      name=self.image_name,
                      driver=self.driver)
    size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                    driver=self.driver)
    node = self.driver.create_node(name='foo',
                                   image=image,
                                   size=size,
                                   ex_metadata={'Bar': 'baz', 'Num': '42'})
    self.assertEqual(node.name, 'foo')
    self.assertEqual(node.extra['tags']['Name'], 'foo')
    self.assertEqual(node.extra['tags']['Bar'], 'baz')
    self.assertEqual(node.extra['tags']['Num'], '42')
    self.assertEqual(len(node.extra['tags']), 3)
def test_create_node_idempotent(self):
    """Client-token idempotency: same token echoes back; a mismatched
    repeat raises IdempotentParamError (forced via the error fixture)."""
    EC2MockHttp.type = 'idempotent'
    image = NodeImage(id='ami-be3adfd7',
                      name=self.image_name,
                      driver=self.driver)
    size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                    driver=self.driver)
    token = 'testclienttoken'
    node = self.driver.create_node(name='foo', image=image, size=size,
                                   ex_clienttoken=token)
    self.assertEqual(node.id, 'i-2ba64342')
    self.assertEqual(node.extra['client_token'], token)
    # from: http://docs.amazonwebservices.com/AWSEC2/latest/DeveloperGuide/index.html?Run_Instance_Idempotency.html
    # If you repeat the request with the same client token, but change
    # another request parameter, Amazon EC2 returns an
    # IdempotentParameterMismatch error.
    # In our case, changing the parameter doesn't actually matter since we
    # are forcing the error response fixture.
    EC2MockHttp.type = 'idempotent_mismatch'
    idem_error = None
    # different count
    try:
        self.driver.create_node(name='foo', image=image, size=size,
                                ex_mincount='2', ex_maxcount='2',
                                ex_clienttoken=token)
    except IdempotentParamError:
        e = sys.exc_info()[1]
        idem_error = e
    self.assertTrue(idem_error is not None)

def test_create_node_no_availability_zone(self):
    """create_node works with and without an explicit location."""
    image = NodeImage(id='ami-be3adfd7',
                      name=self.image_name,
                      driver=self.driver)
    size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                    driver=self.driver)
    node = self.driver.create_node(name='foo', image=image, size=size)
    location = NodeLocation(0, 'Amazon US N. Virginia', 'US', self.driver)
    self.assertEqual(node.id, 'i-2ba64342')
    node = self.driver.create_node(name='foo', image=image, size=size,
                                   location=location)
    self.assertEqual(node.id, 'i-2ba64342')
    self.assertEqual(node.name, 'foo')
def test_list_nodes(self):
    """list_nodes parses fixture instances, tags, block devices and
    supports filtering by ex_node_ids."""
    node = self.driver.list_nodes()[0]
    public_ips = sorted(node.public_ips)
    self.assertEqual(node.id, 'i-4382922a')
    # Untagged instances fall back to the instance id as name.
    self.assertEqual(node.name, node.id)
    self.assertEqual(len(node.public_ips), 2)
    self.assertEqual(node.extra['launch_time'],
                     '2013-12-02T11:58:11.000Z')
    self.assertTrue('instance_type' in node.extra)
    self.assertEqual(node.extra['availability'], 'us-east-1d')
    self.assertEqual(node.extra['key_name'], 'fauxkey')
    self.assertEqual(node.extra['monitoring'], 'disabled')
    self.assertEqual(node.extra['image_id'], 'ami-3215fe5a')
    self.assertEqual(len(node.extra['groups']), 2)
    self.assertEqual(len(node.extra['block_device_mapping']), 1)
    self.assertEqual(node.extra['block_device_mapping'][0]['device_name'], '/dev/sda1')
    self.assertEqual(node.extra['block_device_mapping'][0]['ebs']['volume_id'], 'vol-5e312311')
    self.assertTrue(node.extra['block_device_mapping'][0]['ebs']['delete'])
    self.assertEqual(public_ips[0], '1.2.3.4')
    nodes = self.driver.list_nodes(ex_node_ids=['i-4382922a',
                                                'i-8474834a'])
    ret_node1 = nodes[0]
    ret_node2 = nodes[1]
    self.assertEqual(ret_node1.id, 'i-4382922a')
    self.assertEqual(ret_node2.id, 'i-8474834a')
    self.assertEqual(ret_node2.name, 'Test Server 2')
    self.assertEqual(ret_node2.extra['subnet_id'], 'subnet-5fd9d412')
    self.assertEqual(ret_node2.extra['vpc_id'], 'vpc-61dcd30e')
    self.assertEqual(ret_node2.extra['tags']['Group'], 'VPC Test')
    self.assertEqual(ret_node1.extra['launch_time'],
                     '2013-12-02T11:58:11.000Z')
    self.assertTrue('instance_type' in ret_node1.extra)
    self.assertEqual(ret_node2.extra['launch_time'],
                     '2013-12-02T15:58:29.000Z')
    self.assertTrue('instance_type' in ret_node2.extra)

def test_ex_list_reserved_nodes(self):
    """Reserved-instance listing exposes pricing/term metadata."""
    node = self.driver.ex_list_reserved_nodes()[0]
    self.assertEqual(node.id, '93bbbca2-c500-49d0-9ede-9d8737400498')
    self.assertEqual(node.state, 'active')
    self.assertEqual(node.extra['instance_type'], 't1.micro')
    self.assertEqual(node.extra['availability'], 'us-east-1b')
    self.assertEqual(node.extra['start'], '2013-06-18T12:07:53.161Z')
    self.assertEqual(node.extra['duration'], 31536000)
    self.assertEqual(node.extra['usage_price'], 0.012)
    self.assertEqual(node.extra['fixed_price'], 23.0)
    self.assertEqual(node.extra['instance_count'], 1)
    self.assertEqual(node.extra['description'], 'Linux/UNIX')
    self.assertEqual(node.extra['instance_tenancy'], 'default')
    self.assertEqual(node.extra['currency_code'], 'USD')
    self.assertEqual(node.extra['offering_type'], 'Light Utilization')

def test_list_location(self):
    """Locations carry an ExEC2AvailabilityZone object."""
    locations = self.driver.list_locations()
    self.assertTrue(len(locations) > 0)
    self.assertEqual(locations[0].name, 'eu-west-1a')
    self.assertTrue(locations[0].availability_zone is not None)
    self.assertTrue(isinstance(locations[0].availability_zone,
                               ExEC2AvailabilityZone))
def test_list_security_groups(self):
    """ex_list_security_groups returns the fixture group names."""
    groups = self.driver.ex_list_security_groups()
    self.assertEqual(groups, ['WebServers', 'RangedPortsBySource'])

def test_ex_delete_security_group_by_id(self):
    group_id = 'sg-443d0a12'
    retValue = self.driver.ex_delete_security_group_by_id(group_id)
    self.assertTrue(retValue)

def test_delete_security_group_by_name(self):
    group_name = 'WebServers'
    retValue = self.driver.ex_delete_security_group_by_name(group_name)
    self.assertTrue(retValue)

def test_ex_delete_security_group(self):
    name = 'WebServers'
    retValue = self.driver.ex_delete_security_group(name)
    self.assertTrue(retValue)

def test_authorize_security_group(self):
    resp = self.driver.ex_authorize_security_group('TestGroup', '22', '22',
                                                   '0.0.0.0/0')
    self.assertTrue(resp)

def test_authorize_security_group_ingress(self):
    ranges = ['1.1.1.1/32', '2.2.2.2/32']
    resp = self.driver.ex_authorize_security_group_ingress('sg-42916629', 22, 22, cidr_ips=ranges)
    self.assertTrue(resp)
    groups = [{'group_id': 'sg-949265ff'}]
    resp = self.driver.ex_authorize_security_group_ingress('sg-42916629', 22, 23, group_pairs=groups)
    self.assertTrue(resp)

def test_authorize_security_group_egress(self):
    # NOTE(review): despite the test name this calls the *ingress*
    # method; presumably it should exercise
    # ex_authorize_security_group_egress — confirm against the driver
    # and mock fixtures before changing.
    ranges = ['1.1.1.1/32', '2.2.2.2/32']
    resp = self.driver.ex_authorize_security_group_ingress('sg-42916629', 22, 22, cidr_ips=ranges)
    self.assertTrue(resp)
    groups = [{'group_id': 'sg-949265ff'}]
    resp = self.driver.ex_authorize_security_group_ingress('sg-42916629', 22, 22, group_pairs=groups)
    self.assertTrue(resp)

def test_revoke_security_group_ingress(self):
    # NOTE(review): calls the *authorize* method, not a revoke one —
    # looks like copy-paste; confirm upstream.
    ranges = ['1.1.1.1/32', '2.2.2.2/32']
    resp = self.driver.ex_authorize_security_group_ingress('sg-42916629', 22, 22, cidr_ips=ranges)
    self.assertTrue(resp)
    groups = [{'group_id': 'sg-949265ff'}]
    resp = self.driver.ex_authorize_security_group_ingress('sg-42916629', 22, 22, group_pairs=groups)
    self.assertTrue(resp)

def test_revoke_security_group_egress(self):
    # NOTE(review): calls authorize/ingress, not revoke/egress —
    # looks like copy-paste; confirm upstream.
    ranges = ['1.1.1.1/32', '2.2.2.2/32']
    resp = self.driver.ex_authorize_security_group_ingress('sg-42916629', 22, 22, cidr_ips=ranges)
    self.assertTrue(resp)
    groups = [{'group_id': 'sg-949265ff'}]
    resp = self.driver.ex_authorize_security_group_ingress('sg-42916629', 22, 22, group_pairs=groups)
    self.assertTrue(resp)

def test_reboot_node(self):
    node = Node('i-4382922a', None, None, None, None, self.driver)
    ret = self.driver.reboot_node(node)
    self.assertTrue(ret)

def test_ex_start_node(self):
    node = Node('i-4382922a', None, None, None, None, self.driver)
    ret = self.driver.ex_start_node(node)
    self.assertTrue(ret)

def test_ex_stop_node(self):
    node = Node('i-4382922a', None, None, None, None, self.driver)
    ret = self.driver.ex_stop_node(node)
    self.assertTrue(ret)
def test_ex_create_node_with_ex_blockdevicemappings(self):
    """Block-device mappings are accepted as a list of dicts."""
    EC2MockHttp.type = 'create_ex_blockdevicemappings'
    image = NodeImage(id='ami-be3adfd7',
                      name=self.image_name,
                      driver=self.driver)
    size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                    driver=self.driver)
    mappings = [
        {'DeviceName': '/dev/sda1', 'Ebs.VolumeSize': 10},
        {'DeviceName': '/dev/sdb', 'VirtualName': 'ephemeral0'},
        {'DeviceName': '/dev/sdc', 'VirtualName': 'ephemeral1'}
    ]
    node = self.driver.create_node(name='foo', image=image, size=size,
                                   ex_blockdevicemappings=mappings)
    self.assertEqual(node.id, 'i-2ba64342')

def test_ex_create_node_with_ex_blockdevicemappings_attribute_error(self):
    """Non-list / non-dict mappings raise AttributeError."""
    EC2MockHttp.type = 'create_ex_blockdevicemappings'
    image = NodeImage(id='ami-be3adfd7',
                      name=self.image_name,
                      driver=self.driver)
    size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                    driver=self.driver)
    mappings = 'this should be a list'
    self.assertRaises(AttributeError, self.driver.create_node, name='foo',
                      image=image, size=size,
                      ex_blockdevicemappings=mappings)
    mappings = ['this should be a dict']
    self.assertRaises(AttributeError, self.driver.create_node, name='foo',
                      image=image, size=size,
                      ex_blockdevicemappings=mappings)

def test_destroy_node(self):
    node = Node('i-4382922a', None, None, None, None, self.driver)
    ret = self.driver.destroy_node(node)
    self.assertTrue(ret)
def test_list_sizes(self):
    """Size listings per region: common sizes everywhere, plus
    region-specific counts and cluster types."""
    region_old = self.driver.region_name
    names = [
        ('ec2_us_east', 'us-east-1'),
        ('ec2_us_west', 'us-west-1'),
        ('ec2_eu_west', 'eu-west-1'),
        ('ec2_ap_southeast', 'ap-southeast-1'),
        ('ec2_ap_northeast', 'ap-northeast-1'),
        ('ec2_ap_southeast_2', 'ap-southeast-2')
    ]
    for api_name, region_name in names:
        self.driver.api_name = api_name
        self.driver.region_name = region_name
        sizes = self.driver.list_sizes()
        ids = [s.id for s in sizes]
        self.assertTrue('t1.micro' in ids)
        self.assertTrue('m1.small' in ids)
        self.assertTrue('m1.large' in ids)
        self.assertTrue('m1.xlarge' in ids)
        self.assertTrue('c1.medium' in ids)
        self.assertTrue('c1.xlarge' in ids)
        self.assertTrue('m2.xlarge' in ids)
        self.assertTrue('m2.2xlarge' in ids)
        self.assertTrue('m2.4xlarge' in ids)
        if region_name == 'us-east-1':
            self.assertEqual(len(sizes), 52)
            self.assertTrue('cg1.4xlarge' in ids)
            self.assertTrue('cc2.8xlarge' in ids)
            self.assertTrue('cr1.8xlarge' in ids)
        elif region_name == 'us-west-1':
            self.assertEqual(len(sizes), 44)
        # NOTE(review): this `if` breaks the elif chain and is dead
        # code anyway — 'us-west-2' never appears in `names`.
        if region_name == 'us-west-2':
            self.assertEqual(len(sizes), 41)
        elif region_name == 'ap-southeast-1':
            self.assertEqual(len(sizes), 42)
        elif region_name == 'ap-southeast-2':
            self.assertEqual(len(sizes), 47)
        elif region_name == 'eu-west-1':
            self.assertEqual(len(sizes), 50)
    # Restore the region so later tests see the original driver state.
    self.driver.region_name = region_old
def test_ex_create_node_with_ex_iam_profile(self):
iamProfile = {
'id': 'AIDGPMS9RO4H3FEXAMPLE',
'name': 'Foo',
'arn': 'arn:aws:iam:...'
}
image = NodeImage(id='ami-be3adfd7',
name=self.image_name,
driver=self.driver)
size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
driver=self.driver)
EC2MockHttp.type = None
node1 = self.driver.create_node(name='foo', image=image, size=size)
EC2MockHttp.type = 'ex_iam_profile'
node2 = self.driver.create_node(name='bar', image=image, size=size,
ex_iam_profile=iamProfile['name'])
node3 = self.driver.create_node(name='bar', image=image, size=size,
ex_iam_profile=iamProfile['arn'])
self.assertFalse(node1.extra['iam_profile'])
self.assertEqual(node2.extra['iam_profile'], iamProfile['id'])
self.assertEqual(node3.extra['iam_profile'], iamProfile['id'])
    def test_list_images(self):
        """list_images parses ids, names and ``extra`` fields from fixture."""
        images = self.driver.list_images()
        self.assertEqual(len(images), 2)
        location = '123456788908/Test Image'
        self.assertEqual(images[0].id, 'ami-57ba933a')
        self.assertEqual(images[0].name, 'Test Image')
        self.assertEqual(images[0].extra['image_location'], location)
        self.assertEqual(images[0].extra['architecture'], 'x86_64')
        self.assertEqual(len(images[0].extra['block_device_mapping']), 2)
        ephemeral = images[0].extra['block_device_mapping'][1]['virtual_name']
        self.assertEqual(ephemeral, 'ephemeral0')
        location = '123456788908/Test Image 2'
        self.assertEqual(images[1].id, 'ami-85b2a8ae')
        self.assertEqual(images[1].name, 'Test Image 2')
        self.assertEqual(images[1].extra['image_location'], location)
        self.assertEqual(images[1].extra['architecture'], 'x86_64')
        size = images[1].extra['block_device_mapping'][0]['ebs']['volume_size']
        self.assertEqual(size, 20)

    def test_list_images_with_image_ids(self):
        """ex_image_ids narrows the result to the requested AMIs."""
        EC2MockHttp.type = 'ex_imageids'
        images = self.driver.list_images(ex_image_ids=['ami-57ba933a'])
        self.assertEqual(len(images), 1)
        self.assertEqual(images[0].name, 'Test Image')

    def test_list_images_with_executable_by(self):
        """ex_executableby is accepted without changing result parsing."""
        images = self.driver.list_images(ex_executableby='self')
        self.assertEqual(len(images), 2)

    def test_get_image(self):
        """get_image returns a single image with its extra attributes."""
        image = self.driver.get_image('ami-57ba933a')
        self.assertEqual(image.id, 'ami-57ba933a')
        self.assertEqual(image.name, 'Test Image')
        self.assertEqual(image.extra['architecture'], 'x86_64')
        self.assertEqual(len(image.extra['block_device_mapping']), 2)

    def test_copy_image(self):
        """copy_image returns the image object for the new copy."""
        image = self.driver.list_images()[0]
        resp = self.driver.copy_image(image, 'us-east-1',
                                      name='Faux Image',
                                      description='Test Image Copy')
        self.assertEqual(resp.id, 'ami-4db38224')
    def test_create_image(self):
        """create_image from a node with an explicit block device mapping."""
        node = self.driver.list_nodes()[0]
        mapping = [{'VirtualName': None,
                    'Ebs': {'VolumeSize': 10,
                            'VolumeType': 'standard',
                            'DeleteOnTermination': 'true'},
                    'DeviceName': '/dev/sda1'}]
        resp = self.driver.create_image(node,
                                        'New Image',
                                        description='New EBS Image',
                                        block_device_mapping=mapping)
        self.assertEqual(resp.id, 'ami-e9b38280')

    def test_create_image_no_mapping(self):
        """create_image also works when no block device mapping is given."""
        node = self.driver.list_nodes()[0]
        resp = self.driver.create_image(node,
                                        'New Image',
                                        description='New EBS Image')
        self.assertEqual(resp.id, 'ami-e9b38280')
    # NOTE(review): this method is missing the ``test_`` prefix, so the
    # unittest runner never collects or executes it -- it is dead code.
    # Renaming it to ``test_delete_image`` would enable it; before doing
    # so, confirm EC2MockHttp defines a matching DeregisterImage handler
    # (not visible in this part of the file).
    def delete_image(self):
        """Exercise delete_image against the first listed image."""
        images = self.driver.list_images()
        image = images[0]
        resp = self.driver.delete_image(image)
        self.assertTrue(resp)
    # NOTE(review): missing the ``test_`` prefix, so this is never run by
    # the unittest runner -- dead code.  Renaming it to
    # ``test_ex_register_image`` would enable it; note the mock handler
    # later in this file is named ``_RegisterImages`` while the EC2 action
    # is ``RegisterImage`` (singular), so that mismatch likely needs fixing
    # at the same time.
    def ex_register_image(self):
        """Exercise ex_register_image with a snapshot-backed mapping."""
        mapping = [{'DeviceName': '/dev/sda1',
                    'Ebs': {'SnapshotId': 'snap-5ade3e4e'}}]
        image = self.driver.ex_register_image(name='Test Image',
                                              root_device_name='/dev/sda1',
                                              description='My Image',
                                              architecture='x86_64',
                                              block_device_mapping=mapping)
        self.assertEqual(image.id, 'ami-57c2fb3e')
    def test_ex_list_availability_zones(self):
        """Availability zones expose name, state and region."""
        availability_zones = self.driver.ex_list_availability_zones()
        availability_zone = availability_zones[0]
        self.assertTrue(len(availability_zones) > 0)
        self.assertEqual(availability_zone.name, 'eu-west-1a')
        self.assertEqual(availability_zone.zone_state, 'available')
        self.assertEqual(availability_zone.region_name, 'eu-west-1')

    def test_list_keypairs(self):
        """list_key_pairs and deprecated ex_list_keypairs return same data."""
        keypairs = self.driver.list_key_pairs()
        self.assertEqual(len(keypairs), 1)
        self.assertEqual(keypairs[0].name, 'gsg-keypair')
        self.assertEqual(keypairs[0].fingerprint, null_fingerprint)
        # Test old deprecated method
        keypairs = self.driver.ex_list_keypairs()
        self.assertEqual(len(keypairs), 1)
        self.assertEqual(keypairs[0]['keyName'], 'gsg-keypair')
        self.assertEqual(keypairs[0]['keyFingerprint'], null_fingerprint)

    def test_get_key_pair(self):
        """get_key_pair looks up a single key pair by name."""
        EC2MockHttp.type = 'get_one'
        key_pair = self.driver.get_key_pair(name='gsg-keypair')
        self.assertEqual(key_pair.name, 'gsg-keypair')

    def test_get_key_pair_does_not_exist(self):
        """A missing key pair raises KeyPairDoesNotExistError."""
        EC2MockHttp.type = 'doesnt_exist'
        self.assertRaises(KeyPairDoesNotExistError, self.driver.get_key_pair,
                          name='test-key-pair')

    def test_create_key_pair(self):
        """create_key_pair returns name, fingerprint and private key.

        The returned name comes from the canned fixture ('my-key-pair'),
        not from the name sent in the request.
        """
        key_pair = self.driver.create_key_pair(name='test-keypair')
        fingerprint = ('1f:51:ae:28:bf:89:e9:d8:1f:25:5d'
                       ':37:2d:7d:b8:ca:9f:f5:f1:6f')
        self.assertEqual(key_pair.name, 'my-key-pair')
        self.assertEqual(key_pair.fingerprint, fingerprint)
        self.assertTrue(key_pair.private_key is not None)
        # Test old and deprecated method
        key_pair = self.driver.ex_create_keypair(name='test-keypair')
        self.assertEqual(key_pair['keyFingerprint'], fingerprint)
        self.assertTrue(key_pair['keyMaterial'] is not None)
def test_ex_describe_all_keypairs(self):
keys = self.driver.ex_describe_all_keypairs()
self.assertEqual(keys, ['gsg-keypair'])
    def test_list_key_pairs(self):
        """Old and new key pair listing APIs agree on name/fingerprint."""
        keypair1 = self.driver.list_key_pairs()[0]
        self.assertEqual(keypair1.name, 'gsg-keypair')
        self.assertEqual(keypair1.fingerprint, null_fingerprint)
        # Test backward compatibility
        keypair2 = self.driver.ex_describe_keypairs('gsg-keypair')
        self.assertEqual(keypair2['keyName'], 'gsg-keypair')
        self.assertEqual(keypair2['keyFingerprint'], null_fingerprint)

    def test_delete_key_pair(self):
        """delete_key_pair and deprecated ex_delete_keypair both succeed."""
        keypair = self.driver.list_key_pairs()[0]
        success = self.driver.delete_key_pair(keypair)
        self.assertTrue(success)
        # Test old and deprecated method
        resp = self.driver.ex_delete_keypair('gsg-keypair')
        self.assertTrue(resp)

    def test_ex_describe_tags(self):
        """ex_describe_tags returns the tags attached to a node resource."""
        node = Node('i-4382922a', None, None, None, None, self.driver)
        tags = self.driver.ex_describe_tags(resource=node)
        self.assertEqual(len(tags), 3)
        self.assertTrue('tag' in tags)
        self.assertTrue('owner' in tags)
        self.assertTrue('stack' in tags)

    def test_import_key_pair_from_string(self):
        """Import public key material from a string (new and old API)."""
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'misc',
                            'dummy_rsa.pub')
        with open(path, 'r') as fp:
            key_material = fp.read()
        key = self.driver.import_key_pair_from_string(name='keypair',
                                                      key_material=key_material)
        self.assertEqual(key.name, 'keypair')
        self.assertEqual(key.fingerprint, null_fingerprint)
        # Test old and deprecated method
        key = self.driver.ex_import_keypair_from_string('keypair',
                                                        key_material)
        self.assertEqual(key['keyName'], 'keypair')
        self.assertEqual(key['keyFingerprint'], null_fingerprint)

    def test_import_key_pair_from_file(self):
        """Import public key material from a file path (new and old API)."""
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'misc',
                            'dummy_rsa.pub')
        key = self.driver.import_key_pair_from_file('keypair', path)
        self.assertEqual(key.name, 'keypair')
        self.assertEqual(key.fingerprint, null_fingerprint)
        # Test old and deprecated method
        key = self.driver.ex_import_keypair('keypair', path)
        self.assertEqual(key['keyName'], 'keypair')
        self.assertEqual(key['keyFingerprint'], null_fingerprint)
    def test_ex_create_tags(self):
        """Tag creation round-trips without raising (no return to assert)."""
        node = Node('i-4382922a', None, None, None, None, self.driver)
        self.driver.ex_create_tags(node, {'sample': 'tag'})

    def test_ex_delete_tags(self):
        """Tag deletion round-trips without raising (no return to assert)."""
        node = Node('i-4382922a', None, None, None, None, self.driver)
        self.driver.ex_delete_tags(node, {'sample': 'tag'})

    def test_ex_describe_addresses_for_node(self):
        """Per-node address lookup: one, two, or zero elastic IPs."""
        node1 = Node('i-4382922a', None, None, None, None, self.driver)
        ip_addresses1 = self.driver.ex_describe_addresses_for_node(node1)
        node2 = Node('i-4382922b', None, None, None, None, self.driver)
        ip_addresses2 = sorted(
            self.driver.ex_describe_addresses_for_node(node2))
        node3 = Node('i-4382922g', None, None, None, None, self.driver)
        ip_addresses3 = sorted(
            self.driver.ex_describe_addresses_for_node(node3))
        self.assertEqual(len(ip_addresses1), 1)
        self.assertEqual(ip_addresses1[0], '1.2.3.4')
        self.assertEqual(len(ip_addresses2), 2)
        self.assertEqual(ip_addresses2[0], '1.2.3.5')
        self.assertEqual(ip_addresses2[1], '1.2.3.6')
        self.assertEqual(len(ip_addresses3), 0)

    def test_ex_describe_addresses(self):
        """Mapping of node id -> elastic IPs, including the empty case."""
        node1 = Node('i-4382922a', None, None, None, None, self.driver)
        node2 = Node('i-4382922g', None, None, None, None, self.driver)
        nodes_elastic_ips1 = self.driver.ex_describe_addresses([node1])
        nodes_elastic_ips2 = self.driver.ex_describe_addresses([node2])
        self.assertEqual(len(nodes_elastic_ips1), 1)
        self.assertTrue(node1.id in nodes_elastic_ips1)
        self.assertEqual(nodes_elastic_ips1[node1.id], ['1.2.3.4'])
        self.assertEqual(len(nodes_elastic_ips2), 1)
        self.assertTrue(node2.id in nodes_elastic_ips2)
        self.assertEqual(nodes_elastic_ips2[node2.id], [])

    def test_ex_describe_all_addresses(self):
        """All elastic IPs, optionally restricted to associated ones."""
        EC2MockHttp.type = 'all_addresses'
        elastic_ips1 = self.driver.ex_describe_all_addresses()
        elastic_ips2 = self.driver.ex_describe_all_addresses(
            only_associated=True)
        self.assertEqual('1.2.3.7', elastic_ips1[3].ip)
        self.assertEqual('vpc', elastic_ips1[3].domain)
        self.assertEqual('eipalloc-992a5cf8', elastic_ips1[3].extra['allocation_id'])
        self.assertEqual(len(elastic_ips2), 2)
        self.assertEqual('1.2.3.5', elastic_ips2[1].ip)
        self.assertEqual('vpc', elastic_ips2[1].domain)
    def test_ex_allocate_address(self):
        """Allocation in 'standard' and 'vpc' domains; vpc gets an id.

        EC2MockHttp.type selects the VPC fixture mid-test, so the two
        halves must run in this order.
        """
        elastic_ip = self.driver.ex_allocate_address()
        self.assertEqual('192.0.2.1', elastic_ip.ip)
        self.assertEqual('standard', elastic_ip.domain)
        EC2MockHttp.type = 'vpc'
        elastic_ip = self.driver.ex_allocate_address(domain='vpc')
        self.assertEqual('192.0.2.2', elastic_ip.ip)
        self.assertEqual('vpc', elastic_ip.domain)
        self.assertEqual('eipalloc-666d7f04', elastic_ip.extra['allocation_id'])

    def test_ex_release_address(self):
        """Release works for standard and vpc; bogus domain raises."""
        EC2MockHttp.type = 'all_addresses'
        elastic_ips = self.driver.ex_describe_all_addresses()
        EC2MockHttp.type = ''
        ret = self.driver.ex_release_address(elastic_ips[2])
        self.assertTrue(ret)
        ret = self.driver.ex_release_address(elastic_ips[0], domain='vpc')
        self.assertTrue(ret)
        self.assertRaises(AttributeError,
                          self.driver.ex_release_address,
                          elastic_ips[0],
                          domain='bogus')

    def test_ex_associate_address_with_node(self):
        """Association returns None (standard) or an association id (vpc).

        Also checks the deprecated ex_associate_addresses alias and that a
        bogus domain raises.
        """
        node = Node('i-4382922a', None, None, None, None, self.driver)
        EC2MockHttp.type = 'all_addresses'
        elastic_ips = self.driver.ex_describe_all_addresses()
        EC2MockHttp.type = ''
        ret1 = self.driver.ex_associate_address_with_node(
            node, elastic_ips[2])
        ret2 = self.driver.ex_associate_addresses(
            node, elastic_ips[2])
        self.assertEqual(None, ret1)
        self.assertEqual(None, ret2)
        EC2MockHttp.type = 'vpc'
        ret3 = self.driver.ex_associate_address_with_node(
            node, elastic_ips[3], domain='vpc')
        ret4 = self.driver.ex_associate_addresses(
            node, elastic_ips[3], domain='vpc')
        self.assertEqual('eipassoc-167a8073', ret3)
        self.assertEqual('eipassoc-167a8073', ret4)
        self.assertRaises(AttributeError,
                          self.driver.ex_associate_address_with_node,
                          node,
                          elastic_ips[1],
                          domain='bogus')

    def test_ex_disassociate_address(self):
        """Disassociation works for standard and vpc; bogus domain raises."""
        EC2MockHttp.type = 'all_addresses'
        elastic_ips = self.driver.ex_describe_all_addresses()
        EC2MockHttp.type = ''
        ret = self.driver.ex_disassociate_address(elastic_ips[2])
        self.assertTrue(ret)
        # Test a VPC disassociation
        ret = self.driver.ex_disassociate_address(elastic_ips[1],
                                                  domain='vpc')
        self.assertTrue(ret)
        self.assertRaises(AttributeError,
                          self.driver.ex_disassociate_address,
                          elastic_ips[1],
                          domain='bogus')
    def test_ex_change_node_size_same_size(self):
        """Resizing a node to its current size must raise ValueError."""
        size = NodeSize('m1.small', 'Small Instance',
                        None, None, None, None, driver=self.driver)
        node = Node('i-4382922a', None, None, None, None, self.driver,
                    extra={'instancetype': 'm1.small'})
        try:
            self.driver.ex_change_node_size(node=node, new_size=size)
        except ValueError:
            pass
        else:
            self.fail('Same size was passed, but an exception was not thrown')

    def test_ex_change_node_size(self):
        """Resizing to a genuinely different size succeeds."""
        size = NodeSize('m1.large', 'Small Instance',
                        None, None, None, None, driver=self.driver)
        node = Node('i-4382922a', None, None, None, None, self.driver,
                    extra={'instancetype': 'm1.small'})
        result = self.driver.ex_change_node_size(node=node, new_size=size)
        self.assertTrue(result)
    def test_list_volumes(self):
        """Volume listing maps API states to StorageVolumeState values,
        including the UNKNOWN fallback for unrecognised states."""
        volumes = self.driver.list_volumes()
        self.assertEqual(len(volumes), 3)
        self.assertEqual('vol-10ae5e2b', volumes[0].id)
        self.assertEqual(1, volumes[0].size)
        self.assertEqual('available', volumes[0].extra['state'])
        self.assertEqual(StorageVolumeState.AVAILABLE, volumes[0].state)
        self.assertEqual('vol-v24bfh75', volumes[1].id)
        self.assertEqual(11, volumes[1].size)
        self.assertIsNone(volumes[1].extra['snapshot_id'])
        self.assertEqual('in-use', volumes[1].extra['state'])
        self.assertEqual(StorageVolumeState.INUSE, volumes[1].state)
        self.assertEqual('vol-b6c851ec', volumes[2].id)
        self.assertEqual(8, volumes[2].size)
        self.assertEqual('some-unknown-status', volumes[2].extra['state'])
        self.assertEqual('i-d334b4b3', volumes[2].extra['instance_id'])
        self.assertEqual('/dev/sda1', volumes[2].extra['device'])
        self.assertEqual('snap-30d37269', volumes[2].extra['snapshot_id'])
        self.assertEqual(StorageVolumeState.UNKNOWN, volumes[2].state)

    def test_create_volume(self):
        """create_volume returns a volume in 'creating' state with a
        parsed create_time datetime."""
        location = self.driver.list_locations()[0]
        vol = self.driver.create_volume(10, 'vol', location)
        self.assertEqual(10, vol.size)
        self.assertEqual('vol', vol.name)
        self.assertEqual('creating', vol.extra['state'])
        self.assertTrue(isinstance(vol.extra['create_time'], datetime))

    def test_destroy_volume(self):
        """destroy_volume succeeds for an available volume."""
        vol = StorageVolume(id='vol-4282672b', name='test',
                            state=StorageVolumeState.AVAILABLE,
                            size=10, driver=self.driver)
        retValue = self.driver.destroy_volume(vol)
        self.assertTrue(retValue)

    def test_attach(self):
        """attach_volume succeeds with an explicit device name."""
        vol = StorageVolume(id='vol-4282672b', name='test',
                            size=10, state=StorageVolumeState.AVAILABLE,
                            driver=self.driver)
        node = Node('i-4382922a', None, None, None, None, self.driver)
        retValue = self.driver.attach_volume(node, vol, '/dev/sdh')
        self.assertTrue(retValue)

    def test_detach(self):
        """detach_volume succeeds for an in-use volume."""
        vol = StorageVolume(id='vol-4282672b', name='test',
                            state=StorageVolumeState.INUSE,
                            size=10, driver=self.driver)
        retValue = self.driver.detach_volume(vol)
        self.assertTrue(retValue)

    def test_create_volume_snapshot(self):
        """Snapshot creation copies size/volume id and parses the
        creation timestamp as a timezone-aware datetime."""
        vol = StorageVolume(id='vol-4282672b', name='test',
                            state=StorageVolumeState.AVAILABLE,
                            size=10, driver=self.driver)
        snap = self.driver.create_volume_snapshot(
            vol, 'Test snapshot')
        self.assertEqual('snap-a7cb2hd9', snap.id)
        self.assertEqual(vol.size, snap.size)
        self.assertEqual('Test snapshot', snap.extra['name'])
        self.assertEqual(vol.id, snap.extra['volume_id'])
        self.assertEqual('pending', snap.extra['state'])
        self.assertEqual(VolumeSnapshotState.CREATING, snap.state)
        # 2013-08-15T16:22:30.000Z
        self.assertEqual(datetime(2013, 8, 15, 16, 22, 30, tzinfo=UTC), snap.created)
    def test_list_snapshots(self):
        """Snapshot listing parses state, volume id, size and metadata."""
        snaps = self.driver.list_snapshots()
        self.assertEqual(len(snaps), 3)
        self.assertEqual('snap-428abd35', snaps[0].id)
        self.assertEqual(VolumeSnapshotState.CREATING, snaps[0].state)
        self.assertEqual('vol-e020df80', snaps[0].extra['volume_id'])
        self.assertEqual(30, snaps[0].size)
        self.assertEqual('Daily Backup', snaps[0].extra['description'])
        self.assertEqual('snap-18349159', snaps[1].id)
        self.assertEqual(VolumeSnapshotState.AVAILABLE, snaps[1].state)
        self.assertEqual('vol-b5a2c1v9', snaps[1].extra['volume_id'])
        self.assertEqual(15, snaps[1].size)
        self.assertEqual('Weekly backup', snaps[1].extra['description'])
        self.assertEqual('DB Backup 1', snaps[1].extra['name'])

    def test_list_volume_snapshots(self):
        """list_volume_snapshots filters snapshots by the given volume."""
        volume = self.driver.list_volumes()[0]
        assert volume.id == 'vol-10ae5e2b'
        snapshots = self.driver.list_volume_snapshots(volume)
        self.assertEqual(len(snapshots), 1)
        self.assertEqual(snapshots[0].id, 'snap-18349160')

    def test_destroy_snapshot(self):
        """snapshot.destroy() delegates to the driver and succeeds."""
        snap = VolumeSnapshot(id='snap-428abd35', size=10, driver=self.driver)
        resp = snap.destroy()
        self.assertTrue(resp)

    def test_ex_modify_image_attribute(self):
        """ex_modify_image_attribute passes raw attribute params through."""
        images = self.driver.list_images()
        image = images[0]
        data = {'LaunchPermission.Add.1.Group': 'all'}
        resp = self.driver.ex_modify_image_attribute(image, data)
        self.assertTrue(resp)
    def test_create_node_ex_security_groups(self):
        """Security groups by name: old and new kwargs both work, but are
        mutually exclusive.  Query-string assertions live in the mock."""
        EC2MockHttp.type = 'ex_security_groups'
        image = NodeImage(id='ami-be3adfd7',
                          name=self.image_name,
                          driver=self.driver)
        size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                        driver=self.driver)
        security_groups = ['group1', 'group2']
        # Old, deprecated argument name
        self.driver.create_node(name='foo', image=image, size=size,
                                ex_securitygroup=security_groups)
        # New argument name
        self.driver.create_node(name='foo', image=image, size=size,
                                ex_security_groups=security_groups)
        # Test old and new arguments are mutually exclusive
        self.assertRaises(ValueError, self.driver.create_node,
                          name='foo', image=image, size=size,
                          ex_securitygroup=security_groups,
                          ex_security_groups=security_groups)

    def test_create_node_ex_security_group_ids(self):
        """Security group ids require a subnet; without one ValueError."""
        EC2MockHttp.type = 'ex_security_group_ids'
        image = NodeImage(id='ami-be3adfd7',
                          name=self.image_name,
                          driver=self.driver)
        size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                        driver=self.driver)
        subnet = EC2NetworkSubnet(12345, "test_subnet", "pending")
        security_groups = ['sg-1aa11a1a', 'sg-2bb22b2b']
        self.driver.create_node(name='foo', image=image, size=size,
                                ex_security_group_ids=security_groups,
                                ex_subnet=subnet)
        self.assertRaises(ValueError, self.driver.create_node,
                          name='foo', image=image, size=size,
                          ex_security_group_ids=security_groups)

    def test_ex_get_metadata_for_node(self):
        """ex_get_metadata_for_node returns Name plus user metadata."""
        image = NodeImage(id='ami-be3adfd7',
                          name=self.image_name,
                          driver=self.driver)
        size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
                        driver=self.driver)
        node = self.driver.create_node(name='foo',
                                       image=image,
                                       size=size,
                                       ex_metadata={'Bar': 'baz', 'Num': '42'})
        metadata = self.driver.ex_get_metadata_for_node(node)
        self.assertEqual(metadata['Name'], 'foo')
        self.assertEqual(metadata['Bar'], 'baz')
        self.assertEqual(metadata['Num'], '42')
        self.assertEqual(len(metadata), 3)
    def test_ex_get_limits(self):
        """ex_get_limits exposes instance and elastic-IP resource limits."""
        limits = self.driver.ex_get_limits()
        expected = {'max-instances': 20, 'vpc-max-elastic-ips': 5,
                    'max-elastic-ips': 5}
        self.assertEqual(limits['resource'], expected)

    def test_ex_create_security_group(self):
        """Creating a VPC security group returns the new group id."""
        group = self.driver.ex_create_security_group("WebServers",
                                                     "Rules to protect web nodes",
                                                     "vpc-143cab4")
        self.assertEqual(group["group_id"], "sg-52e2f530")

    def test_ex_create_placement_groups(self):
        """Placement group creation succeeds."""
        resp = self.driver.ex_create_placement_group("NewPG")
        self.assertTrue(resp)

    def test_ex_delete_placement_groups(self):
        """Placement group deletion (by name) succeeds."""
        pgs = self.driver.ex_list_placement_groups()
        pg = pgs[0]
        resp = self.driver.ex_delete_placement_group(pg.name)
        self.assertTrue(resp)

    def test_ex_list_placement_groups(self):
        """Listing returns EC2PlacementGroup instances."""
        pgs = self.driver.ex_list_placement_groups()
        self.assertEqual(len(pgs), 2)
        self.assertIsInstance(pgs[0], EC2PlacementGroup)
    def test_ex_list_networks(self):
        """VPC listing: an untagged VPC falls back to its id as name."""
        vpcs = self.driver.ex_list_networks()
        self.assertEqual(len(vpcs), 2)
        self.assertEqual('vpc-532335e1', vpcs[0].id)
        self.assertEqual('vpc-532335e1', vpcs[0].name)
        self.assertEqual('192.168.51.0/24', vpcs[0].cidr_block)
        self.assertEqual('available', vpcs[0].extra['state'])
        self.assertEqual('dopt-7eded312', vpcs[0].extra['dhcp_options_id'])
        self.assertEqual('vpc-62ded30e', vpcs[1].id)
        self.assertEqual('Test VPC', vpcs[1].name)
        self.assertEqual('192.168.52.0/24', vpcs[1].cidr_block)
        self.assertEqual('available', vpcs[1].extra['state'])
        self.assertEqual('dopt-7eded312', vpcs[1].extra['dhcp_options_id'])

    def test_ex_list_networks_network_ids(self):
        """network_ids are serialized into the request (asserted in mock)."""
        EC2MockHttp.type = 'network_ids'
        network_ids = ['vpc-532335e1']
        # We assert in the mock http method
        self.driver.ex_list_networks(network_ids=network_ids)

    def test_ex_list_networks_filters(self):
        """filters are serialized into the request (asserted in mock)."""
        EC2MockHttp.type = 'filters'
        filters = {'dhcp-options-id': 'dopt-7eded312',  # matches two networks
                   'cidr': '192.168.51.0/24'}  # matches one network
        # We assert in the mock http method
        self.driver.ex_list_networks(filters=filters)

    def test_ex_create_network(self):
        """VPC creation returns a pending network with the given CIDR."""
        vpc = self.driver.ex_create_network('192.168.55.0/24',
                                            name='Test VPC',
                                            instance_tenancy='default')
        self.assertEqual('vpc-ad3527cf', vpc.id)
        self.assertEqual('192.168.55.0/24', vpc.cidr_block)
        self.assertEqual('pending', vpc.extra['state'])

    def test_ex_delete_network(self):
        """VPC deletion succeeds."""
        vpcs = self.driver.ex_list_networks()
        vpc = vpcs[0]
        resp = self.driver.ex_delete_network(vpc)
        self.assertTrue(resp)

    def test_ex_list_subnets(self):
        """Subnet listing parses id, state and available IP count."""
        subnets = self.driver.ex_list_subnets()
        self.assertEqual(len(subnets), 2)
        self.assertEqual('subnet-ce0e7ce5', subnets[0].id)
        self.assertEqual('available', subnets[0].state)
        self.assertEqual(123, subnets[0].extra['available_ips'])
        self.assertEqual('subnet-ce0e7ce6', subnets[1].id)
        self.assertEqual('available', subnets[1].state)
        self.assertEqual(59, subnets[1].extra['available_ips'])

    def test_ex_create_subnet(self):
        """Subnet creation returns a pending subnet tied to its VPC."""
        subnet = self.driver.ex_create_subnet('vpc-532135d1',
                                              '192.168.51.128/26',
                                              'us-east-1b',
                                              name='Test Subnet')
        self.assertEqual('subnet-ce0e7ce6', subnet.id)
        self.assertEqual('pending', subnet.state)
        self.assertEqual('vpc-532135d1', subnet.extra['vpc_id'])

    def test_ex_delete_subnet(self):
        """Subnet deletion succeeds."""
        subnet = self.driver.ex_list_subnets()[0]
        resp = self.driver.ex_delete_subnet(subnet=subnet)
        self.assertTrue(resp)
    def test_ex_get_console_output(self):
        """Console output is decoded and returned under 'output'."""
        node = self.driver.list_nodes()[0]
        resp = self.driver.ex_get_console_output(node)
        self.assertEqual('Test String', resp['output'])

    def test_ex_list_network_interfaces(self):
        """ENI listing parses id, state and MAC address."""
        interfaces = self.driver.ex_list_network_interfaces()
        self.assertEqual(len(interfaces), 2)
        self.assertEqual('eni-18e6c05e', interfaces[0].id)
        self.assertEqual('in-use', interfaces[0].state)
        self.assertEqual('0e:6e:df:72:78:af',
                         interfaces[0].extra['mac_address'])
        self.assertEqual('eni-83e3c5c5', interfaces[1].id)
        self.assertEqual('in-use', interfaces[1].state)
        self.assertEqual('0e:93:0b:e9:e9:c4',
                         interfaces[1].extra['mac_address'])

    def test_ex_create_network_interface(self):
        """ENI creation returns a pending interface with a MAC address."""
        subnet = self.driver.ex_list_subnets()[0]
        interface = self.driver.ex_create_network_interface(
            subnet,
            name='Test Interface',
            description='My Test')
        self.assertEqual('eni-2b36086d', interface.id)
        self.assertEqual('pending', interface.state)
        self.assertEqual('0e:bd:49:3e:11:74', interface.extra['mac_address'])

    def test_ex_delete_network_interface(self):
        """ENI deletion succeeds."""
        interface = self.driver.ex_list_network_interfaces()[0]
        resp = self.driver.ex_delete_network_interface(interface)
        self.assertTrue(resp)

    def test_ex_attach_network_interface_to_node(self):
        """ENI attachment at device index 1 succeeds."""
        node = self.driver.list_nodes()[0]
        interface = self.driver.ex_list_network_interfaces()[0]
        resp = self.driver.ex_attach_network_interface_to_node(interface,
                                                               node, 1)
        self.assertTrue(resp)
def test_ex_detach_network_interface(self):
resp = self.driver.ex_detach_network_interface('eni-attach-2b588b47')
self.assertTrue(resp)
    def test_ex_list_internet_gateways(self):
        """Gateway listing parses id, state and attached VPC id."""
        gateways = self.driver.ex_list_internet_gateways()
        self.assertEqual(len(gateways), 2)
        self.assertEqual('igw-84dd3ae1', gateways[0].id)
        self.assertEqual('igw-7fdae215', gateways[1].id)
        self.assertEqual('available', gateways[1].state)
        self.assertEqual('vpc-62cad41e', gateways[1].vpc_id)

    def test_ex_create_internet_gateway(self):
        """Gateway creation returns the new gateway id."""
        gateway = self.driver.ex_create_internet_gateway()
        self.assertEqual('igw-13ac2b36', gateway.id)

    def test_ex_delete_internet_gateway(self):
        """Gateway deletion succeeds."""
        gateway = self.driver.ex_list_internet_gateways()[0]
        resp = self.driver.ex_delete_internet_gateway(gateway)
        self.assertTrue(resp)

    def test_ex_attach_internet_gateway(self):
        """Attaching a gateway to a VPC succeeds."""
        gateway = self.driver.ex_list_internet_gateways()[0]
        network = self.driver.ex_list_networks()[0]
        resp = self.driver.ex_attach_internet_gateway(gateway, network)
        self.assertTrue(resp)

    def test_ex_detach_internet_gateway(self):
        """Detaching a gateway from a VPC succeeds."""
        gateway = self.driver.ex_list_internet_gateways()[0]
        network = self.driver.ex_list_networks()[0]
        resp = self.driver.ex_detach_internet_gateway(gateway, network)
        self.assertTrue(resp)
class EC2USWest1Tests(EC2Tests):
    """Re-run the full EC2 suite against the us-west-1 region."""
    region = 'us-west-1'


class EC2USWest2Tests(EC2Tests):
    """Re-run the full EC2 suite against the us-west-2 region."""
    region = 'us-west-2'


class EC2EUWestTests(EC2Tests):
    """Re-run the full EC2 suite against the eu-west-1 region."""
    region = 'eu-west-1'


class EC2APSE1Tests(EC2Tests):
    """Re-run the full EC2 suite against the ap-southeast-1 region."""
    region = 'ap-southeast-1'


class EC2APNETests(EC2Tests):
    """Re-run the full EC2 suite against the ap-northeast-1 region."""
    region = 'ap-northeast-1'


class EC2APSE2Tests(EC2Tests):
    """Re-run the full EC2 suite against the ap-southeast-2 region."""
    region = 'ap-southeast-2'


class EC2SAEastTests(EC2Tests):
    """Re-run the full EC2 suite against the sa-east-1 region."""
    region = 'sa-east-1'
# Tests for the old, deprecated way of instantiating a driver.
class EC2OldStyleModelTests(EC2Tests):
    """Run the EC2 suite through a region-specific driver class instead of
    the region argument (the deprecated instantiation style)."""
    driver_klass = EC2USWestNodeDriver

    def setUp(self):
        """Wire EC2MockHttp into the driver class and reset mock state."""
        EC2MockHttp.test = self
        EC2NodeDriver.connectionCls.conn_classes = (None, EC2MockHttp)
        EC2MockHttp.use_param = 'Action'
        EC2MockHttp.type = None
        self.driver = self.driver_klass(*EC2_PARAMS)
class EC2USWest1OldStyleModelTests(EC2OldStyleModelTests):
    """Deprecated-style driver for us-west-1."""
    driver_klass = EC2USWestNodeDriver


class EC2USWest2OldStyleModelTests(EC2OldStyleModelTests):
    """Deprecated-style driver for us-west-2 (Oregon)."""
    driver_klass = EC2USWestOregonNodeDriver


class EC2EUWestOldStyleModelTests(EC2OldStyleModelTests):
    """Deprecated-style driver for eu-west-1."""
    driver_klass = EC2EUNodeDriver


class EC2APSE1OldStyleModelTests(EC2OldStyleModelTests):
    """Deprecated-style driver for ap-southeast-1."""
    driver_klass = EC2APSENodeDriver


class EC2APNEOldStyleModelTests(EC2OldStyleModelTests):
    """Deprecated-style driver for ap-northeast-1."""
    driver_klass = EC2APNENodeDriver


class EC2APSE2OldStyleModelTests(EC2OldStyleModelTests):
    """Deprecated-style driver for ap-southeast-2 (Sydney)."""
    driver_klass = EC2APSESydneyNodeDriver


class EC2SAEastOldStyleModelTests(EC2OldStyleModelTests):
    """Deprecated-style driver for sa-east-1."""
    driver_klass = EC2SAEastNodeDriver
class EC2MockHttp(MockHttpTestCase):
fixtures = ComputeFileFixtures('ec2')
    # Handler names map to the EC2 Action query parameter (optionally
    # prefixed by EC2MockHttp.type) -- renaming a method silently breaks
    # dispatch, so keep names exactly in sync with the API actions.
    def _DescribeInstances(self, method, url, body, headers):
        """Serve the canned DescribeInstances response."""
        body = self.fixtures.load('describe_instances.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _DescribeReservedInstances(self, method, url, body, headers):
        """Serve the canned DescribeReservedInstances response."""
        body = self.fixtures.load('describe_reserved_instances.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _DescribeAvailabilityZones(self, method, url, body, headers):
        """Serve the canned DescribeAvailabilityZones response."""
        body = self.fixtures.load('describe_availability_zones.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _RebootInstances(self, method, url, body, headers):
        """Serve the canned RebootInstances response."""
        body = self.fixtures.load('reboot_instances.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _StartInstances(self, method, url, body, headers):
        """Serve the canned StartInstances response."""
        body = self.fixtures.load('start_instances.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _StopInstances(self, method, url, body, headers):
        """Serve the canned StopInstances response."""
        body = self.fixtures.load('stop_instances.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _DescribeSecurityGroups(self, method, url, body, headers):
        """Serve the canned DescribeSecurityGroups response."""
        body = self.fixtures.load('describe_security_groups.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _DeleteSecurityGroup(self, method, url, body, headers):
        """Serve the canned DeleteSecurityGroup response."""
        body = self.fixtures.load('delete_security_group.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _AuthorizeSecurityGroupIngress(self, method, url, body, headers):
        """Serve the canned AuthorizeSecurityGroupIngress response."""
        body = self.fixtures.load('authorize_security_group_ingress.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _DescribeImages(self, method, url, body, headers):
        """Serve the canned DescribeImages response."""
        body = self.fixtures.load('describe_images.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    # NOTE(review): the EC2 action is RegisterImage (singular); with this
    # plural name the handler looks never-dispatched.  Consistent with the
    # ``ex_register_image`` test above also being dead (missing ``test_``
    # prefix).  Verify against the driver's action string before renaming.
    def _RegisterImages(self, method, url, body, headers):
        """Serve the canned register_image response."""
        body = self.fixtures.load('register_image.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _ex_imageids_DescribeImages(self, method, url, body, headers):
        """DescribeImages variant used when EC2MockHttp.type == 'ex_imageids'."""
        body = self.fixtures.load('describe_images_ex_imageids.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _RunInstances(self, method, url, body, headers):
        """Serve the canned RunInstances response."""
        body = self.fixtures.load('run_instances.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _create_ex_assign_public_ip_RunInstances(self, method, url, body, headers):
        """Assert the public-IP network interface params were serialized
        into the query string, then serve the canned response."""
        self.assertUrlContainsQueryParams(url, {
            'NetworkInterface.1.AssociatePublicIpAddress': "true",
            'NetworkInterface.1.DeleteOnTermination': "true",
            'NetworkInterface.1.DeviceIndex': "0",
            'NetworkInterface.1.SubnetId': "subnet-11111111",
            'NetworkInterface.1.SecurityGroupId.1': "sg-11111111",
        })
        body = self.fixtures.load('run_instances_with_subnet_and_security_group.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _create_ex_terminate_on_shutdown_RunInstances(self, method, url, body, headers):
        """Assert terminate-on-shutdown behavior was requested."""
        self.assertUrlContainsQueryParams(url, {
            'InstanceInitiatedShutdownBehavior': 'terminate'
        })
        body = self.fixtures.load('run_instances.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _ex_security_groups_RunInstances(self, method, url, body, headers):
        """Assert security group names were serialized as SecurityGroup.N."""
        self.assertUrlContainsQueryParams(url, {'SecurityGroup.1': 'group1'})
        self.assertUrlContainsQueryParams(url, {'SecurityGroup.2': 'group2'})
        body = self.fixtures.load('run_instances.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _ex_security_group_ids_RunInstances(self, method, url, body, headers):
        """Assert security group ids were serialized as SecurityGroupId.N."""
        self.assertUrlContainsQueryParams(url, {'SecurityGroupId.1': 'sg-1aa11a1a'})
        self.assertUrlContainsQueryParams(url, {'SecurityGroupId.2': 'sg-2bb22b2b'})
        body = self.fixtures.load('run_instances.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _create_ex_blockdevicemappings_RunInstances(self, method, url, body, headers):
        """Assert the block device mapping was flattened into query params."""
        expected_params = {
            'BlockDeviceMapping.1.DeviceName': '/dev/sda1',
            'BlockDeviceMapping.1.Ebs.VolumeSize': '10',
            'BlockDeviceMapping.2.DeviceName': '/dev/sdb',
            'BlockDeviceMapping.2.VirtualName': 'ephemeral0',
            'BlockDeviceMapping.3.DeviceName': '/dev/sdc',
            'BlockDeviceMapping.3.VirtualName': 'ephemeral1'
        }
        self.assertUrlContainsQueryParams(url, expected_params)
        body = self.fixtures.load('run_instances.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _idempotent_RunInstances(self, method, url, body, headers):
        """RunInstances with a matching client token succeeds."""
        body = self.fixtures.load('run_instances_idem.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _idempotent_mismatch_RunInstances(self, method, url, body, headers):
        """RunInstances with a mismatched client token returns 400."""
        body = self.fixtures.load('run_instances_idem_mismatch.xml')
        return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.BAD_REQUEST])

    def _ex_iam_profile_RunInstances(self, method, url, body, headers):
        """RunInstances variant that includes an IAM instance profile."""
        body = self.fixtures.load('run_instances_iam_profile.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _TerminateInstances(self, method, url, body, headers):
        """Serve the canned TerminateInstances response."""
        body = self.fixtures.load('terminate_instances.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _DescribeKeyPairs(self, method, url, body, headers):
        """Serve the canned DescribeKeyPairs response."""
        body = self.fixtures.load('describe_key_pairs.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _get_one_DescribeKeyPairs(self, method, url, body, headers):
        """DescribeKeyPairs variant that asserts the KeyName filter."""
        self.assertUrlContainsQueryParams(url, {'KeyName': 'gsg-keypair'})
        body = self.fixtures.load('describe_key_pairs.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _doesnt_exist_DescribeKeyPairs(self, method, url, body, headers):
        """DescribeKeyPairs variant that returns a 400 'not found' error."""
        body = self.fixtures.load('describe_key_pairs_doesnt_exist.xml')
        return (httplib.BAD_REQUEST, body, {},
                httplib.responses[httplib.BAD_REQUEST])

    def _CreateKeyPair(self, method, url, body, headers):
        """Serve the canned CreateKeyPair response."""
        body = self.fixtures.load('create_key_pair.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _ImportKeyPair(self, method, url, body, headers):
        """Serve the canned ImportKeyPair response."""
        body = self.fixtures.load('import_key_pair.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DescribeTags(self, method, url, body, headers):
        """Serve the canned DescribeTags response."""
        body = self.fixtures.load('describe_tags.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _CreateTags(self, method, url, body, headers):
        """Serve the canned CreateTags response."""
        body = self.fixtures.load('create_tags.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _DeleteTags(self, method, url, body, headers):
        """Serve the canned DeleteTags response."""
        body = self.fixtures.load('delete_tags.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _DescribeAddresses(self, method, url, body, headers):
        """Serve the multi-node DescribeAddresses response."""
        body = self.fixtures.load('describe_addresses_multi.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _AllocateAddress(self, method, url, body, headers):
        """Serve the standard-domain AllocateAddress response."""
        body = self.fixtures.load('allocate_address.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _vpc_AllocateAddress(self, method, url, body, headers):
        """AllocateAddress variant for the 'vpc' mock type."""
        body = self.fixtures.load('allocate_vpc_address.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _AssociateAddress(self, method, url, body, headers):
        """Serve the standard-domain AssociateAddress response."""
        body = self.fixtures.load('associate_address.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _vpc_AssociateAddress(self, method, url, body, headers):
        """AssociateAddress variant for the 'vpc' mock type."""
        body = self.fixtures.load('associate_vpc_address.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _DisassociateAddress(self, method, url, body, headers):
        """Serve the canned DisassociateAddress response."""
        body = self.fixtures.load('disassociate_address.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _ReleaseAddress(self, method, url, body, headers):
        """Serve the canned ReleaseAddress response."""
        body = self.fixtures.load('release_address.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _all_addresses_DescribeAddresses(self, method, url, body, headers):
        """DescribeAddresses variant for the 'all_addresses' mock type."""
        body = self.fixtures.load('describe_addresses_all.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _WITH_TAGS_DescribeAddresses(self, method, url, body, headers):
        """DescribeAddresses variant for the 'WITH_TAGS' mock type."""
        body = self.fixtures.load('describe_addresses_multi.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _ModifyInstanceAttribute(self, method, url, body, headers):
        """Serve the canned ModifyInstanceAttribute response."""
        body = self.fixtures.load('modify_instance_attribute.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _idempotent_CreateTags(self, method, url, body, headers):
        """CreateTags variant for the 'idempotent' mock type."""
        body = self.fixtures.load('create_tags.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    # --- EC2MockHttp action handlers: volumes, snapshots and images ---
    def _CreateVolume(self, method, url, body, headers):
        body = self.fixtures.load('create_volume.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DeleteVolume(self, method, url, body, headers):
        body = self.fixtures.load('delete_volume.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _AttachVolume(self, method, url, body, headers):
        body = self.fixtures.load('attach_volume.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DetachVolume(self, method, url, body, headers):
        body = self.fixtures.load('detach_volume.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DescribeVolumes(self, method, url, body, headers):
        body = self.fixtures.load('describe_volumes.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _CreateSnapshot(self, method, url, body, headers):
        body = self.fixtures.load('create_snapshot.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DescribeSnapshots(self, method, url, body, headers):
        body = self.fixtures.load('describe_snapshots.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DeleteSnapshot(self, method, url, body, headers):
        body = self.fixtures.load('delete_snapshot.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _CopyImage(self, method, url, body, headers):
        body = self.fixtures.load('copy_image.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _CreateImage(self, method, url, body, headers):
        body = self.fixtures.load('create_image.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DeregisterImage(self, method, url, body, headers):
        body = self.fixtures.load('deregister_image.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DeleteKeyPair(self, method, url, body, headers):
        # Unlike the other handlers this one also asserts that the request
        # carried the expected KeyName query parameter.
        self.assertUrlContainsQueryParams(url, {'KeyName': 'gsg-keypair'})
        body = self.fixtures.load('delete_key_pair.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _ModifyImageAttribute(self, method, url, body, headers):
        body = self.fixtures.load('modify_image_attribute.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DescribeAccountAttributes(self, method, url, body, headers):
        body = self.fixtures.load('describe_account_attributes.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    # --- EC2MockHttp action handlers: security groups, VPCs, subnets,
    # network interfaces, internet gateways and placement groups ---
    def _CreateSecurityGroup(self, method, url, body, headers):
        body = self.fixtures.load('create_security_group.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DescribeVpcs(self, method, url, body, headers):
        body = self.fixtures.load('describe_vpcs.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _network_ids_DescribeVpcs(self, method, url, body, headers):
        # Scenario 'network_ids': asserts the request filtered by VPC id.
        expected_params = {
            'VpcId.1': 'vpc-532335e1'
        }
        self.assertUrlContainsQueryParams(url, expected_params)
        body = self.fixtures.load('describe_vpcs.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _filters_DescribeVpcs(self, method, url, body, headers):
        # Scenario 'filters': the two filters may be serialized in either
        # order, so accept both permutations.
        expected_params_1 = {
            'Filter.1.Name': 'dhcp-options-id',
            'Filter.1.Value.1': 'dopt-7eded312',
            'Filter.2.Name': 'cidr',
            'Filter.2.Value.1': '192.168.51.0/24'
        }
        expected_params_2 = {
            'Filter.1.Name': 'cidr',
            'Filter.1.Value.1': '192.168.51.0/24',
            'Filter.2.Name': 'dhcp-options-id',
            'Filter.2.Value.1': 'dopt-7eded312'
        }
        try:
            self.assertUrlContainsQueryParams(url, expected_params_1)
        except AssertionError:
            # dict ordering is not guaranteed
            self.assertUrlContainsQueryParams(url, expected_params_2)
        body = self.fixtures.load('describe_vpcs.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _CreateVpc(self, method, url, body, headers):
        body = self.fixtures.load('create_vpc.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DeleteVpc(self, method, url, body, headers):
        body = self.fixtures.load('delete_vpc.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DescribeSubnets(self, method, url, body, headers):
        body = self.fixtures.load('describe_subnets.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _CreateSubnet(self, method, url, body, headers):
        body = self.fixtures.load('create_subnet.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DeleteSubnet(self, method, url, body, headers):
        body = self.fixtures.load('delete_subnet.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _GetConsoleOutput(self, method, url, body, headers):
        body = self.fixtures.load('get_console_output.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DescribeNetworkInterfaces(self, method, url, body, headers):
        body = self.fixtures.load('describe_network_interfaces.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _CreateNetworkInterface(self, method, url, body, headers):
        body = self.fixtures.load('create_network_interface.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DeleteNetworkInterface(self, method, url, body, headers):
        body = self.fixtures.load('delete_network_interface.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _AttachNetworkInterface(self, method, url, body, headers):
        body = self.fixtures.load('attach_network_interface.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DetachNetworkInterface(self, method, url, body, headers):
        body = self.fixtures.load('detach_network_interface.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DescribeInternetGateways(self, method, url, body, headers):
        body = self.fixtures.load('describe_internet_gateways.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _CreateInternetGateway(self, method, url, body, headers):
        body = self.fixtures.load('create_internet_gateway.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DeleteInternetGateway(self, method, url, body, headers):
        body = self.fixtures.load('delete_internet_gateway.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _AttachInternetGateway(self, method, url, body, headers):
        body = self.fixtures.load('attach_internet_gateway.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DetachInternetGateway(self, method, url, body, headers):
        body = self.fixtures.load('detach_internet_gateway.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _CreatePlacementGroup(self, method, url, body, headers):
        body = self.fixtures.load('create_placement_groups.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DeletePlacementGroup(self, method, url, body, headers):
        body = self.fixtures.load('delete_placement_groups.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _DescribePlacementGroups(self, method, url, body, headers):
        body = self.fixtures.load('describe_placement_groups.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
class EucMockHttp(EC2MockHttp):
    """Mock HTTP responses for Eucalyptus.

    Eucalyptus serves the EC2-compatible API under the
    '/services/Eucalyptus' URL prefix, so every handler here simply
    forwards to the matching plain-EC2 handler on the parent class.
    """
    fixtures = ComputeFileFixtures('ec2')

    def _services_Eucalyptus_DescribeInstances(self, method, url, body, headers):
        return self._DescribeInstances(method, url, body, headers)

    def _services_Eucalyptus_DescribeImages(self, method, url, body, headers):
        return self._DescribeImages(method, url, body, headers)

    def _services_Eucalyptus_DescribeAddresses(self, method, url, body, headers):
        return self._DescribeAddresses(method, url, body, headers)

    def _services_Eucalyptus_RebootInstances(self, method, url, body, headers):
        return self._RebootInstances(method, url, body, headers)

    def _services_Eucalyptus_TerminateInstances(self, method, url, body, headers):
        return self._TerminateInstances(method, url, body, headers)

    def _services_Eucalyptus_RunInstances(self, method, url, body, headers):
        return self._RunInstances(method, url, body, headers)

    def _services_Eucalyptus_CreateTags(self, method, url, body, headers):
        return self._CreateTags(method, url, body, headers)

    def _services_Eucalyptus_DescribeInstanceTypes(self, method, url, body, headers):
        # Eucalyptus-specific action; served from its own fixture.
        body = self.fixtures.load('describe_instance_types.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
class NimbusTests(EC2Tests):
    # Runs the full EC2 test suite against the Nimbus driver. Tests that
    # touch features Nimbus lacks (elastic IPs, server-side tags) are
    # overridden below with Nimbus-specific expectations.
    def setUp(self):
        NimbusNodeDriver.connectionCls.conn_classes = (None, EC2MockHttp)
        EC2MockHttp.use_param = 'Action'
        EC2MockHttp.type = None
        self.driver = NimbusNodeDriver(key=EC2_PARAMS[0], secret=EC2_PARAMS[1],
                                       host='some.nimbuscloud.com')
    def test_ex_describe_addresses_for_node(self):
        # overridden from EC2Tests -- Nimbus doesn't support elastic IPs.
        node = Node('i-4382922a', None, None, None, None, self.driver)
        ip_addresses = self.driver.ex_describe_addresses_for_node(node)
        self.assertEqual(len(ip_addresses), 0)
    def test_ex_describe_addresses(self):
        # overridden from EC2Tests -- Nimbus doesn't support elastic IPs.
        node = Node('i-4382922a', None, None, None, None, self.driver)
        nodes_elastic_ips = self.driver.ex_describe_addresses([node])
        self.assertEqual(len(nodes_elastic_ips), 1)
        self.assertEqual(len(nodes_elastic_ips[node.id]), 0)
    def test_list_sizes(self):
        sizes = self.driver.list_sizes()
        ids = [s.id for s in sizes]
        self.assertTrue('m1.small' in ids)
        self.assertTrue('m1.large' in ids)
        self.assertTrue('m1.xlarge' in ids)
    def test_list_nodes(self):
        # overridden from EC2Tests -- Nimbus doesn't support elastic IPs.
        node = self.driver.list_nodes()[0]
        self.assertExecutedMethodCount(0)
        public_ips = node.public_ips
        self.assertEqual(node.id, 'i-4382922a')
        self.assertEqual(len(node.public_ips), 1)
        self.assertEqual(public_ips[0], '1.2.3.4')
        self.assertEqual(node.extra['tags'], {})
        node = self.driver.list_nodes()[1]
        self.assertExecutedMethodCount(0)
        public_ips = node.public_ips
        self.assertEqual(node.id, 'i-8474834a')
        self.assertEqual(len(node.public_ips), 1)
        self.assertEqual(public_ips[0], '1.2.3.5')
        self.assertEqual(node.extra['tags'],
                         {'Name': 'Test Server 2', 'Group': 'VPC Test'})
    def test_ex_create_tags(self):
        # Nimbus doesn't support creating tags so this one should be a
        # passthrough
        node = self.driver.list_nodes()[0]
        self.driver.ex_create_tags(resource=node, tags={'foo': 'bar'})
        self.assertExecutedMethodCount(0)
class EucTests(LibcloudTestCase, TestCaseMixin):
    """Tests for the Eucalyptus driver (EC2-compatible API)."""

    def setUp(self):
        EucNodeDriver.connectionCls.conn_classes = (None, EucMockHttp)
        EC2MockHttp.use_param = 'Action'
        EC2MockHttp.type = None
        self.driver = EucNodeDriver(key=EC2_PARAMS[0], secret=EC2_PARAMS[1],
                                    host='some.eucalyptus.com', api_version='3.4.1')

    def test_list_locations_response(self):
        # list_locations must raise for this driver (the exact exception
        # type is not pinned by the original test). assertRaises replaces
        # the older try/except/else/self.fail idiom.
        self.assertRaises(Exception, self.driver.list_locations)

    def test_list_location(self):
        # Intentionally disabled: overrides the mixin test, which does not
        # apply to Eucalyptus.
        pass

    def test_list_sizes(self):
        sizes = self.driver.list_sizes()
        ids = [s.id for s in sizes]
        self.assertEqual(len(ids), 18)
        self.assertTrue('t1.micro' in ids)
        self.assertTrue('m1.medium' in ids)
        self.assertTrue('m3.xlarge' in ids)
class OutscaleTests(EC2Tests):
    """Runs the EC2 test suite against the Outscale SAS driver.

    Outscale omits several EC2 extension APIs; the overrides below assert
    that the driver raises NotImplementedError for those operations.
    assertRaises replaces the repeated try/except/else/self.fail idiom.
    """

    def setUp(self):
        OutscaleSASNodeDriver.connectionCls.conn_classes = (None, EC2MockHttp)
        EC2MockHttp.use_param = 'Action'
        EC2MockHttp.type = None
        self.driver = OutscaleSASNodeDriver(key=EC2_PARAMS[0],
                                            secret=EC2_PARAMS[1],
                                            host='some.outscalecloud.com')

    def test_ex_create_network(self):
        # overridden from EC2Tests -- Outscale doesn't support
        # instance_tenancy.
        vpc = self.driver.ex_create_network('192.168.55.0/24',
                                            name='Test VPC')
        self.assertEqual('vpc-ad3527cf', vpc.id)
        self.assertEqual('192.168.55.0/24', vpc.cidr_block)
        self.assertEqual('pending', vpc.extra['state'])

    def test_ex_copy_image(self):
        # overridden from EC2Tests -- Outscale does not support copying
        # images.
        image = self.driver.list_images()[0]
        self.assertRaises(NotImplementedError, self.driver.ex_copy_image,
                          'us-east-1', image, name='Faux Image',
                          description='Test Image Copy')

    def test_ex_get_limits(self):
        # overridden from EC2Tests -- Outscale does not support getting
        # limits.
        self.assertRaises(NotImplementedError, self.driver.ex_get_limits)

    def test_ex_create_network_interface(self):
        # overridden from EC2Tests -- Outscale doesn't allow creating
        # interfaces.
        subnet = self.driver.ex_list_subnets()[0]
        self.assertRaises(NotImplementedError,
                          self.driver.ex_create_network_interface, subnet,
                          name='Test Interface', description='My Test')

    def test_ex_delete_network_interface(self):
        # overridden from EC2Tests -- Outscale doesn't allow deleting
        # interfaces.
        interface = self.driver.ex_list_network_interfaces()[0]
        self.assertRaises(NotImplementedError,
                          self.driver.ex_delete_network_interface, interface)

    def test_ex_attach_network_interface_to_node(self):
        # overridden from EC2Tests -- Outscale doesn't allow attaching
        # interfaces.
        node = self.driver.list_nodes()[0]
        interface = self.driver.ex_list_network_interfaces()[0]
        self.assertRaises(NotImplementedError,
                          self.driver.ex_attach_network_interface_to_node,
                          interface, node, 1)

    def test_ex_detach_network_interface(self):
        # overridden from EC2Tests -- Outscale doesn't allow detaching
        # interfaces.
        self.assertRaises(NotImplementedError,
                          self.driver.ex_detach_network_interface,
                          'eni-attach-2b588b47')

    def test_list_sizes(self):
        sizes = self.driver.list_sizes()
        ids = [s.id for s in sizes]
        self.assertTrue('m1.small' in ids)
        self.assertTrue('m1.large' in ids)
        self.assertTrue('m1.xlarge' in ids)
if __name__ == '__main__':
    # Propagate the unittest result as the process exit status.
    sys.exit(unittest.main())
| {
"content_hash": "f6610f220f1e0d1ba511c5343d7884b6",
"timestamp": "",
"source": "github",
"line_count": 1804,
"max_line_length": 119,
"avg_line_length": 42.676829268292686,
"alnum_prop": 0.6125940069360558,
"repo_name": "jimbobhickville/libcloud",
"id": "79b5dc530b548c634c313405f41cc28a1eabe274",
"size": "77771",
"binary": false,
"copies": "1",
"ref": "refs/heads/trunk",
"path": "libcloud/test/compute/test_ec2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "Python",
"bytes": "4397714"
},
{
"name": "Shell",
"bytes": "13868"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division
# from io import StringIO
from glob import glob
import os.path as op
import tempfile
import simplejson as json
from pandas.api import types
import numpy as np
import pandas as pd
# from _common import cooler_cmp
from click.testing import CliRunner
import cooler
import pytest
### INGEST AND AGGREGATION ###
from cooler.cli.cload import pairs as cload_pairs
from cooler.cli.load import load
# Scratch directory and on-disk locations of the toy test fixtures.
tmp = tempfile.gettempdir()
testdir = op.realpath(op.dirname(__file__))
datadir = op.join(testdir, "data")
def _run_cload_pairs(runner, binsize, extra_args):
    """Invoke ``cooler cload pairs`` on the toy pairs file at *binsize*,
    appending *extra_args* to the base command line."""
    chromsizes = op.join(datadir, "toy.chrom.sizes")
    args = [
        "{}:{}".format(chromsizes, binsize),
        op.join(datadir, "toy.pairs"),
        "toy.{}.cool".format(binsize),
        "-c1", "2",
        "-p1", "3",
        "-c2", "4",
        "-p2", "5",
        "--assembly", "toy",
        "--chunksize", "10",
    ]
    args.extend(extra_args)
    return runner.invoke(cload_pairs, args)
def _cmp_pixels_2_bg(f_out, f_ref, one_based_ref=True):
    """Compare the pixel table of cooler file *f_out* against the BG2
    reference text file *f_ref*.

    The reference uses 1-based start coordinates by default, so the
    cooler output (0-based) is shifted before comparison.
    """
    # output, 1-based starts
    out_df = cooler.Cooler(f_out).pixels(join=True)[:]
    if one_based_ref:
        out_df["start1"] += 1
        out_df["start2"] += 1
    # reference
    ref_df = pd.read_csv(
        f_ref,
        sep="\t",
        names=["chrom1", "start1", "end1", "chrom2", "start2", "end2", "count"],
    )
    assert np.all(out_df == ref_df)
# '--no-symmetric-upper'
# '--input-copy-status', 'unique|duplex',
@pytest.mark.parametrize(
    "ref,extra_args",
    [
        ("symm.upper", []), # reflect triu pairs
        ("symm.upper", ["--input-copy-status", "unique"]), # reflect triu pairs
        ("asymm", ["--no-symmetric-upper"]),
    ],
)
def test_cload_symm_asymm(ref, extra_args):
    """cload should produce the expected symmetric-upper or asymmetric
    matrix depending on the symmetry flags."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        result = _run_cload_pairs(runner, 2, extra_args)
        assert result.exit_code == 0
        _cmp_pixels_2_bg("toy.2.cool", op.join(datadir, "toy.{}.2.bg2".format(ref)))
# '--temp-dir', '',
# '--no-delete-temp',
# '--max-merge', '',
@pytest.mark.parametrize(
    "ref,extra_args", [("symm.upper", ["--temp-dir", ".", "--no-delete-temp"])]
)
def test_cload_mergepass(ref, extra_args):
    """With --no-delete-temp, intermediate .cool chunk files must be kept
    in the temp dir and still contain valid coolers."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        result = _run_cload_pairs(runner, 2, extra_args)
        assert result.exit_code == 0
        _cmp_pixels_2_bg("toy.2.cool", op.join(datadir, "toy.{}.2.bg2".format(ref)))
        assert len(cooler.fileops.list_coolers(glob("*.cool")[0])) > 0
# '--field', '',
# '--no-count', '',
def test_cload_field():
    """Exercise the --field option: adding an extra value column, and
    overriding the 'count' column with custom dtype/agg specs."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        # Extra float column 'score' alongside the default integer count.
        extra_args = ["--field", "score=8"]
        result = _run_cload_pairs(runner, 2, extra_args)
        assert result.exit_code == 0
        pixels = cooler.Cooler("toy.2.cool").pixels()[:]
        assert "count" in pixels.columns and types.is_integer_dtype(
            pixels.dtypes["count"]
        )
        assert "score" in pixels.columns and types.is_float_dtype(
            pixels.dtypes["score"]
        )
        # Override count from column 8, keeping the default integer dtype.
        extra_args = ["--field", "count=8"]
        result = _run_cload_pairs(runner, 2, extra_args)
        assert result.exit_code == 0
        pixels = cooler.Cooler("toy.2.cool").pixels()[:]
        assert "count" in pixels.columns and types.is_integer_dtype(
            pixels.dtypes["count"]
        )
        assert np.allclose(pixels["count"][:], 0)
        # Same override with an explicit float dtype.
        extra_args = ["--field", "count=8:dtype=float"]
        result = _run_cload_pairs(runner, 2, extra_args)
        assert result.exit_code == 0
        pixels = cooler.Cooler("toy.2.cool").pixels()[:]
        assert "count" in pixels.columns and types.is_float_dtype(
            pixels.dtypes["count"]
        )
        assert np.allclose(pixels["count"][:], 0.2)
        # Custom aggregator (min) in addition to the dtype.
        extra_args = ["--field", "count=8:agg=min,dtype=float"]
        result = _run_cload_pairs(runner, 2, extra_args)
        assert result.exit_code == 0
        pixels = cooler.Cooler("toy.2.cool").pixels()[:]
        assert "count" in pixels.columns and types.is_float_dtype(
            pixels.dtypes["count"]
        )
        assert np.allclose(pixels["count"][:], 0.1)
        ## don't implement the --no-count for now
        # extra_args = ['--field', 'score=7:dtype=float', '--no-count']
        # result = _run_cload_pairs(runner, 2, extra_args)
        # assert result.exit_code == 0
        # pixels = cooler.Cooler('toy.2.cool').pixels()[:]
        # assert 'count' not in pixels.columns
        # assert 'score' in pixels.columns and types.is_float_dtype(pixels.dtypes['score'])
# '--metadata', '',
# '--zero-based',
# '--comment-char', '',
# '--storage-options', '',
def test_cload_other_options():
    """Check that user metadata, zero-based coordinates, and HDF5 storage
    options are propagated into the output cooler."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        meta = {"foo": "bar", "number": 42}
        with open("meta.json", "w") as f:
            json.dump(meta, f)
        extra_args = [
            "--metadata",
            "meta.json",
            "--zero-based",
            "--storage-options",
            "shuffle=True,fletcher32=True,compression=lzf",
        ]
        result = _run_cload_pairs(runner, 2, extra_args)
        assert result.exit_code == 0
        c = cooler.Cooler("toy.2.cool")
        assert c.info["metadata"] == meta
        # Verify the storage options actually reached the HDF5 datasets.
        with c.open("r") as h5:
            dset = h5["bins/start"]
            assert dset.shuffle
            assert dset.fletcher32
            assert dset.compression == "lzf"
def _run_load(runner, matrix_file, format, binsize, extra_args):
    """Invoke ``cooler load`` on *matrix_file* (of the given text
    *format*) against the toy chromsizes at *binsize*."""
    bins_arg = "{}:{}".format(op.join(datadir, "toy.chrom.sizes"), binsize)
    base_args = [
        "-f",
        format,
        bins_arg,
        op.join(datadir, matrix_file),
        "toy.{}.cool".format(binsize),
        "--assembly",
        "toy",
        "--chunksize",
        "10",
    ]
    return runner.invoke(load, base_args + extra_args)
# '--no-symmetric-upper'
# '--input-copy-status', 'unique|duplex',
@pytest.mark.parametrize(
    "ref,extra_args",
    [
        ("symm.upper", []), # reflect tril pairs
        ("symm.upper", ["--one-based", "--input-copy-status", "unique"]), # reflect tril pairs
        ("asymm", ["--one-based", "--no-symmetric-upper"]),
    ],
)
def test_load_symm_asymm(ref, extra_args):
    """Round-trip a BG2 text matrix through `cooler load` and compare the
    resulting pixels against the same reference file."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        ref = op.join(datadir, "toy.{}.2.bg2".format(ref))
        result = _run_load(runner, ref, "bg2", 2, extra_args)
        assert result.exit_code == 0
        _cmp_pixels_2_bg("toy.2.cool", ref)
# '--field', '',
def test_load_field():
    """--field count=<col>:dtype=float must load counts as floats that
    match the reference cooler's values."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        extra_args = ["--field", "count=7:dtype=float"]
        result = _run_load(runner, "toy.symm.upper.2.bg2", "bg2", 2, extra_args)
        assert result.exit_code == 0
        pixels1 = cooler.Cooler(op.join(datadir, "toy.symm.upper.2.cool")).pixels()[:]
        pixels2 = cooler.Cooler("toy.2.cool").pixels()[:]
        assert "count" in pixels2.columns and types.is_float_dtype(
            pixels2.dtypes["count"]
        )
        assert np.allclose(pixels1["count"][:], pixels2["count"][:])
def test_load_field2():
    """--count-as-float must behave like an explicit float dtype spec for
    the count column (mirrors test_load_field)."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        extra_args = ["--count-as-float"]
        result = _run_load(runner, "toy.symm.upper.2.bg2", "bg2", 2, extra_args)
        assert result.exit_code == 0
        pixels1 = cooler.Cooler(op.join(datadir, "toy.symm.upper.2.cool")).pixels()[:]
        pixels2 = cooler.Cooler("toy.2.cool").pixels()[:]
        assert "count" in pixels2.columns and types.is_float_dtype(
            pixels2.dtypes["count"]
        )
        assert np.allclose(pixels1["count"][:], pixels2["count"][:])
| {
"content_hash": "a63a5a31d3ce04a2593b1b333b041864",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 95,
"avg_line_length": 33.35217391304348,
"alnum_prop": 0.5716334245861036,
"repo_name": "mirnylab/cooler",
"id": "4ec4646ab421c4169b87b27a4682c52167ec8578",
"size": "7671",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_cli_ingest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "814"
},
{
"name": "Python",
"bytes": "368188"
}
],
"symlink_target": ""
} |
from cleo import Output
from webdevops.command import DoitCommand
from webdevops.taskloader import DockerPushTaskLoader
class DockerPushCommand(DoitCommand):
    """
    Push images to registry/hub
    docker:push
        {docker images?* : Docker images (whitelist)}
        {--dry-run : show only which images will be build}
        {--t|threads=0 : threads}
        {--r|retry=0 : retry}
        {--whitelist=?* : image/tag whitelist }
        {--blacklist=?* : image/tag blacklist }
    """
    # NOTE(review): the docstring above appears to be a cleo command
    # signature (command name, arguments, options) parsed at runtime --
    # treat it as code, not documentation; do not reword it.
    def run_task(self, configuration):
        # Build docker-push tasks from the configuration and execute them
        # through the doit runner provided by DoitCommand.
        return self.run_doit(
            task_loader=DockerPushTaskLoader(configuration),
            configuration=configuration
        )
| {
"content_hash": "4bddd61872e68a915e0ce98b26073182",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 72,
"avg_line_length": 28.615384615384617,
"alnum_prop": 0.5833333333333334,
"repo_name": "webdevops/Dockerfile",
"id": "85912b5b7c5973dc2f59e8269d77d4d8bb4efd8c",
"size": "1912",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/command/docker_push_command.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5848"
},
{
"name": "Dockerfile",
"bytes": "400888"
},
{
"name": "HTML",
"bytes": "3199"
},
{
"name": "JavaScript",
"bytes": "39856"
},
{
"name": "Jinja",
"bytes": "170509"
},
{
"name": "Makefile",
"bytes": "1931"
},
{
"name": "PHP",
"bytes": "3734"
},
{
"name": "PLSQL",
"bytes": "16"
},
{
"name": "Python",
"bytes": "349579"
},
{
"name": "Ruby",
"bytes": "120424"
},
{
"name": "Shell",
"bytes": "2572496"
},
{
"name": "VCL",
"bytes": "274"
}
],
"symlink_target": ""
} |
import unittest
from openprocurement.api.tests.base import snitch
from openprocurement.tender.belowthreshold.tests.complaint import TenderComplaintResourceTestMixin
from openprocurement.tender.belowthreshold.tests.complaint_blanks import (
# TenderComplaintDocumentResourceTest
not_found,
create_tender_complaint_document,
)
from openprocurement.tender.openua.tests.complaint import TenderUAComplaintResourceTestMixin
from openprocurement.tender.openua.tests.complaint_blanks import (
# TenderComplaintDocumentResourceTest
patch_tender_complaint_document,
# TenderLotAwardComplaintResourceTest
create_tender_lot_complaint,
)
from openprocurement.tender.openeu.tests.complaint_blanks import (
# TenderComplaintDocumentResourceTest
put_tender_complaint_document,
)
from openprocurement.tender.openeu.tests.base import (
BaseTenderContentWebTest,
test_bids,
test_lots,
)
class TenderComplaintResourceTest(BaseTenderContentWebTest,
                                  TenderComplaintResourceTestMixin,
                                  TenderUAComplaintResourceTestMixin):
    # Runs the shared belowthreshold + openua complaint test mixins
    # against the openeu tender web-test base.
    initial_auth = ('Basic', ('broker', ''))
    test_author = test_bids[0]["tenderers"][0]
class TenderLotAwardComplaintResourceTest(BaseTenderContentWebTest):
    # Lot-specific complaint creation scenario; snitch() wraps the shared
    # blank test function into a test method.
    initial_lots = test_lots
    test_author = test_bids[0]["tenderers"][0]
    initial_auth = ('Basic', ('broker', ''))
    test_create_tender_complaint = snitch(create_tender_lot_complaint)
class TenderComplaintDocumentResourceTest(BaseTenderContentWebTest):
    # Document CRUD scenarios on a tender complaint; each test body comes
    # from the shared blank functions wrapped with snitch().
    test_author = test_bids[0]["tenderers"][0]
    initial_auth = ('Basic', ('broker', ''))
    def setUp(self):
        super(TenderComplaintDocumentResourceTest, self).setUp()
        # Create complaint
        response = self.app.post_json('/tenders/{}/complaints'.format(
            self.tender_id), {'data': {'title': 'complaint title',
                                       'description': 'complaint description',
                                       'author': test_bids[0]["tenderers"][0]
                                       }})
        complaint = response.json['data']
        self.complaint_id = complaint['id']
        self.complaint_owner_token = response.json['access']['token']
    test_not_found = snitch(not_found)
    test_create_tender_complaint_document = snitch(create_tender_complaint_document)
    test_put_tender_complaint_document = snitch(put_tender_complaint_document)
    test_patch_tender_complaint_document = snitch(patch_tender_complaint_document)
def suite():
    """Build the test suite for this module.

    Fix: TenderLotAwardComplaintResourceTest is defined in this module but
    was never added to the suite, so its test was silently skipped when
    running via ``unittest.main(defaultTest='suite')``.
    """
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(TenderComplaintDocumentResourceTest))
    suite.addTest(unittest.makeSuite(TenderComplaintResourceTest))
    suite.addTest(unittest.makeSuite(TenderLotAwardComplaintResourceTest))
    return suite
if __name__ == '__main__':
    # Run the curated suite() rather than auto-discovered tests.
    unittest.main(defaultTest='suite')
| {
"content_hash": "ea0ef5861e3342d1a0a3821fa19367b1",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 98,
"avg_line_length": 36.506493506493506,
"alnum_prop": 0.6976165065812878,
"repo_name": "openprocurement/openprocurement.tender.openeu",
"id": "9a5b87de1a2394ceb4346ff84c5c62b1bbbc828b",
"size": "2835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openprocurement/tender/openeu/tests/complaint.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "878959"
}
],
"symlink_target": ""
} |
from biplist import *
from biplist import PlistWriter
import datetime
import io
import os
#from cStringIO import StringIO
import subprocess
import tempfile
from test_utils import *
import unittest
# Py2/Py3 compatibility: Python 3 has no ``unicode`` builtin, so alias it
# to ``str`` there.
try:
    unicode
except NameError:
    unicode = str
class TestWritePlist(unittest.TestCase):
def setUp(self):
pass
def roundTrip(self, root, xml=False, expected=None):
# 'expected' is more fallout from the
# don't-write-empty-unicode-strings issue.
plist = writePlistToString(root, binary=(not xml))
self.assertTrue(len(plist) > 0)
readResult = readPlistFromString(plist)
self.assertEqual(readResult, (expected if expected is not None else root))
self.lintPlist(plist)
def lintPlist(self, plistString):
if os.path.exists('/usr/bin/plutil'):
f = tempfile.NamedTemporaryFile()
f.write(plistString)
f.flush()
name = f.name
(status, output) = run_command(['/usr/bin/plutil', '-lint', name])
if status != 0:
self.fail("plutil verification failed (status %d): %s" % (status, output))
def testXMLPlist(self):
self.roundTrip({'hello':'world'}, xml=True)
def testXMLPlistWithData(self):
for binmode in (True, False):
binplist = writePlistToString({'data': Data(b'\x01\xac\xf0\xff')}, binary=binmode)
plist = readPlistFromString(binplist)
self.assertTrue(isinstance(plist['data'], (Data, bytes)), \
"unable to encode then decode Data into %s plist" % ("binary" if binmode else "XML"))
def testConvertToXMLPlistWithData(self):
binplist = writePlistToString({'data': Data(b'\x01\xac\xf0\xff')})
plist = readPlistFromString(binplist)
xmlplist = writePlistToString(plist, binary=False)
self.assertTrue(len(xmlplist) > 0, "unable to convert plist with Data from binary to XML")
def testBoolRoot(self):
self.roundTrip(True)
self.roundTrip(False)
def testDuplicate(self):
l = ["foo" for i in range(0, 100)]
self.roundTrip(l)
def testListRoot(self):
self.roundTrip([1, 2, 3])
def testDictRoot(self):
self.roundTrip({'a':1, 'B':'d'})
def mixedNumericTypesHelper(self, cases):
result = readPlistFromString(writePlistToString(cases))
for i in range(0, len(cases)):
self.assertTrue(cases[i] == result[i])
self.assertEqual(type(cases[i]), type(result[i]), "Type mismatch on %d: %s != %s" % (i, repr(cases[i]), repr(result[i])))
def reprChecker(self, case):
result = readPlistFromString(writePlistToString(case))
self.assertEqual(repr(case), repr(result))
def testBoolsAndIntegersMixed(self):
self.mixedNumericTypesHelper([0, 1, True, False, None])
self.mixedNumericTypesHelper([False, True, 0, 1, None])
self.reprChecker({unicode('1'):[True, False, 1, 0], unicode('0'):[1, 2, 0, {unicode('2'):[1, 0, False]}]})
self.reprChecker([1, 1, 1, 1, 1, True, True, True, True])
def testFloatsAndIntegersMixed(self):
self.mixedNumericTypesHelper([0, 1, 1.0, 0.0, None])
self.mixedNumericTypesHelper([0.0, 1.0, 0, 1, None])
self.reprChecker({unicode('1'):[1.0, 0.0, 1, 0], unicode('0'):[1, 2, 0, {unicode('2'):[1, 0, 0.0]}]})
self.reprChecker([1, 1, 1, 1, 1, 1.0, 1.0, 1.0, 1.0])
def testSetRoot(self):
self.roundTrip(set((1, 2, 3)))
def testDatetime(self):
now = datetime.datetime.utcnow()
now = now.replace(microsecond=0)
self.roundTrip([now])
def testFloat(self):
self.roundTrip({'aFloat':1.23})
def testTuple(self):
result = writePlistToString({'aTuple':(1, 2.0, 'a'), 'dupTuple':('a', 'a', 'a', 'b', 'b')})
self.assertTrue(len(result) > 0)
readResult = readPlistFromString(result)
self.assertEqual(readResult['aTuple'], [1, 2.0, 'a'])
self.assertEqual(readResult['dupTuple'], ['a', 'a', 'a', 'b', 'b'])
def testComplicated(self):
root = {'preference':[1, 2, {'hi there':['a', 1, 2, {'yarrrr':123}]}]}
self.lintPlist(writePlistToString(root))
self.roundTrip(root)
def testString(self):
self.roundTrip(b'0')
self.roundTrip(b'')
self.roundTrip({b'a':b''})
def testLargeDict(self):
d = {}
for i in range(0, 1000):
d['%d' % i] = '%d' % i
self.roundTrip(d)
def testBools(self):
self.roundTrip([True, False])
def testUniques(self):
root = {'hi':'there', 'halloo':'there'}
self.roundTrip(root)
def testWriteToFile(self):
for is_binary in [True, False]:
path = '/var/tmp/test.plist'
writePlist([1, 2, 3], path, binary=is_binary)
self.assertTrue(os.path.exists(path))
with open(path, 'rb') as f:
self.lintPlist(f.read())
def testNone(self):
self.roundTrip(None)
self.roundTrip({'1':None})
self.roundTrip([None, None, None])
def testBadKeys(self):
try:
self.roundTrip({None:1})
self.fail("None is not a valid key in Cocoa.")
except InvalidPlistException as e:
pass
try:
self.roundTrip({Data(b"hello world"):1})
self.fail("Data is not a valid key in Cocoa.")
except InvalidPlistException as e:
pass
try:
self.roundTrip({1:1})
self.fail("Number is not a valid key in Cocoa.")
except InvalidPlistException as e:
pass
def testIntBoundaries(self):
edges = [0xff, 0xffff, 0xffffffff]
for edge in edges:
cases = [edge, edge-1, edge+1, edge-2, edge+2, edge*2, edge/2]
self.roundTrip(cases)
edges = [-pow(2, 7), pow(2, 7) - 1,
-pow(2, 15), pow(2, 15) - 1,
-pow(2, 31), pow(2, 31) - 1,
-pow(2, 63), pow(2, 64) - 1]
self.roundTrip(edges)
ioBytes = io.BytesIO()
writer = PlistWriter(ioBytes)
bytes = [(1, [pow(2, 7) - 1]),
(2, [pow(2, 15) - 1]),
(4, [pow(2, 31) - 1]),
(8, [-pow(2, 7), -pow(2, 15), -pow(2, 31), -pow(2, 63), pow(2, 63) - 1]),
(16, [pow(2, 64) - 1])
]
for bytelen, tests in bytes:
for test in tests:
got = writer.intSize(test)
self.assertEqual(bytelen, got, "Byte size is wrong. Expected %d, got %d" % (bytelen, got))
bytes_lists = [list(x) for x in bytes]
self.roundTrip(bytes_lists)
try:
self.roundTrip([0x10000000000000000, pow(2, 64)])
self.fail("2^64 should be too large for Core Foundation to handle.")
except InvalidPlistException as e:
pass
def testWriteData(self):
    """A Data wrapper round-trips its byte payload."""
    payload = Data(b"woohoo")
    self.roundTrip(payload)
def testUnicode(self):
    """Unicode strings round-trip; empty unicode collapses to b''."""
    single = unicode("Mirror's Edge\u2122 for iPad")
    writePlist(single, "/tmp/odd.plist")
    self.roundTrip(single)
    several = [unicode("Mirror's Edge\u2122 for iPad"),
               unicode('Weightbot \u2014 Track your Weight in Style')]
    self.roundTrip(several)
    self.roundTrip({unicode(""): unicode("")}, expected={b'': b''})
    self.roundTrip(unicode(""), expected=b'')
def testUidWrite(self):
    """Uid references serialize inside an NSKeyedArchiver-style plist."""
    archive = {
        '$version': 100000,
        '$objects': [
            '$null',
            {'$class': Uid(3), 'somekey': Uid(2)},
            'object value as string',
            {'$classes': ['Archived', 'NSObject'], '$classname': 'Archived'},
        ],
        '$top': {'root': Uid(1)},
        '$archiver': 'NSKeyedArchiver',
    }
    self.roundTrip(archive)
# Allow running this test module directly (not only via a test runner).
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "3f1bc7816fa1cd8e7b9dcef132e2772c",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 133,
"avg_line_length": 36.881278538812786,
"alnum_prop": 0.5535471090751517,
"repo_name": "ForrestAlfred/biplist",
"id": "2998941ef18e83d7e0f19377255ba3c5dfaac297",
"size": "8077",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_write.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "45744"
}
],
"symlink_target": ""
} |
from ..Qt import QtCore, QtGui, QtWidgets
class DockDrop(object):
    """Provides dock-dropping methods.

    Mixin for widgets that can act as drop targets for Dock widgets.
    NOTE(review): assumes the concrete class also inherits a QWidget,
    which supplies setAcceptDrops/width/height/parent -- confirm.
    """

    def __init__(self, allowedAreas=None):
        # ``allowedAreas`` restricts where another dock may be dropped onto
        # this widget; None means all five areas are accepted.
        object.__init__(self)
        if allowedAreas is None:
            allowedAreas = ['center', 'right', 'left', 'top', 'bottom']
        self.allowedAreas = set(allowedAreas)
        self.setAcceptDrops(True)
        # Currently highlighted drop area ('center'/'left'/... or None).
        self.dropArea = None
        self.overlay = DropAreaOverlay(self)
        self.overlay.raise_()

    def resizeOverlay(self, size):
        # Keep the drop-indicator overlay sized to this widget.
        self.overlay.resize(size)

    def raiseOverlay(self):
        # Keep the overlay on top of newly raised children.
        self.overlay.raise_()

    def dragEnterEvent(self, ev):
        # Accept only drags whose source declares itself a dock through the
        # implements('dock') duck-typing protocol.
        src = ev.source()
        if hasattr(src, 'implements') and src.implements('dock'):
            #print "drag enter accept"
            ev.accept()
        else:
            #print "drag enter ignore"
            ev.ignore()

    def dragMoveEvent(self, ev):
        """Choose a drop area from the cursor position and update the overlay."""
        #print "drag move"
        # Distances from the cursor to the left/right/top/bottom edges.
        ld = ev.pos().x()
        rd = self.width() - ld
        td = ev.pos().y()
        bd = self.height() - td
        mn = min(ld, rd, td, bd)
        if mn > 30:
            # Far (>30 px) from every edge: drop into the center.
            self.dropArea = "center"
        elif (ld == mn or td == mn) and mn > self.height()/3.:
            # NOTE(review): these two branches mix edge selection with
            # width/height thresholds asymmetrically; looks intentional for
            # thin widgets but verify against upstream intent.
            self.dropArea = "center"
        elif (rd == mn or ld == mn) and mn > self.width()/3.:
            self.dropArea = "center"
        elif rd == mn:
            self.dropArea = "right"
        elif ld == mn:
            self.dropArea = "left"
        elif td == mn:
            self.dropArea = "top"
        elif bd == mn:
            self.dropArea = "bottom"
        if ev.source() is self and self.dropArea == 'center':
            # Dropping a dock onto its own center would be a no-op.
            #print "  no self-center"
            self.dropArea = None
            ev.ignore()
        elif self.dropArea not in self.allowedAreas:
            #print "  not allowed"
            self.dropArea = None
            ev.ignore()
        else:
            #print "  ok"
            ev.accept()
        self.overlay.setDropArea(self.dropArea)

    def dragLeaveEvent(self, ev):
        # Hide the overlay when the drag leaves this widget.
        self.dropArea = None
        self.overlay.setDropArea(self.dropArea)

    def dropEvent(self, ev):
        area = self.dropArea
        if area is None:
            return
        if area == 'center':
            # 'center' is expressed to moveDock as stacking 'above'.
            area = 'above'
        # NOTE(review): ``self.area`` is expected to be provided by the host
        # class (the managing DockArea) -- confirm.
        self.area.moveDock(ev.source(), area, self)
        self.dropArea = None
        self.overlay.setDropArea(self.dropArea)
class DropAreaOverlay(QtWidgets.QWidget):
    """Overlay widget that draws drop areas during a drag-drop operation"""

    def __init__(self, parent):
        QtWidgets.QWidget.__init__(self, parent)
        self.dropArea = None
        self.hide()
        self.setAttribute(QtCore.Qt.WA_TransparentForMouseEvents)

    def setDropArea(self, area):
        """Show the overlay over the region matching *area*, or hide it."""
        self.dropArea = area
        if area is None:
            self.hide()
            return
        ## Resize overlay to just the region where drop area should be displayed.
        ## This works around a Qt bug--can't display transparent widgets over QGLWidget
        parent_rect = self.parent().rect()
        region = QtCore.QRect(parent_rect)
        band_w = min(30, parent_rect.width() / 3.)
        band_h = min(30, parent_rect.height() / 3.)
        if area == 'left':
            region.setWidth(band_w)
        elif area == 'right':
            region.setLeft(region.left() + parent_rect.width() - band_w)
        elif area == 'top':
            region.setHeight(band_h)
        elif area == 'bottom':
            region.setTop(region.top() + parent_rect.height() - band_h)
        elif area == 'center':
            region.adjust(band_w, band_h, -band_w, -band_h)
        self.setGeometry(region)
        self.show()
        self.update()

    def paintEvent(self, ev):
        """Fill the active drop region with a translucent blue rectangle."""
        if self.dropArea is None:
            return
        painter = QtGui.QPainter(self)
        painter.setBrush(QtGui.QBrush(QtGui.QColor(100, 100, 255, 50)))
        painter.setPen(QtGui.QPen(QtGui.QColor(50, 50, 150), 3))
        painter.drawRect(self.rect())
| {
"content_hash": "c9985a362422180ba678519ad9730887",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 91,
"avg_line_length": 32.14173228346457,
"alnum_prop": 0.5159235668789809,
"repo_name": "mylxiaoyi/mypyqtgraph-qt5",
"id": "f21b8c252dad0cf1c0aa68b7bb59acc0e3f69efa",
"size": "4106",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyqtgraph/dockarea/DockDrop.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16424"
},
{
"name": "JavaScript",
"bytes": "96004"
},
{
"name": "Makefile",
"bytes": "4606"
},
{
"name": "Matlab",
"bytes": "1752"
},
{
"name": "Python",
"bytes": "2251168"
},
{
"name": "Shell",
"bytes": "4116"
}
],
"symlink_target": ""
} |
from django.core import mail
from django.contrib.auth.models import User, Group
from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
from projector.models import Task, Team, Membership
class EmailTest(TestCase):
    """Tests that task activity produces the expected notification e-mails.

    NOTE: the assertions are strictly order-dependent -- each step checks
    ``len(mail.outbox)`` against the cumulative number of messages sent so
    far and indexes the outbox accordingly.
    """

    fixtures = ['test_data.json']

    def setUp(self):
        self.client = Client()

    def _create_task(self, user, project, summary='Task summary',
            description='Example description', watch_changes=True):
        """
        Creates task for given parameters. Assumes given user is already
        logged in by ``self.client``.

        Returns created task.
        """
        # Template task supplies default status/priority/type/component ids.
        task = Task.objects.get_for_project(project)
        data = {
            'summary': summary,
            'description': description,
            'status': task.status_id,
            'priority': task.priority_id,
            'type': task.type_id,
            'owner': user.username,
            'component': task.component_id,
        }
        if watch_changes:
            data['watch_changes'] = u'on'
        response = self.client.post(reverse('projector_task_create', kwargs={
            'username': user.username,
            'project_slug': project.slug}),
            data=data,
            follow=True)
        self.assertEqual(response.status_code, 200)
        # Most recently created task for this user/project is the new one.
        return user.task_set.filter(project=project).order_by('-created_at')[0]

    def test_tasks_notification(self):
        # needs project to be created first
        jack = User.objects.get(username='jack')
        self.client.login(username='jack', password='jack')
        project = jack.project_set.all()[0]
        task = self._create_task(user=jack, project=project)
        # Author of the project should be notified after task is created
        self.assertEquals(len(mail.outbox), 1)
        joe = User.objects.get(username='joe')
        self.client.login(username='joe', password='joe')
        task.watch(joe)
        # After joe started to watch task, calling ``notify`` should send one
        # mail with both jack and joe in recipient list
        task.notify()
        self.assertEquals(len(mail.outbox), 2)
        self.assertEquals(
            set([jack.email, joe.email]),
            set(mail.outbox[1].recipients())
        )
        # If we set recipient list arbitrary we expect only those recipients
        # would receive the message
        recipient_list = [joe.email]
        task.notify(recipient_list=recipient_list)
        self.assertEquals(len(mail.outbox), 3)
        self.assertEquals(recipient_list, mail.outbox[2].recipients())
        # If project is private we need to ensure only members and author/owner
        # would receive the message
        task.project.public = False
        task.project.save()
        task.notify()
        self.assertEquals(len(mail.outbox), 4)
        self.assertEquals([jack.email], mail.outbox[3].recipients())
        # But if joe would join Team associated with task's project then he
        # could get the message
        group = Group.objects.create(name='jack-n-joe')
        joe.groups.add(group)
        Team.objects.create(group=group, project=task.project)
        task.notify()
        self.assertEquals(len(mail.outbox), 5)
        self.assertEquals(
            set([jack.email, joe.email]),
            set(mail.outbox[4].recipients()),
        )
        # Membership would be enough, too
        joe.groups.remove(group)
        Membership.objects.create(project=task.project, member=joe)
        task.notify()
        self.assertEquals(len(mail.outbox), 6)
        self.assertEquals(
            set([jack.email, joe.email]),
            set(mail.outbox[5].recipients()),
        )
| {
"content_hash": "314494aa4d0f4757e8c78705b5b4da40",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 79,
"avg_line_length": 35.48571428571429,
"alnum_prop": 0.6137949543746645,
"repo_name": "lukaszb/django-projector",
"id": "ab4a5eff842e207fc54aba433e8ddeef86379569",
"size": "3726",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "projector/tests/test_emails.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "43056"
},
{
"name": "Python",
"bytes": "309918"
}
],
"symlink_target": ""
} |
# Sphinx build configuration for the marketdatajl documentation
# (standard sphinx-quickstart layout; Julia domain provided by juliadoc).
import sys
import os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

import juliadoc
import sphinx_rtd_theme

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['juliadoc.julia',
              'juliadoc.jlhelp']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'marketdatajl'
copyright = u'2015, Multiple'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2'
# The full version, including alpha/beta/rc tags.
release = '0.2.2'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# Julia is the default domain and highlight language for literal blocks.
primary_domain = 'jl'
highlight_language = 'julia'

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'julia'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [juliadoc.get_theme_dir(),
                   sphinx_rtd_theme.get_html_theme_path()]

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
html_sidebars = juliadoc.default_sidebars()

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'marketdatajldoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  ('index', 'marketdatajl.tex', u'marketdatajl Documentation',
   u'Multiple', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'marketdatajl', u'marketdatajl Documentation',
     [u'Multiple'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'marketdatajl', u'marketdatajl Documentation',
   u'Multiple', 'marketdatajl', 'One line description of project.',
   'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| {
"content_hash": "fcd3ca850a98a56755cf8b313800e33c",
"timestamp": "",
"source": "github",
"line_count": 251,
"max_line_length": 79,
"avg_line_length": 31.816733067729082,
"alnum_prop": 0.7062359128474831,
"repo_name": "JuliaPackageMirrors/MarketData.jl",
"id": "ad6969bbe9d5284f3cbe0ae920e8334970a73528",
"size": "8411",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/source/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Julia",
"bytes": "3090"
}
],
"symlink_target": ""
} |
import os
import sys
import datetime
import time
import azure.batch.batch_service_client as batch
import azure.batch.batch_auth as batchauth
import azure.batch.models as batchmodels
from azure.storage.table import TableService, TableBatch
from azure.storage.blob import BlockBlobService
def get_search_state(all_tasks_complete, any_failures):
    """Map overall job progress flags to a search-state label.

    Returns 'Running' while work remains, otherwise 'Error' if any task
    failed and 'Complete' if all succeeded.
    """
    if not all_tasks_complete:
        return 'Running'
    return 'Error' if any_failures else 'Complete'
def get_query_state(task):
    """Translate a Batch task's state into a query-state label.

    active/preparing -> 'Waiting'; running -> 'Running'; completed ->
    'Success' on exit code 0, else 'Error'.  Any other state yields None
    (implicitly), matching the original fall-through behavior.
    """
    state = task.state
    if state in (batchmodels.TaskState.active, batchmodels.TaskState.preparing):
        return 'Waiting'
    if state == batchmodels.TaskState.running:
        return 'Running'
    if state == batchmodels.TaskState.completed:
        return 'Success' if task.execution_info.exit_code == 0 else 'Error'
def wait_for_tasks_to_complete(
        table_service, batch_client, entity_pk, entity_rk, job_id):
    """
    Returns when all tasks in the specified job reach the Completed state.

    Polls the Batch job every 5 seconds.  On each pass it mirrors per-task
    progress into the SearchQueryEntity table and overall progress into the
    SearchEntity row identified by (entity_pk, entity_rk).
    """
    while True:
        entity = table_service.get_entity(
            'SearchEntity', entity_pk, entity_rk)
        tasks = batch_client.task.list(job_id)
        # The 'JobManager' task is this script itself; exclude it from counts.
        # NOTE(review): ``tasks`` is iterated several times below; confirm
        # the object returned by task.list() is re-iterable in this SDK
        # version.
        incomplete_tasks = [task for task in tasks if
                            task.id != 'JobManager' and
                            task.state != batchmodels.TaskState.completed]
        complete_tasks = [task for task in tasks if
                          task.id != 'JobManager' and
                          task.state == batchmodels.TaskState.completed]
        failed_tasks = [task for task in complete_tasks if
                        task.execution_info.exit_code != 0 or
                        task.execution_info.scheduling_error is not None]
        # All query entities for this search share the search's RowKey as
        # their PartitionKey.
        queries = table_service.query_entities(
            'SearchQueryEntity',
            filter="PartitionKey eq '{}'".format(entity.RowKey))
        current_batch_count = 0
        updateBatch = TableBatch()
        for task in tasks:
            # Each Batch task id corresponds to one query entity's RowKey.
            matching_queries = [q for q in queries if q.RowKey == task.id]
            if not matching_queries:
                print('Could not find query {}'.format(task.id))
                continue
            query = matching_queries[0]
            update = False
            state = get_query_state(task)
            if query._State != state:
                query._State = state
                update = True
            # Record StartTime/EndTime only once, when first observed.
            if task.state == batchmodels.TaskState.running:
                if not hasattr(query, 'StartTime'):
                    query.StartTime = task.execution_info.start_time
                    update = True
            if task.state == batchmodels.TaskState.completed:
                if not hasattr(query, 'EndTime'):
                    query.EndTime = task.execution_info.end_time
                    update = True
            if update:
                updateBatch.update_entity(query)
                current_batch_count += 1
            if current_batch_count == 99:
                # Flush early -- presumably to stay under Azure Table
                # storage's 100-operations-per-batch limit; confirm.
                table_service.commit_batch('SearchQueryEntity', updateBatch)
                current_batch_count = 0
                updateBatch = TableBatch()
        if current_batch_count > 0:
            table_service.commit_batch('SearchQueryEntity', updateBatch)
        all_tasks_complete = not incomplete_tasks
        any_failures = len(failed_tasks) > 0
        entity.CompletedTasks = len(complete_tasks)
        entity._State = get_search_state(all_tasks_complete, any_failures)
        if not incomplete_tasks:
            # Everything finished: stamp the end time and stop polling.
            entity.EndTime = datetime.datetime.utcnow()
            table_service.update_entity('SearchEntity', entity)
            return
        else:
            table_service.update_entity('SearchEntity', entity)
        time.sleep(5)
if __name__ == '__main__':
    # Positional CLI arguments supplied when the job-manager task is queued:
    #   1: storage account  2: storage key  3: batch account  4: batch key
    #   5: batch URL        6: job id       7: entity PK      8: entity RK
    storage_account = sys.argv[1]
    storage_key = sys.argv[2]
    batch_account = sys.argv[3]
    batch_key = sys.argv[4]
    batch_url = sys.argv[5]
    job_id = sys.argv[6]
    entity_pk = sys.argv[7]
    entity_rk = sys.argv[8]
    table_service = TableService(account_name=storage_account,
                                 account_key=storage_key)
    # NOTE(review): blob_service is constructed but never used in this
    # module -- possibly kept for parity with sibling scripts; confirm
    # before removing.
    blob_service = BlockBlobService(account_name=storage_account,
                                    account_key=storage_key)
    credentials = batchauth.SharedKeyCredentials(batch_account, batch_key)
    batch_client = batch.BatchServiceClient(credentials, base_url=batch_url)
    wait_for_tasks_to_complete(table_service, batch_client, entity_pk, entity_rk, job_id)
| {
"content_hash": "08009ccc0f7283936399dbc53c5b17c6",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 89,
"avg_line_length": 36.125984251968504,
"alnum_prop": 0.5961203138622494,
"repo_name": "djolent/WebApp",
"id": "0965d16d937dcf3c101f0b04716f04a6b41b6bfc",
"size": "4588",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "LifeSciences/AzureBlast/AzureBlast/BatchScripts/SearchJobManager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "117"
},
{
"name": "Batchfile",
"bytes": "1111"
},
{
"name": "C#",
"bytes": "368441"
},
{
"name": "CSS",
"bytes": "21226"
},
{
"name": "JavaScript",
"bytes": "292456"
},
{
"name": "PowerShell",
"bytes": "3796"
},
{
"name": "Python",
"bytes": "14958"
},
{
"name": "Shell",
"bytes": "6364"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from .models import SupportTicket
@admin.register(SupportTicket)
class SupportTicketAdmin(admin.ModelAdmin):
    """Admin changelist for support tickets: show id, name and phone number."""
    list_display = ('id', 'name', 'phone_number')
| {
"content_hash": "95dba8dcc2b3e58f120bdd7e75313b31",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 54,
"avg_line_length": 22.1,
"alnum_prop": 0.7737556561085973,
"repo_name": "TwilioDevEd/browser-calls-django",
"id": "17436f8534d16882ef0b873fefe653da405166c4",
"size": "221",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "browser_calls/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "471"
},
{
"name": "Dockerfile",
"bytes": "307"
},
{
"name": "HTML",
"bytes": "7409"
},
{
"name": "JavaScript",
"bytes": "4175"
},
{
"name": "Makefile",
"bytes": "465"
},
{
"name": "Python",
"bytes": "14316"
},
{
"name": "Shell",
"bytes": "901"
}
],
"symlink_target": ""
} |
from .app import app
import multiapp.routes
| {
"content_hash": "97865344e09c1d0355ade721e6421391",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 22,
"avg_line_length": 11.5,
"alnum_prop": 0.782608695652174,
"repo_name": "opendatadurban/multi-app",
"id": "d64cb9db5a48fcdc7b1be019d3e95b7110424f7e",
"size": "46",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "multiapp/core.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5606"
},
{
"name": "HTML",
"bytes": "82856"
},
{
"name": "JavaScript",
"bytes": "12262"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "24562"
}
],
"symlink_target": ""
} |
import sys
import os
import os.path
import subprocess
import xml.etree.ElementTree as xml

# Make the bundled generator helper modules importable before importing them.
scriptDir = os.path.dirname(os.path.realpath(__file__))
sys.path.append( scriptDir + "/GeneratorUtils" )

import xml2objects
import generatedCode as gen
import containerWidget as cw
import dialogWidget as dw
import commonWidget as common
import httpControl as http
import cpvalidate
from optparse import OptionParser

## ERROR CODES ##
## 1 : missing command line arguments
## 2 : Did not pass xsd validation
## 3 : Did not pass logical validation
## 4 : Other error

### Start by validating the input and the xml ###
parser = OptionParser()
parser.add_option("-p", "--path", dest="path", default=scriptDir + "/../../samples/generated/",
                  help="destination path for generated files")
(options, args) = parser.parse_args()
path = options.path

generated = gen.Generator(scriptDir, path)
generated.confirmGenerate()
generated.initializeFiles()

if len(args) < 1 :
    print >> sys.stderr, "ERROR - Please provide the xml file as input"
    sys.exit(1)

# NOTE(review): the per-file processing below is reconstructed as the loop
# body (validation + generation per input file, merge after the loop) --
# confirm the indentation against the upstream source.
for i in range(0, len(args)) :
    xmlfile = args[i]
    print "\nProcessing xmlfile: " + xmlfile + "\n"
    if sys.platform != 'win32':
        # Validate against the control-panel schema using xmllint.
        cpFile = os.path.join(scriptDir, "cp.xsd")
        subprocArgs = "xmllint --noout --schema {0} {1}".format(cpFile, xmlfile)
        rc = subprocess.call(subprocArgs, shell=True)
        if rc != 0 :
            print >> sys.stderr, "\nERROR - xml xsd validation did not pass"
            sys.exit(2)
        print "\nxml xsd validation passed"
    else:
        # There is no pure python way to easily do this
        print "\nWARNING - skipping xml validation as xmllint is not available on this platform"

    ### Initialize the generated structure ###
    o = xml2objects.ObjectBuilder(xmlfile)
    if not cpvalidate.validate_all(o.root):
        print >> sys.stderr, "\nERROR - logic xml validation did not pass"
        sys.exit(3)
    print "\nxml logic validation passed"

    generated.setControlDeviceData(o.root.controlPanelDevice.name, o.root.controlPanelDevice.headerCode)
    generated.setLanguageSets(o.root.controlPanelDevice.languageSet)

    ### Get and process HttpControlElements
    if hasattr(o.root.controlPanelDevice, "url") :
        httpControl = http.HttpControl(generated, o.root.controlPanelDevice.url)
        httpControl.generate()

    ### Get and process all ControlPanels
    if hasattr(o.root.controlPanelDevice, "controlPanels") :
        for cp in o.root.controlPanelDevice.controlPanels.controlPanel :
            generated.addControlPanel(cp.rootContainer, cp.attr["languageSet"])
            parentName = generated.unitName + cp.rootContainer.name[:1].upper() + cp.rootContainer.name[1:] + "ControlPanel"
            container = cw.Container(generated, cp.rootContainer, parentName, cp.attr["languageSet"], 1)
            container.generate()

    ### Get and process all NotificationAction
    if hasattr(o.root.controlPanelDevice, "notificationActions") :
        if hasattr(o.root.controlPanelDevice.notificationActions, "dialog") :
            for notDialog in o.root.controlPanelDevice.notificationActions.dialog :
                generated.addNotificationAction(notDialog, notDialog.attr["languageSet"])
                parentName = generated.unitName + notDialog.name[:1].upper() + notDialog.name[1:] + "NotificationAction"
                dialog = dw.Dialog(generated, notDialog, parentName, notDialog.attr["languageSet"], 1)
                dialog.generate()
        if hasattr(o.root.controlPanelDevice.notificationActions, "container") :
            for notContainer in o.root.controlPanelDevice.notificationActions.container :
                generated.addNotificationAction(notContainer, notContainer.attr["languageSet"])
                parentName = generated.unitName + notContainer.name[:1].upper() + notContainer.name[1:] + "NotificationAction"
                container = cw.Container(generated, notContainer, parentName, notContainer.attr["languageSet"], 1)
                container.generate()

### Finish up merging all the different components ###
generated.replaceInFiles()
generated.writeFiles()

sys.exit(0)
| {
"content_hash": "2376688bca02748cce4a2a22b7256007",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 142,
"avg_line_length": 42.61616161616162,
"alnum_prop": 0.6854704906375918,
"repo_name": "ADVANTECH-Corp/node-alljoyn",
"id": "58361fc591dc8a64b71c498a3b3edf9c0f4fd770",
"size": "5022",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "alljoyn/services/controlpanel/cpp/tools/CPSAppGenerator/generateCPSApp.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4995"
},
{
"name": "C",
"bytes": "638481"
},
{
"name": "C#",
"bytes": "775008"
},
{
"name": "C++",
"bytes": "12965271"
},
{
"name": "CSS",
"bytes": "17461"
},
{
"name": "Groff",
"bytes": "3068"
},
{
"name": "HTML",
"bytes": "45149"
},
{
"name": "Java",
"bytes": "4802386"
},
{
"name": "JavaScript",
"bytes": "606220"
},
{
"name": "Makefile",
"bytes": "42536"
},
{
"name": "Objective-C",
"bytes": "1829239"
},
{
"name": "Objective-C++",
"bytes": "856772"
},
{
"name": "Python",
"bytes": "559767"
},
{
"name": "Shell",
"bytes": "40697"
},
{
"name": "TeX",
"bytes": "817"
},
{
"name": "Visual Basic",
"bytes": "1285"
},
{
"name": "XSLT",
"bytes": "100471"
}
],
"symlink_target": ""
} |
"""Mimic pyquick exercise -- optional extra exercise.
Google's Python Class
Read in the file specified on the command line.
Do a simple split() on whitespace to obtain all the words in the file.
Rather than read the file line by line, it's easier to read
it into one giant string and split it once.
Build a "mimic" dict that maps each word that appears in the file
to a list of all the words that immediately follow that word in the file.
The list of words can be be in any order and should include
duplicates. So for example the key "and" might have the list
["then", "best", "then", "after", ...] listing
all the words which came after "and" in the text.
We'll say that the empty string is what comes before
the first word in the file.
With the mimic dict, it's fairly easy to emit random
text that mimics the original. Print a word, then look
up what words might come next and pick one at random as
the next work.
Use the empty string as the first word to prime things.
If we ever get stuck with a word that is not in the dict,
go back to the empty string to keep things moving.
Note: the standard python module 'random' includes a
random.choice(list) method which picks a random element
from a non-empty list.
For fun, feed your program to itself as input.
Could work on getting it to put in linebreaks around 70
columns, so the output looks better.
"""
import random
import sys
def mimic_dict(filename):
    """Returns mimic dict mapping each word to list of words which follow it.

    The file is read whole and split on whitespace.  The empty string ''
    acts as the predecessor of the first word, so generation can later be
    primed with ''.  Duplicate followers are kept, preserving frequency.
    """
    # Bug fixes vs. the original: split() instead of split("\s") (which
    # split on the literal two characters backslash-s, not whitespace),
    # the file handle is closed via `with`, and the dict is actually built
    # and returned instead of an empty-bodied loop returning None.
    with open(filename, 'r') as words_file:
        words = words_file.read().split()
    word_dict = {}
    prev = ''
    for word in words:
        word_dict.setdefault(prev, []).append(word)
        prev = word
    return word_dict
def print_mimic(mimic_dict, word):
    """Given mimic dict and start word, prints 200 random words.

    Each printed word is chosen uniformly at random from the followers of
    the previous word; when a word has no followers in the dict, generation
    restarts from the empty-string primer key to keep things moving.
    """
    # Implemented: the original was an empty "+++your code here+++" stub.
    for _ in range(200):
        followers = mimic_dict.get(word)
        if not followers:
            # Unknown (or follower-less) word: fall back to the '' primer.
            followers = mimic_dict.get('', [''])
        word = random.choice(followers)
        print(word, end=' ')
    print()
# Provided main(), calls mimic_dict() and print_mimic()
def main():
    """CLI entry point: build a mimic dict from argv[1] and print mimic text."""
    if len(sys.argv) != 2:
        print('usage: ./mimic.py file-to-read')
        sys.exit(1)
    # Renamed local from `dict`, which shadowed the builtin type.
    word_dict = mimic_dict(sys.argv[1])
    print_mimic(word_dict, '')


if __name__ == '__main__':
    main()
| {
"content_hash": "071b716da7da1e8164b3fb88f4112216",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 78,
"avg_line_length": 29.735294117647058,
"alnum_prop": 0.718595450049456,
"repo_name": "cordjr/wttd-exercise",
"id": "708dddc43258148e25b5df30e94ee8a4e1b7c1f0",
"size": "2253",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "basic/mimic.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "191608"
},
{
"name": "HTML",
"bytes": "647778"
},
{
"name": "Python",
"bytes": "24384"
}
],
"symlink_target": ""
} |
import os.path
from subprocess import PIPE
from ovm.drivers.storage.generic import StorageDriver
from ovm.exceptions import DriverError
from ovm.utils.logger import logger
from ovm.utils.compat23 import Popen, etree
__all__ = ['LvmDriver']
class LvmDriver(StorageDriver):
    """Storage driver that backs guest disks with LVM logical volumes."""

    #: disk format reported to the hypervisor for raw logical volumes
    DISK_FORMAT = 'raw'

    def __init__(self):
        super(LvmDriver, self).__init__()

    def _create_logical_volume(self, name, size):
        """Create a logical volume ``name`` of ``size`` GiB in the configured VG.

        Raises DriverError when no volume group is configured or when
        lvcreate exits non-zero.
        """
        size = '{}G'.format(size)
        vgname = self._params.get('volume_group')
        if not vgname:
            # Bug fix: the exception was constructed but never raised, so a
            # missing volume group silently fell through to lvcreate with
            # vgname=None.  (Also fixed the "Groupe" typo in the message.)
            raise DriverError('Volume group not set.')
        args = ['lvcreate', '--size', size, '--name', str(name), vgname]
        with Popen(args, stdout=PIPE, stderr=PIPE) as process:
            process.wait()
            if process.returncode != 0:
                # Bug fix: raise the error instead of discarding it.
                raise DriverError(process.stderr.read().decode('utf-8'))

    def generate_xml(self, disk):
        """Return the libvirt ``<disk>`` element for ``disk`` as an etree node."""
        disktree = etree.Element('disk')
        disktree.set('type', 'block')
        disktree.set('device', 'disk')
        driver = etree.SubElement(disktree, 'driver')
        driver.set('name', 'qemu')
        driver.set('type', LvmDriver.DISK_FORMAT)
        driver.set('cache', 'writeback')
        source = etree.SubElement(disktree, 'source')
        source.set('dev', disk.path)
        return disktree

    def resize_disk(self, disk, new_size):
        """Resize the LV behind ``disk`` to ``new_size`` GiB.

        Raises DriverError when lvresize exits non-zero.
        """
        args = ['lvresize', '--size', '{}G'.format(new_size), disk.path]
        with Popen(args, stdout=PIPE, stderr=PIPE) as process:
            process.wait()
            if process.returncode != 0:
                # Bug fix: raise the error instead of discarding it.
                raise DriverError(process.stderr.read().decode('utf-8'))

    def import_image(self, image, name):
        """Create an LV sized like ``image`` and copy the image onto it.

        Returns the device path of the new volume; raises DriverError when
        the volume does not appear after creation.
        """
        path = os.path.join(self._params.get('root'), name)
        self._create_logical_volume(name, image.size)
        if not os.path.exists(path):
            raise DriverError('Volume group not created.')
        image.copy_on_device(path, LvmDriver.DISK_FORMAT)
        return path

    def remove_disk(self, disk):
        """Remove the LV backing ``disk`` (``lvremove --force``).

        Raises DriverError when lvremove exits non-zero.
        """
        logger.debug('Trying to remove disk "%s".', disk.path)
        args = ['lvremove', '--force', disk.path]
        with Popen(args, stdout=PIPE, stderr=PIPE) as process:
            process.wait()
            if process.returncode != 0:
                # Bug fix: raise the error instead of discarding it.
                raise DriverError(process.stderr.read().decode('utf-8'))
| {
"content_hash": "fdcfa6e6185cca36464682954f14d5b7",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 72,
"avg_line_length": 31.819444444444443,
"alnum_prop": 0.6006110868616324,
"repo_name": "lightcode/OVM",
"id": "a8f86c1f41941f1a3f0c913f785915c5c82e3eef",
"size": "2339",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ovm/drivers/storage/lvm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "93740"
},
{
"name": "Shell",
"bytes": "15995"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration joining the divergent 0036 and 0037 branches of
    ``pdfapp`` so that ``migrate`` sees a single leaf; it intentionally
    performs no schema operations of its own."""

    # Both branch leaves must be applied before this merge point.
    dependencies = [
        ('pdfapp', '0036_auto_20170331_0223'),
        ('pdfapp', '0037_auto_20170331_0207'),
    ]

    # Intentionally empty: nothing changes in the database.
    operations = [
    ]
| {
"content_hash": "24d6edf31312f05454b5d51bc93ebd99",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 46,
"avg_line_length": 18.785714285714285,
"alnum_prop": 0.6311787072243346,
"repo_name": "NumberZeroSoftware/PDFINVEST",
"id": "f5036bdc5f0ae8819eef43ab4b4777e722335c88",
"size": "336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pdfapp/migrations/0038_merge_20170331_0508.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "156926"
},
{
"name": "HTML",
"bytes": "84038"
},
{
"name": "JavaScript",
"bytes": "87356"
},
{
"name": "PHP",
"bytes": "10314"
},
{
"name": "Python",
"bytes": "156680"
},
{
"name": "Shell",
"bytes": "2121"
}
],
"symlink_target": ""
} |
"""
Helper funcs for tests
"""
import os
from .inputfiles import get_filepath
from junit2htmlreport import runner
def run_runner(tmpdir, filename, *extra):
    """
    Run the junit2html program against the given report and produce a html doc

    :param tmpdir: py.test tmpdir fixture used for the default output location
    :param filename: name of the junit XML input file (resolved via
        get_filepath)
    :param extra: additional arguments; when given they replace the default
        single-output-file invocation entirely
    :return:
    """
    testfile = get_filepath(filename=filename)
    # Idiomatic emptiness test (was `if not len(extra):`).
    if not extra:
        # Default mode: render a single html report into tmpdir and verify
        # that the output file was actually produced.
        outfile = os.path.join(tmpdir.strpath, "report.html")
        runner.run([testfile, outfile])
        assert os.path.exists(outfile)
    else:
        runner.run([testfile] + list(extra))
def test_runner_simple(tmpdir):
    """
    Ensure the stand-alone app renders a fairly empty junit file cleanly

    :param tmpdir: py.test tmpdir fixture
    :return:
    """
    simple_report = "junit-simple_suites.xml"
    run_runner(tmpdir, simple_report)
| {
"content_hash": "2c94ed9da4030b45dd7ff57505457901",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 78,
"avg_line_length": 26.03125,
"alnum_prop": 0.6662665066026411,
"repo_name": "inorton/junit2html",
"id": "564002ab1042b52bc4c0c6e921c1d6cd4e65a487",
"size": "833",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1562"
},
{
"name": "Python",
"bytes": "50048"
}
],
"symlink_target": ""
} |
import time
from collections import defaultdict
class TimeIt(object):
    """Accumulate named wall-clock timings across start()/stop() pairs.

    If a 'total' timer is recorded, __str__ reports each timer's share of
    the total plus a synthetic 'other' entry for unaccounted time.
    """

    def __init__(self, prefix=''):
        self.prefix = prefix
        self.start_times = dict()
        self.elapsed_times = defaultdict(int)

    def start(self, name):
        """Begin timing *name*; it must not already be running."""
        assert(name not in self.start_times)
        self.start_times[name] = time.time()

    def stop(self, name):
        """Stop timing *name* and add the span to its accumulated total."""
        assert(name in self.start_times)
        began = self.start_times.pop(name)
        self.elapsed_times[name] += time.time() - began

    def elapsed(self, name):
        """Total seconds accumulated for *name* (0 if never timed)."""
        return self.elapsed_times[name]

    def reset(self):
        """Drop all running timers and accumulated totals."""
        self.start_times = dict()
        self.elapsed_times = defaultdict(int)

    def __str__(self):
        lines = []
        ranked = sorted(self.elapsed_times.items(),
                        key=lambda kv: kv[1], reverse=True)
        have_total = 'total' in self.elapsed_times
        for label, seconds in ranked:
            if not have_total:
                lines.append('{0}: {1: <10} {2:.1f}\n'.format(
                    self.prefix, label, seconds))
            else:
                # 'total' must dominate every individual timer.
                assert(self.elapsed_times['total'] >= max(self.elapsed_times.values()))
                share = 100. * seconds / self.elapsed_times['total']
                lines.append('{0}: {1: <10} {2:.1f} ({3:.1f}%)\n'.format(
                    self.prefix, label, seconds, share))
        if have_total:
            accounted = sum(t for k, t in self.elapsed_times.items()
                            if k != 'total')
            remainder = self.elapsed_times['total'] - accounted
            assert(remainder >= 0)
            share = 100. * remainder / self.elapsed_times['total']
            lines.append('{0}: {1: <10} {2:.1f} ({3:.1f}%)\n'.format(
                self.prefix, 'other', remainder, share))
        return ''.join(lines)


timeit = TimeIt()
| {
"content_hash": "b6617570da1d3bd82132fba36c03e820",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 99,
"avg_line_length": 38.61363636363637,
"alnum_prop": 0.5609181871689229,
"repo_name": "berkeleydeeprlcourse/homework",
"id": "7751ac7df3d61c57eb74ae9d2e60ecaccecd1948",
"size": "1699",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hw4/timer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "375093"
},
{
"name": "Shell",
"bytes": "2982"
},
{
"name": "TeX",
"bytes": "27182"
}
],
"symlink_target": ""
} |
"""
PhraseApp
PhraseApp API for the interaction with the PhraseApp localization platform
OpenAPI spec version: 2.0
Contact: support@phraseapp.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class TranslationOrder(object):
    """Model object for a PhraseApp translation order.

    NOTE: originally auto generated by the swagger code generator. The
    sixteen structurally identical, validation-free getter/setter property
    pairs have been replaced by properties generated programmatically after
    the class body; instance storage still uses the ``_<name>`` slots, so
    runtime behavior is unchanged.

    Attributes:
      swagger_types (dict): attribute name -> attribute type.
      attribute_map (dict): attribute name -> json key in definition.
    """
    swagger_types = {
        'id': 'str',
        'lsp': 'str',
        'amount_in_cents': 'int',
        'currency': 'str',
        'message': 'str',
        'state': 'str',
        'translation_type': 'str',
        'progress_percent': 'int',
        'source_locale': 'LocalePreview',
        'tag': 'str',
        'styleguide': 'StyleguidePreview',
        'unverify_translations_upon_delivery': 'bool',
        'quality': 'bool',
        'priority': 'bool',
        'created_at': 'datetime',
        'updated_at': 'datetime'
    }

    attribute_map = {
        'id': 'id',
        'lsp': 'lsp',
        'amount_in_cents': 'amount_in_cents',
        'currency': 'currency',
        'message': 'message',
        'state': 'state',
        'translation_type': 'translation_type',
        'progress_percent': 'progress_percent',
        'source_locale': 'source_locale',
        'tag': 'tag',
        'styleguide': 'styleguide',
        'unverify_translations_upon_delivery': 'unverify_translations_upon_delivery',
        'quality': 'quality',
        'priority': 'priority',
        'created_at': 'created_at',
        'updated_at': 'updated_at'
    }

    def __init__(self, id=None, lsp=None, amount_in_cents=None, currency=None,
                 message=None, state=None, translation_type=None,
                 progress_percent=None, source_locale=None, tag=None,
                 styleguide=None, unverify_translations_upon_delivery=None,
                 quality=None, priority=None, created_at=None,
                 updated_at=None):
        """TranslationOrder - a model defined in Swagger.

        Every attribute defaults to None.  Assigning the (possibly None)
        argument directly into the private slot is equivalent to the
        original "pre-init to None, then set only when not None" sequence.
        """
        self.discriminator = None
        self._id = id
        self._lsp = lsp
        self._amount_in_cents = amount_in_cents
        self._currency = currency
        self._message = message
        self._state = state
        self._translation_type = translation_type
        self._progress_percent = progress_percent
        self._source_locale = source_locale
        self._tag = tag
        self._styleguide = styleguide
        self._unverify_translations_upon_delivery = unverify_translations_upon_delivery
        self._quality = quality
        self._priority = priority
        self._created_at = created_at
        self._updated_at = updated_at

    def to_dict(self):
        """
        Returns the model properties as a dict, recursing into contained
        model objects (anything exposing ``to_dict``).
        """
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, 'to_dict') else item
                    for item in value
                ]
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(
                    (key, val.to_dict() if hasattr(val, 'to_dict') else val)
                    for key, val in value.items()
                )
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, TranslationOrder):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other


def _translation_order_accessor(attr):
    """Build a plain read/write property over the ``_<attr>`` slot."""
    private = '_' + attr

    def _get(self):
        return getattr(self, private)

    def _set(self, value):
        setattr(self, private, value)

    return property(
        _get, _set,
        doc='Gets/sets the {0} of this TranslationOrder.'.format(attr))


# Attach one property per swagger attribute, replacing ~350 lines of
# identical generated getter/setter boilerplate with equivalent behavior.
for _attr in TranslationOrder.swagger_types:
    setattr(TranslationOrder, _attr, _translation_order_accessor(_attr))
del _attr
| {
"content_hash": "bb6eaf2f90dda74af6cddb563d4e3e05",
"timestamp": "",
"source": "github",
"line_count": 512,
"max_line_length": 303,
"avg_line_length": 26.599609375,
"alnum_prop": 0.5728761289375137,
"repo_name": "phrase/phraseapp-apispec",
"id": "bba43a312b91a7c24ba5967f0522d14e9c15b980",
"size": "13636",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "swagger/examples/python/phraseapp/models/translation_order.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "53"
},
{
"name": "PHP",
"bytes": "1376925"
},
{
"name": "Python",
"bytes": "826123"
},
{
"name": "Shell",
"bytes": "4151"
}
],
"symlink_target": ""
} |
import argparse
from target_interaction_finder import TargetInteractionFinder
def _parse_bool(value):
    """Parse a command line boolean value.

    ``argparse`` with ``type=bool`` treats ANY non-empty string (including
    'False') as True, so boolean options silently misbehaved; this
    converter accepts the usual spellings and rejects everything else.
    """
    if isinstance(value, bool):
        return value
    lowered = value.lower()
    if lowered in ('true', 't', 'yes', 'y', '1'):
        return True
    if lowered in ('false', 'f', 'no', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError(
        'Boolean value expected, got {!r}'.format(value))


def main():
    """Parse CLI options and run the TargetInteractionFinder.

    :return: the constructed TargetInteractionFinder instance
    """
    parser = argparse.ArgumentParser(
        description='''Extract subgraph(s) from XGMML network(s).''')
    parser.add_argument('ids',
                        type=str,
                        help='identifier or file path to identifier list')
    parser.add_argument('-c', '--column',
                        default=1,
                        type=int,
                        help='''column number for identifiers in identifier list file
                        (default = 1)''')
    parser.add_argument('-s', '--source',
                        default='./source_xgmml/',
                        help='''source file or directory path(s) to database XGMML
                        (default = directory named "source_xgmml"
                        in current working directory)''')
    parser.add_argument('-t', '--type',
                        default='rna',
                        help='input type (rna or protein; default = rna)')
    parser.add_argument('-o', '--output',
                        default='./',
                        help='''output directory path
                        (default = current working directory)''')
    # BUG FIX: this option used ``type=bool``, which made '--debug False'
    # evaluate to True (any non-empty string is truthy).
    parser.add_argument('-d', '--debug',
                        default=False,
                        type=_parse_bool,
                        help='Show debug messages (default = False)')
    args = parser.parse_args()
    node_ids = args.ids
    source_xgmml = args.source
    # CLI is 1-based; the finder expects a 0-based column index.
    node_id_list_column_index = args.column - 1
    node_type = args.type
    output_dir = args.output
    # The '--cache' option was previously commented out of the CLI; caching
    # of parsed source_xgmml graphs is currently always enabled.
    cache = True
    debug = args.debug
    return TargetInteractionFinder(
        source_xgmml=source_xgmml,
        node_ids=node_ids,
        node_id_list_column_index=node_id_list_column_index,
        node_type=node_type,
        output_dir=output_dir,
        cache=cache,
        debug=debug)


if __name__ == '__main__':
    main()
| {
"content_hash": "025d5a5f8319c5ab9f3edd5c745de68c",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 85,
"avg_line_length": 36.75806451612903,
"alnum_prop": 0.49758666081614744,
"repo_name": "ariutta/target-interaction-finder",
"id": "965988667a0cd5b85c46d70d87bad052b4693d33",
"size": "2326",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "targetinteractionfinder/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "21458"
}
],
"symlink_target": ""
} |
import logging
import time
from collections import namedtuple
# Closed set of lifecycle states an instance may report.
States = namedtuple(
    'States', ['STARTING', 'DELETED', 'RUNNING', 'STOPPED', 'FAILED'])
# Canonical state strings, addressable as e.g. INSTANCE_STATUS.RUNNING.
INSTANCE_STATUS = States(STARTING='starting',
                         DELETED='deleted',
                         RUNNING='running',
                         STOPPED='stopped',
                         FAILED='failed')
class PersistenceManager(object):
    """
    wrapper around a store (metahosting.stores) to store instance information
    for local management.
    """
    def __init__(self, config, backend, publish):
        """
        :param config: local_persistence part of the config
        :param backend: store class (metahosting.stores); instantiated here
            with ``config=config`` and used to persist instance records
            (doc fix: the old docstring described a nonexistent
            ``send_method`` parameter and omitted ``backend``)
        :param publish: method to access messaging for sending info
        :return: -
        """
        logging.info('Initializing instance manager')
        # Instantiate the backend store class directly; the intermediate
        # alias in the original added nothing.
        self.instances = backend(config=config)
        self.publish = publish
        logging.info('Instances stored: %r', self.get_instances().keys())

    def get_instance(self, instance_id):
        """Return the stored record for *instance_id* (None if unknown)."""
        return self.instances.get(instance_id)

    def get_instances(self):
        """Return a mapping of all stored instance records."""
        return self.instances.get_all()

    def set_instance(self, instance_id, instance):
        """Persist *instance*, stamping it with the current wall-clock time."""
        instance['ts'] = time.time()
        self.instances.update(instance_id, instance)

    def update_instance_status(self, instance, status, publish=True):
        """Set ``instance['status']``, persist it, and optionally publish."""
        instance['status'] = status
        self.set_instance(instance['id'], instance)
        if publish:
            self.publish_instance(instance['id'])

    def publish_instance(self, instance_id):
        """
        Send information of the corresponding instance to the messaging system
        :param instance_id: id of the instance that we publish information for
        :return: -
        """
        instance = self.get_instance(instance_id)
        if instance is not None:
            self.publish('info', 'instance_info', {'instance': instance})
| {
"content_hash": "62b511c1013488f6421d83819b4864f3",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 79,
"avg_line_length": 34.62264150943396,
"alnum_prop": 0.6343324250681199,
"repo_name": "BeneDicere/metahosting-worker",
"id": "9436ca2d4c7c9fe1b77c7af336298dd7c6cba56b",
"size": "1835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "workers/manager/persistence.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "29152"
}
],
"symlink_target": ""
} |
"""Tests for importing model evaluations."""
import json
import os
import tempfile
import unittest
from unittest import mock

import google.auth
from google.cloud import aiplatform
from google.protobuf import json_format
from google_cloud_pipeline_components.container.experimental.evaluation.import_model_evaluation import main
from google_cloud_pipeline_components.container.experimental.evaluation.import_model_evaluation import to_value
from google_cloud_pipeline_components.container.experimental.evaluation.import_model_evaluation import PROBLEM_TYPE_TO_SCHEMA_URI
from google_cloud_pipeline_components.proto import gcp_resources_pb2
# Metrics schema URI expected on imported evaluation slices.
SCHEMA_URI = 'gs://google-cloud-aiplatform/schema/modelevaluation/classification_metrics_1.0.0.yaml'
# Display name forwarded to import_model_evaluation in several tests.
DISPLAY_NAME = 'sheesh'
# Placeholder pipeline (job) identifier used by the metadata tests.
PIPELINE_JOB_ID = 'thisisanid'
# Serialized metrics fixture: one overall slice plus one sliced entry
# (bytesValue "MA==" is base64 for "0", the annotationSpec slice value).
METRICS = (
    '{"slicedMetrics": [{"singleOutputSlicingSpec": {},"metrics": '
    '{"regression": {"rootMeanSquaredError": 49.40016,"meanAbsoluteError": '
    '49.11752,"meanAbsolutePercentageError": 28428240000.0,"rSquared": '
    '0.003327712,"rootMeanSquaredLogError": 3.6381562}}}, '
    '{"singleOutputSlicingSpec": {"bytesValue": "MA=="}, "metrics": '
    '{"regression": {"rootMeanSquaredError": 123}}}]}'
)
# Feature-attribution fixture used for --metrics_explanation / --explanation.
EXPLANATION_1 = (
    '{"explanation": {"attributions": [{"featureAttributions": '
    '{"BMI": 0.11054060991488765, "BPMeds": 0.0005584407939958813, '
    '"TenYearCHD": 0.0043604360566092525, "age": '
    '0.04241218286542097, "cigsPerDay": 0.03915845070606673, '
    '"currentSmoker": 0.013928816831374438, "diaBP": '
    '0.08652020580541842, "diabetes": 0.0003118844178436772, '
    '"education": 0.048558606478108966, "glucose": '
    '0.01140927870254686, "heartRate": 0.07151496486736889, '
    '"prevalentHyp": 0.0041231606832198425, "prevalentStroke": '
    '1.0034614999319733e-09, "sysBP": 0.06975447340775223, '
    '"totChol": 0.039095268419742674}}]}}')
# Second attribution fixture (differs from EXPLANATION_1 only in the first
# two values) used to verify explanation overriding.
EXPLANATION_2 = ('{"explanation": {"attributions": [{"featureAttributions": '
                 '{"BMI": 0.11111111111111111, "BPMeds": 0.222222222222222222, '
                 '"TenYearCHD": 0.0043604360566092525, "age": '
                 '0.04241218286542097, "cigsPerDay": 0.03915845070606673, '
                 '"currentSmoker": 0.013928816831374438, "diaBP": '
                 '0.08652020580541842, "diabetes": 0.0003118844178436772, '
                 '"education": 0.048558606478108966, "glucose": '
                 '0.01140927870254686, "heartRate": 0.07151496486736889, '
                 '"prevalentHyp": 0.0041231606832198425, "prevalentStroke": '
                 '1.0034614999319733e-09, "sysBP": 0.06975447340775223, '
                 '"totChol": 0.039095268419742674}}]}}')
# Fake project/location and the evaluation resource name returned by the
# mocked import_model_evaluation RPC.
PROJECT = 'test_project'
LOCATION = 'test_location'
MODEL_EVAL_NAME = f'projects/{PROJECT}/locations/{LOCATION}/models/1234/evaluations/567'
def mock_api_call(test_func):
  """Decorator: patch auth and the ModelService import RPCs around a test.

  The wrapped test receives (self, mock_import_eval, mock_import_slice);
  the eval-import mock returns a response whose ``.name`` is
  MODEL_EVAL_NAME so the slice import can be asserted against it.
  """
  @mock.patch.object(google.auth, 'default', autospec=True)
  @mock.patch.object(
      aiplatform.gapic.ModelServiceClient,
      'import_model_evaluation',
      autospec=True)
  @mock.patch.object(
      aiplatform.gapic.ModelServiceClient,
      'batch_import_model_evaluation_slices',
      autospec=True)
  # Decorators apply bottom-up, so the innermost patch (slices) becomes the
  # first mock argument after self.
  def mocked_test(self, mock_import_slice, mock_import_eval, mock_auth):
    mock_creds = mock.Mock(spec=google.auth.credentials.Credentials)
    mock_creds.token = 'token'
    mock_auth.return_value = [mock_creds, 'project']
    import_model_evaluation_response = mock.Mock()
    import_model_evaluation_response.name = MODEL_EVAL_NAME
    mock_import_eval.return_value = import_model_evaluation_response
    # Re-order the mocks into (eval, slice) for the wrapped test.
    test_func(self, mock_import_eval, mock_import_slice)

  return mocked_test
class ImportModelEvaluationTest(unittest.TestCase):
def setUp(self):
super(ImportModelEvaluationTest, self).setUp()
metrics_path = self.create_tempfile().full_path
with open(metrics_path, 'w') as f:
f.write(METRICS)
self.metrics_path = metrics_path
self._gcp_resources = os.path.join(
os.getenv('TEST_UNDECLARED_OUTPUTS_DIR'), 'gcp_resources')
self._project = PROJECT
self._location = LOCATION
self._model_name = f'projects/{self._project}/locations/{self._location}/models/1234'
self._model_evaluation_uri_prefix = f'https://{self._location}-aiplatform.googleapis.com/v1/'
if os.path.exists(self._gcp_resources):
os.remove(self._gcp_resources)
  @mock_api_call
  def test_import_model_evaluation(self, mock_api, _):
    """Happy path: the overall metrics slice is imported with display name."""
    main([
        '--metrics', self.metrics_path, '--problem_type', 'classification',
        '--model_name', self._model_name, '--gcp_resources',
        self._gcp_resources, '--display_name', DISPLAY_NAME
    ])
    # The first (overall) slice's regression metrics are sent with the
    # schema resolved from --problem_type.
    mock_api.assert_called_with(
        mock.ANY,
        parent=self._model_name,
        model_evaluation={
            'metrics':
                to_value(
                    json.loads(METRICS)['slicedMetrics'][0]['metrics']
                    ['regression']),
            'metrics_schema_uri':
                PROBLEM_TYPE_TO_SCHEMA_URI['classification'],
            'display_name':
                DISPLAY_NAME,
        })
@mock_api_call
def test_import_model_evaluation_with_metrics_explanation(self, mock_api, _):
explanation_path = self.create_tempfile().full_path
with open(explanation_path, 'w') as f:
f.write(EXPLANATION_1)
main([
'--metrics', self.metrics_path, '--metrics_explanation',
explanation_path, '--problem_type', 'classification', '--model_name',
self._model_name, '--gcp_resources', self._gcp_resources
])
mock_api.assert_called_with(
mock.ANY,
parent=self._model_name,
model_evaluation={
'metrics':
to_value(
json.loads(METRICS)['slicedMetrics'][0]['metrics']
['regression']),
'metrics_schema_uri':
PROBLEM_TYPE_TO_SCHEMA_URI['classification'],
'model_explanation': {
'mean_attributions': [{
'feature_attributions':
to_value(
json.loads(EXPLANATION_1)['explanation']
['attributions'][0]['featureAttributions'])
}]
},
})
@mock_api_call
def test_import_model_evaluation_with_explanation(self, mock_api, _):
explanation_path = self.create_tempfile().full_path
with open(explanation_path, 'w') as f:
f.write(EXPLANATION_2)
main([
'--metrics', self.metrics_path, '--explanation', explanation_path,
'--problem_type', 'classification', '--model_name', self._model_name,
'--gcp_resources', self._gcp_resources
])
mock_api.assert_called_with(
mock.ANY,
parent=self._model_name,
model_evaluation={
'metrics':
to_value(
json.loads(METRICS)['slicedMetrics'][0]['metrics']
['regression']),
'metrics_schema_uri':
PROBLEM_TYPE_TO_SCHEMA_URI['classification'],
'model_explanation': {
'mean_attributions': [{
'feature_attributions':
to_value(
json.loads(EXPLANATION_2)['explanation']
['attributions'][0]['featureAttributions'])
}]
},
})
@mock_api_call
def test_import_model_evaluation_with_explanation_overriding(
self, mock_api, _):
explanation_path_1 = self.create_tempfile().full_path
with open(explanation_path_1, 'w') as f:
f.write(EXPLANATION_1)
explanation_path_2 = self.create_tempfile().full_path
with open(explanation_path_2, 'w') as f:
f.write(EXPLANATION_2)
main([
'--metrics', self.metrics_path, '--metrics_explanation',
explanation_path_1, '--explanation', explanation_path_2,
'--problem_type', 'classification', '--model_name', self._model_name,
'--gcp_resources', self._gcp_resources
])
mock_api.assert_called_with(
mock.ANY,
parent=self._model_name,
model_evaluation={
'metrics':
to_value(
json.loads(METRICS)['slicedMetrics'][0]['metrics']
['regression']),
'metrics_schema_uri':
PROBLEM_TYPE_TO_SCHEMA_URI['classification'],
'model_explanation': {
'mean_attributions': [{
'feature_attributions':
to_value(
json.loads(EXPLANATION_2)['explanation']
['attributions'][0]['featureAttributions'])
}]
},
})
@mock_api_call
def test_import_model_evaluation_gcp_resources(self, mock_api, _):
main([
'--metrics', self.metrics_path, '--problem_type', 'classification',
'--model_name', self._model_name, '--gcp_resources', self._gcp_resources
])
with open(self._gcp_resources) as f:
serialized_gcp_resources = f.read()
# Instantiate GCPResources Proto
model_evaluation_resources = json_format.Parse(
serialized_gcp_resources, gcp_resources_pb2.GcpResources())
self.assertLen(model_evaluation_resources.resources, 1)
model_evaluation_name = model_evaluation_resources.resources[
0].resource_uri[len(self._model_evaluation_uri_prefix):]
self.assertEqual(model_evaluation_name, MODEL_EVAL_NAME)
  @mock_api_call
  def test_import_model_evaluation_empty_explanation(self, mock_api, _):
    """An unresolved explanation placeholder is ignored, not imported."""
    import_model_evaluation_response = mock.Mock()
    mock_api.return_value = import_model_evaluation_response
    import_model_evaluation_response.name = self._model_name

    main([
        '--metrics', self.metrics_path, '--problem_type', 'classification',
        '--model_name', self._model_name, '--gcp_resources',
        self._gcp_resources, '--metrics_explanation',
        "{{$.inputs.artifacts['metrics'].metadata['explanation_gcs_path']}}"
    ])
    # No model_explanation key: the placeholder was never materialized.
    mock_api.assert_called_with(
        mock.ANY,
        parent=self._model_name,
        model_evaluation={
            'metrics':
                to_value(
                    json.loads(METRICS)['slicedMetrics'][0]['metrics']
                    ['regression']),
            'metrics_schema_uri':
                PROBLEM_TYPE_TO_SCHEMA_URI['classification'],
        })
  @mock_api_call
  def test_import_model_evaluation_empty_explanation_with_empty_explanation_override(
      self, mock_api, _):
    """Both explanation flags unresolved: the CLI must exit with an error."""
    with self.assertRaises(SystemExit):
      main([
          '--metrics',
          self.metrics_path,
          '--problem_type',
          'classification',
          '--model_name',
          self._model_name,
          '--gcp_resources',
          self._gcp_resources,
          '--metrics_explanation',
          "{{$.inputs.artifacts['metrics'].metadata['explanation_gcs_path']}}",
          '--explanation',
          "{{$.inputs.artifacts['explanation'].metadata['explanation_gcs_path']}}",
      ])
@mock_api_call
def test_import_model_evaluation_contains_explanation_with_empty_explanation_override(
self, mock_api, _):
explanation_path = self.create_tempfile().full_path
with open(explanation_path, 'w') as f:
f.write(EXPLANATION_1)
with self.assertRaises(SystemExit):
main([
'--metrics',
self.metrics_path,
'--problem_type',
'classification',
'--model_name',
self._model_name,
'--gcp_resources',
self._gcp_resources,
'--metrics_explanation',
explanation_path,
'--explanation',
"{{$.inputs.artifacts['explanation'].metadata['explanation_gcs_path']}}",
])
@mock_api_call
def test_import_model_evaluation_contains_explanation_with_explanation_override(
self, mock_api, _):
# This explanation file will get overridden.
explanation_path_ignored = self.create_tempfile().full_path
with open(explanation_path_ignored, 'w') as f:
f.write(EXPLANATION_1)
explanation_path = self.create_tempfile().full_path
with open(explanation_path, 'w') as f:
f.write(EXPLANATION_2)
main([
'--metrics', self.metrics_path, '--metrics_explanation',
explanation_path_ignored, '--explanation', explanation_path,
'--problem_type', 'classification', '--model_name', self._model_name,
'--gcp_resources', self._gcp_resources
])
mock_api.assert_called_with(
mock.ANY,
parent=self._model_name,
model_evaluation={
'metrics':
to_value(
json.loads(METRICS)['slicedMetrics'][0]['metrics']
['regression']),
'metrics_schema_uri':
PROBLEM_TYPE_TO_SCHEMA_URI['classification'],
'model_explanation': {
'mean_attributions': [{
'feature_attributions':
to_value(
json.loads(EXPLANATION_2)['explanation']
['attributions'][0]['featureAttributions'])
}]
},
})
  @mock_api_call
  def test_import_model_evaluation_with_pipeline_id(self, mock_api, _):
    """--pipeline_job_id is forwarded in the evaluation metadata."""
    main([
        '--metrics', self.metrics_path, '--problem_type', 'classification',
        '--model_name', self._model_name, '--pipeline_job_id', PIPELINE_JOB_ID,
        '--gcp_resources', self._gcp_resources
    ])
    mock_api.assert_called_with(
        mock.ANY,
        parent=self._model_name,
        model_evaluation={
            'metrics':
                to_value(
                    json.loads(METRICS)['slicedMetrics'][0]['metrics']
                    ['regression']),
            'metrics_schema_uri':
                PROBLEM_TYPE_TO_SCHEMA_URI['classification'],
            'metadata':
                to_value({'pipeline_job_id': PIPELINE_JOB_ID})
        })
  @mock_api_call
  def test_import_model_evaluation_with_pipeline_resource_name(
      self, mock_api, _):
    """--pipeline_job_resource_name is forwarded in the metadata."""
    main([
        '--metrics', self.metrics_path, '--problem_type', 'classification',
        '--model_name', self._model_name, '--pipeline_job_resource_name',
        PIPELINE_JOB_ID, '--gcp_resources', self._gcp_resources
    ])
    mock_api.assert_called_with(
        mock.ANY,
        parent=self._model_name,
        model_evaluation={
            'metrics':
                to_value(
                    json.loads(METRICS)['slicedMetrics'][0]['metrics']
                    ['regression']),
            'metrics_schema_uri':
                PROBLEM_TYPE_TO_SCHEMA_URI['classification'],
            'metadata':
                to_value({'pipeline_job_resource_name': PIPELINE_JOB_ID})
        })
@mock_api_call
def test_import_model_evaluation_slice(self, _, mock_api):
main([
'--metrics', self.metrics_path, '--problem_type', 'classification',
'--model_name', self._model_name, '--pipeline_job_id', PIPELINE_JOB_ID,
'--gcp_resources', self._gcp_resources
])
mock_api.assert_called_with(
mock.ANY,
parent=MODEL_EVAL_NAME,
model_evaluation_slices=[{
'metrics_schema_uri':
SCHEMA_URI,
'metrics':
to_value(
json.loads(METRICS)['slicedMetrics'][1]['metrics']
['regression']),
'slice_': {
'dimension': 'annotationSpec',
'value': '0',
}
}])
@mock_api_call
def test_import_model_evaluation_with_classification_metrics_artifact(
    self, mock_api, _):
  # Metrics supplied via --classification_metrics imply the classification
  # schema without an explicit --problem_type.
  main([
      '--classification_metrics', self.metrics_path, '--model_name',
      self._model_name, '--gcp_resources', self._gcp_resources,
      '--display_name', DISPLAY_NAME
  ])
  mock_api.assert_called_with(
      mock.ANY,
      parent=self._model_name,
      model_evaluation={
          'metrics':
              to_value(
                  json.loads(METRICS)['slicedMetrics'][0]['metrics']
                  ['regression']),
          'metrics_schema_uri':
              PROBLEM_TYPE_TO_SCHEMA_URI['classification'],
          'display_name':
              DISPLAY_NAME,
      })
@mock_api_call
def test_import_model_evaluation_with_forecasting_metrics_artifact(
    self, mock_api, _):
  # Same shape as above, selecting the forecasting schema URI.
  main([
      '--forecasting_metrics', self.metrics_path, '--model_name',
      self._model_name, '--gcp_resources', self._gcp_resources,
      '--display_name', DISPLAY_NAME
  ])
  mock_api.assert_called_with(
      mock.ANY,
      parent=self._model_name,
      model_evaluation={
          'metrics':
              to_value(
                  json.loads(METRICS)['slicedMetrics'][0]['metrics']
                  ['regression']),
          'metrics_schema_uri':
              PROBLEM_TYPE_TO_SCHEMA_URI['forecasting'],
          'display_name':
              DISPLAY_NAME,
      })
@mock_api_call
def test_import_model_evaluation_with_regression_metrics_artifact(
    self, mock_api, _):
  # Same shape as above, selecting the regression schema URI.
  main([
      '--regression_metrics', self.metrics_path, '--model_name',
      self._model_name, '--gcp_resources', self._gcp_resources,
      '--display_name', DISPLAY_NAME
  ])
  mock_api.assert_called_with(
      mock.ANY,
      parent=self._model_name,
      model_evaluation={
          'metrics':
              to_value(
                  json.loads(METRICS)['slicedMetrics'][0]['metrics']
                  ['regression']),
          'metrics_schema_uri':
              PROBLEM_TYPE_TO_SCHEMA_URI['regression'],
          'display_name':
              DISPLAY_NAME,
      })
@mock_api_call
def test_import_model_evaluation_with_dataset_path(self, mock_api, _):
  # A single --dataset_path is wrapped into a one-element list in metadata.
  main([
      '--regression_metrics',
      self.metrics_path,
      '--dataset_path',
      'PATH',
      '--model_name',
      self._model_name,
      '--gcp_resources',
      self._gcp_resources,
  ])
  mock_api.assert_called_with(
      mock.ANY,
      parent=self._model_name,
      model_evaluation={
          'metrics':
              to_value(
                  json.loads(METRICS)['slicedMetrics'][0]['metrics']
                  ['regression']),
          'metrics_schema_uri':
              PROBLEM_TYPE_TO_SCHEMA_URI['regression'],
          'metadata':
              to_value({'evaluation_dataset_path': ['PATH']}),
      })
@mock_api_call
def test_import_model_evaluation_with_dataset_paths(self, mock_api, _):
  # --dataset_paths accepts a JSON-encoded list of paths.
  PATHS = ['path1', 'path2']
  main([
      '--regression_metrics',
      self.metrics_path,
      '--dataset_paths',
      json.dumps(PATHS),
      '--model_name',
      self._model_name,
      '--gcp_resources',
      self._gcp_resources,
  ])
  mock_api.assert_called_with(
      mock.ANY,
      parent=self._model_name,
      model_evaluation={
          'metrics':
              to_value(
                  json.loads(METRICS)['slicedMetrics'][0]['metrics']
                  ['regression']),
          'metrics_schema_uri':
              PROBLEM_TYPE_TO_SCHEMA_URI['regression'],
          'metadata':
              to_value({'evaluation_dataset_path': PATHS}),
      })
@mock_api_call
def test_import_model_evaluation_with_invalid_dataset_paths(
    self, mock_api, _):
  # Malformed JSON in --dataset_paths is ignored: no metadata key is set.
  main([
      '--regression_metrics',
      self.metrics_path,
      '--dataset_paths',
      '{notvalidjson}',
      '--model_name',
      self._model_name,
      '--gcp_resources',
      self._gcp_resources,
  ])
  mock_api.assert_called_with(
      mock.ANY,
      parent=self._model_name,
      model_evaluation={
          'metrics':
              to_value(
                  json.loads(METRICS)['slicedMetrics'][0]['metrics']
                  ['regression']),
          'metrics_schema_uri':
              PROBLEM_TYPE_TO_SCHEMA_URI['regression'],
      })
| {
"content_hash": "175e1fee6937af93e0bc76c365dbd472",
"timestamp": "",
"source": "github",
"line_count": 551,
"max_line_length": 129,
"avg_line_length": 36.421052631578945,
"alnum_prop": 0.5691150089695037,
"repo_name": "kubeflow/pipelines",
"id": "9b440d4a27750461c9f6fb93960458e19f5397e4",
"size": "20068",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "components/google-cloud/tests/container/experimental/evaluation/test_import_model_evaluation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "799"
},
{
"name": "CSS",
"bytes": "2171"
},
{
"name": "Dockerfile",
"bytes": "49331"
},
{
"name": "Go",
"bytes": "1903937"
},
{
"name": "HTML",
"bytes": "3656"
},
{
"name": "JavaScript",
"bytes": "544297"
},
{
"name": "Jinja",
"bytes": "938"
},
{
"name": "Jupyter Notebook",
"bytes": "359548"
},
{
"name": "Makefile",
"bytes": "22164"
},
{
"name": "Mustache",
"bytes": "23652"
},
{
"name": "PowerShell",
"bytes": "3194"
},
{
"name": "Python",
"bytes": "5684887"
},
{
"name": "Shell",
"bytes": "264595"
},
{
"name": "Smarty",
"bytes": "8295"
},
{
"name": "Starlark",
"bytes": "553"
},
{
"name": "TypeScript",
"bytes": "4294958"
}
],
"symlink_target": ""
} |
"""
Django settings for count_word_api project.
Generated by 'django-admin startproject' using Django 1.10.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
from decouple import Csv, config
from dj_database_url import parse as dburl
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', default=False, cast=bool)
ALLOWED_HOSTS = config('ALLOWED_HOSTS', default=[], cast=Csv())
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# 3rd apps
'rest_framework',
'rest_framework.authtoken',
# my apps
'count_word_api.core',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'count_word_api.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'count_word_api.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
default_dburl = 'sqlite:///' + os.path.join(BASE_DIR, 'db.sqlite3')
DATABASES = {
'default': config('DATABASE_URL', default=default_dburl, cast=dburl),
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': (
'django.contrib.auth.password_validation.'
'UserAttributeSimilarityValidator'
)
},
{
'NAME': (
'django.contrib.auth.password_validation.MinimumLengthValidator'
)
},
{
'NAME': (
'django.contrib.auth.password_validation.CommonPasswordValidator'
)
},
{
'NAME': (
'django.contrib.auth.password_validation.NumericPasswordValidator'
)
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'pt-br'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
| {
"content_hash": "034bd96564f1588b765218896c374ad3",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 79,
"avg_line_length": 26.350746268656717,
"alnum_prop": 0.6726139903709997,
"repo_name": "rafaelhenrique/count_word_api",
"id": "4ea47769a27643776ebfeb9231a0cb9f293226cb",
"size": "3531",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "count_word_api/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2060"
},
{
"name": "Python",
"bytes": "12204"
}
],
"symlink_target": ""
} |
from .singlepageobjectbase import SinglePageObjectBase
from .locator import Locator
from . import commands
class PageObject(SinglePageObjectBase):
    """Main general-purpose page object class."""
    DEFAULT_ROOT_NAME = 'page_object'
    """Default name for a root page object."""
    def __init__(self, locator, chain=True, webdriver=None, name=None):
        """
        Create a page object and its children.
        :param str locator: Xpath describing location of the page
            object in the DOM.
        :param bool chain: Determines whether to chain locator
            to its parent.
        :param webdriver: Only needs to be provided for root page object.
        :param str name: Name used when the page object is a root.
        :type webdriver: :class:`selenium.webdriver.Remote` instance or None
        :Example usage:
        .. code-block:: python
            from pageobject import PageObject
            top_panel = PageObject("//*[@class='topPanel']")
        """
        self._initialized_locator = locator
        self._chain = chain
        self._webdriver = webdriver
        self._name = name
        # No parent until this object is attached to a tree; presumably the
        # parent is assigned by init_children() on the enclosing object.
        self._parent = None
        self.init_children()
    # commands
    # The following class attributes expose the free functions from the
    # `commands` module as methods of PageObject. Aliases marked
    # "deprecated" are kept for backward compatibility with older callers.
    is_enabled = commands.is_enabled
    is_displayed = commands.is_displayed
    is_visible = commands.is_visible # deprecated
    is_interactive = commands.is_interactive
    wait_until_displayed = commands.wait_until_displayed
    wait_for_visible = commands.wait_for_visible # deprecated
    wait_until_enabled = commands.wait_until_enabled
    wait_for_enabled = commands.wait_for_enabled # deprecated
    wait_until_interactive = commands.wait_until_interactive
    wait_for_interactive = commands.wait_for_interactive # deprecated
    click = commands.click
    clear = commands.clear
    get_value = commands.get_value
    set_value = commands.set_value
    move_to = commands.move_to
    send_keys = commands.send_keys
| {
"content_hash": "237f50051896258f8042cfc72aeed63f",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 76,
"avg_line_length": 32.88135593220339,
"alnum_prop": 0.668041237113402,
"repo_name": "lukas-linhart/pageobject",
"id": "3c67e8cab420e8a6775cc85ed0e6a6addc1b4eb3",
"size": "1940",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pageobject/pageobject.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "103810"
}
],
"symlink_target": ""
} |
'''
Alerts
======
The following methods allow for interaction into the Tenable.sc
`Alert <https://docs.tenable.com/sccv/api/Alert.html>`_ API.
Methods available on ``sc.alerts``:
.. rst-class:: hide-signature
.. autoclass:: AlertAPI
:members:
.. _iCal Date-Time:
https://tools.ietf.org/html/rfc5545#section-3.3.5
.. _iCal Recurrence Rule:
https://tools.ietf.org/html/rfc5545#section-3.3.10
'''
from .base import SCEndpoint
from tenable.utils import dict_merge
class AlertAPI(SCEndpoint):
    def _constructor(self, *filters, **kw):
        '''
        Handles building an alert document.
        Accepts filter tuples plus keyword arguments and normalizes them
        into the shape the Tenable.sc alert endpoints expect.
        '''
        # call the analysis query constructor to assemble a query.
        if len(filters) > 0:
            # checking to see if data_type was passed. If it wasn't then we
            # will set the value to the default of 'vuln'.
            if 'data_type' not in kw:
                kw['data_type'] = 'vuln'
            kw['type'] = kw['data_type']
            kw['tool'] = ''
            kw = self._query_constructor(*filters, **kw)
            # Strip the query-constructor-only keys; presumably the alert
            # schema rejects them -- confirm against _query_constructor.
            del(kw['data_type'])
            del(kw['query']['tool'])
            del(kw['tool'])
        elif 'query_id' in kw:
            kw = self._query_constructor(*filters, **kw)
        if 'name' in kw:
            kw['name'] = self._check('name', kw['name'], str)
        if 'description' in kw:
            kw['description'] = self._check(
                'description', kw['description'], str)
        if 'query' in kw:
            kw['query'] = self._check('query', kw['query'], dict)
        if 'always_exec_on_trigger' in kw:
            # executeOnEveryTrigger expected a boolean response as a lower-case
            # string. We will accept a boolean and then transform it into a
            # string value.
            kw['executeOnEveryTrigger'] = str(self._check(
                'always_exec_on_trigger', kw['always_exec_on_trigger'], bool)).lower()
            del(kw['always_exec_on_trigger'])
        if 'trigger' in kw:
            # here we will be expanding the trigger from the common format of
            # tuples that we are using within pytenable into the native
            # supported format that SecurityCenter expects.
            self._check('trigger', kw['trigger'], tuple)
            kw['triggerName'] = self._check(
                'triggerName', kw['trigger'][0], str)
            kw['triggerOperator'] = self._check(
                'triggerOperator', kw['trigger'][1], str,
                choices=['>=', '<=', '=', '!='])
            kw['triggerValue'] = self._check(
                'triggerValue', kw['trigger'][2], str)
            del(kw['trigger'])
        # hand off the building the schedule sub-document to the schedule
        # document builder.
        if 'schedule' in kw:
            kw['schedule'] = self._schedule_constructor(kw['schedule'])
        # FR: at some point we should start looking into checking and
        # normalizing the action document.
        return kw
    def list(self, fields=None):
        '''
        Retrieves the list of alerts.
        :sc-api:`alert: list <Alert.html#AlertRESTReference-/alert>`
        Args:
            fields (list, optional):
                A list of attributes to return for each alert.
        Returns:
            :obj:`dict`:
                A list of alert resources.
        Examples:
            >>> for alert in sc.alerts.list()['manageable']:
            ...     pprint(alert)
        '''
        params = dict()
        if fields:
            params['fields'] = ','.join([self._check('field', f, str)
                for f in fields])
        return self._api.get('alert', params=params).json()['response']
    def details(self, id, fields=None):
        '''
        Returns the details for a specific alert.
        :sc-api:`alert: details <Alert.html#AlertRESTReference-/alert/{id}>`
        Args:
            id (int): The identifier for the alert.
            fields (list, optional): A list of attributes to return.
        Returns:
            :obj:`dict`:
                The alert resource record.
        Examples:
            >>> alert = sc.alerts.detail(1)
            >>> pprint(alert)
        '''
        params = dict()
        if fields:
            params['fields'] = ','.join([self._check('field', f, str) for f in fields])
        return self._api.get('alert/{}'.format(self._check('id', id, int)),
            params=params).json()['response']
    def create(self, *filters, **kw):
        '''
        Creates a new alert. The fields below are explicitly checked, however
        any additional parameters mentioned in the API docs can be passed to the
        document constructor.
        :sc-api:`alert: create <Alert.html#alert_POST>`
        Args:
            *filters (tuple):
                A filter expression. Refer to the detailed description within
                the analysis endpoint documentation for more details on how to
                formulate filter expressions.
            data_type (str):
                The type of filters being used. Must be of type ``lce``,
                ``ticket``, ``user``, or ``vuln``. If no data-type is
                specified, then the default of ``vuln`` will be set.
            name (str): The name of the alert.
            description (str, optional): A description for the alert.
            trigger (tuple):
                A tuple in the filter-tuple format detailing what would
                constitute a trigger. For example: ``('sumip', '=', '1000')``.
            always_exec_on_trigger (bool, optional):
                Should the trigger always execute when the trigger fires, or
                only execute when the returned data changes?
                Default is ``False``.
            schedule (dict, optional):
                This is the schedule dictionary that will inform Tenable.sc how
                often to run the alert. If left unspecified then we will
                default to ``{'type': 'never'}``.
            action (list):
                The action(s) that will be performed when the alert trigger
                fires. Each action is a dictionary detailing what type of
                action to take, and the details surrounding that action. The
                supported type of actions are ``email``, ``notifications``,
                ``report``, ``scan``, ``syslog``, and ``ticket``. The following
                examples lay out each type of action as an example:
                * Email action type:
                .. code-block:: python
                    {'type': 'email',
                     'subject': 'Example Email Subject',
                     'message': 'Example Email Body'
                     'addresses': 'user1@company.com\\nuser2@company.com',
                     'users': [{'id': 1}, {'id': 2}],
                     'includeResults': 'true'}
                * Notification action type:
                .. code-block:: python
                    {'type': 'notification',
                     'message': 'Example notification',
                     'users': [{'id': 1}, {'id': 2}]}
                * Report action type:
                .. code-block:: python
                    {'type': 'report',
                     'report': {'id': 1}}
                * Scan action type:
                .. code-block:: python
                    {'type': 'scan',
                     'scan': {'id': 1}}
                * Syslog action type:
                .. code-block:: python
                    {'type': 'syslog',
                     'host': '127.0.0.1',
                     'port': '514',
                     'message': 'Example Syslog Message',
                     'severity': 'Critical'}
                * Ticket action type:
                .. code-block:: python
                    {'type': 'ticket',
                     'assignee': {'id': 1},
                     'name': 'Example Ticket Name',
                     'description': 'Example Ticket Description',
                     'notes': 'Example Ticket Notes'}
        Returns:
            :obj:`dict`:
                The alert resource created.
        Examples:
            >>> sc.alerts.create(
            ...     ('severity', '=', '3,4'),
            ...     ('exploitAvailable', '=', 'true'),
            ...     trigger=('sumip', '>=', '100'),
            ...     name='Too many High or Critical and Exploitable',
            ...     action=[{
            ...         'type': 'notification',
            ...         'message': 'Too many High or Crit Exploitable Vulns',
            ...         'users': [{'id': 1}]
            ...     }])
        '''
        payload = self._constructor(*filters, **kw)
        return self._api.post('alert', json=payload).json()['response']
    def edit(self, id, *filters, **kw):
        '''
        Updates an existing alert. All fields are optional and will overwrite
        the existing value.
        :sc-api:`alert: update <Alert.html#alert_id_PATCH>`
        Args:
            id (int): The alert identifier.
            *filters (tuple):
                A filter expression. Refer to the detailed description within
                the analysis endpoint documentation for more details on how to
                formulate filter expressions.
            data_type (str):
                The type of filters being used. Must be of type ``lce``,
                ``ticket``, ``user``, or ``vuln``. If no data-type is
                specified, then the default of ``vuln`` will be set.
            name (str, optional): The name of the alert.
            description (str, optional): A description for the alert.
            trigger (tuple, optional):
                A tuple in the filter-tuple format detailing what would
                constitute a trigger. For example: ``('sumip', '=', '1000')``.
            always_exec_on_trigger (bool, optional):
                Should the trigger always execute when the trigger fires, or
                only execute when the returned data changes?
                Default is ``False``.
            schedule (dict, optional):
                This is the schedule dictionary that will inform Tenable.sc how
                often to run the alert. If left unspecified then we will
                default to ``{'type': 'never'}``.
            action (list):
                The action(s) that will be performed when the alert trigger
                fires. Each action is a dictionary detailing what type of
                action to take, and the details surrounding that action.
        Returns:
            :obj:`dict`:
                The modified alert resource.
        Examples:
            >>> sc.alerts.update(1, name='New Alert Name')
        '''
        payload = self._constructor(*filters, **kw)
        return self._api.patch('alert/{}'.format(
            self._check('id', id, int)), json=payload).json()['response']
    def delete(self, id):
        '''
        Deletes the specified alert.
        :sc-api:`alert: delete <Alert.html#alert_id_DELETE>`
        Args:
            id (int): The alert identifier.
        Returns:
            :obj:`str`:
                The response code of the action.
        Examples:
            >>> sc.alerts.delete(1)
        '''
        return self._api.delete('alert/{}'.format(
            self._check('id', id, int))).json()['response']
    def execute(self, id):
        '''
        Executes the specified alert.
        :sc-api:`alert: execute <Alert.html#AlertRESTReference-/alert/{id}/execute>`
        Args:
            id (int): The alert identifier.
        Returns:
            :obj:`dict`:
                The alert resource.
        '''
        return self._api.post('alert/{}/execute'.format(
            self._check('id', id, int))).json()['response']
| {
"content_hash": "965a660469a37d323cba29e1d8404896",
"timestamp": "",
"source": "github",
"line_count": 325,
"max_line_length": 87,
"avg_line_length": 36.53230769230769,
"alnum_prop": 0.5123389202391981,
"repo_name": "tenable/pyTenable",
"id": "939c8b159e8a3ec413c0365f772fe408c83df803",
"size": "11873",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tenable/sc/alerts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2769266"
}
],
"symlink_target": ""
} |
from time import sleep, time
from redis.exceptions import ResponseError
import inspect
from functools import wraps
import sys
from nose.plugins.skip import SkipTest
from nose.plugins.attrib import attr
import redis
import redis.client
import fakeredis
from datetime import datetime, timedelta
PY2 = sys.version_info[0] == 2
# On Python 3 alias the removed ``long`` builtin so the tests below can use
# it unconditionally.
if not PY2:
    long = int
# Python 2.6 lacks the newer assertion methods; unittest2 backports them.
if sys.version_info[:2] == (2, 6):
    import unittest2 as unittest
else:
    import unittest
def redis_must_be_running(cls):
    """Class decorator that skips all test_* methods when no local Redis
    server answers on localhost:6379.

    NOTE: availability is determined at import time rather than when the
    tests actually run; this could be improved.
    """
    try:
        redis.StrictRedis('localhost', port=6379).ping()
        redis_running = True
    except redis.ConnectionError:
        redis_running = False
    if not redis_running:
        for name, attribute in inspect.getmembers(cls):
            if not name.startswith('test_'):
                continue

            @wraps(attribute)
            def skip_test(*args, **kwargs):
                raise SkipTest("Redis is not running.")
            setattr(cls, name, skip_test)
        cls.setUp = lambda x: None
        cls.tearDown = lambda x: None
    return cls
def key_val_dict(size=100):
    """Return ``size`` key/value byte-string pairs for bulk-test fixtures.

    Each entry maps ``b'key:' + bytes([i])`` to ``b'val:' + bytes([i])``,
    i.e. the counter is appended as a raw byte value, not ASCII digits.
    """
    pairs = ((b'key:' + bytes([i]), b'val:' + bytes([i]))
             for i in range(size))
    return dict(pairs)
class TestFakeStrictRedis(unittest.TestCase):
    def setUp(self):
        # Fresh client for every test.
        self.redis = self.create_redis()
    def tearDown(self):
        # Wipe all keys so no state leaks between tests.
        self.redis.flushall()
        del self.redis
    if sys.version_info >= (3,):
        # assertItemsEqual was renamed to assertCountEqual in Python 3;
        # provide the old name so tests can call it on either version.
        def assertItemsEqual(self, a, b):
            return self.assertCountEqual(a, b)
    def create_redis(self, db=0):
        # Factory hook; presumably overridden by sibling test classes to
        # exercise other client flavors -- confirm against the rest of file.
        return fakeredis.FakeStrictRedis(db=db)
    # --- basic get/set and bit-operation behavior ---
    def test_flushdb(self):
        self.redis.set('foo', 'bar')
        self.assertEqual(self.redis.keys(), [b'foo'])
        self.assertEqual(self.redis.flushdb(), True)
        self.assertEqual(self.redis.keys(), [])
    def test_set_then_get(self):
        self.assertEqual(self.redis.set('foo', 'bar'), True)
        self.assertEqual(self.redis.get('foo'), b'bar')
    def test_get_does_not_exist(self):
        self.assertEqual(self.redis.get('foo'), None)
    def test_get_with_non_str_keys(self):
        self.assertEqual(self.redis.set('2', 'bar'), True)
        self.assertEqual(self.redis.get(2), b'bar')
    def test_get_invalid_type(self):
        # GET on a hash key must raise WRONGTYPE.
        self.assertEqual(self.redis.hset('foo', 'key', 'value'), 1)
        with self.assertRaises(redis.ResponseError):
            self.redis.get('foo')
    def test_set_non_str_keys(self):
        self.assertEqual(self.redis.set(2, 'bar'), True)
        self.assertEqual(self.redis.get(2), b'bar')
        self.assertEqual(self.redis.get('2'), b'bar')
    def test_getbit(self):
        self.redis.setbit('foo', 3, 1)
        self.assertEqual(self.redis.getbit('foo', 0), 0)
        self.assertEqual(self.redis.getbit('foo', 1), 0)
        self.assertEqual(self.redis.getbit('foo', 2), 0)
        self.assertEqual(self.redis.getbit('foo', 3), 1)
        self.assertEqual(self.redis.getbit('foo', 4), 0)
        # Offsets past the end of the value read as 0.
        self.assertEqual(self.redis.getbit('foo', 100), 0)
    def test_multiple_bits_set(self):
        self.redis.setbit('foo', 1, 1)
        self.redis.setbit('foo', 3, 1)
        self.redis.setbit('foo', 5, 1)
        self.assertEqual(self.redis.getbit('foo', 0), 0)
        self.assertEqual(self.redis.getbit('foo', 1), 1)
        self.assertEqual(self.redis.getbit('foo', 2), 0)
        self.assertEqual(self.redis.getbit('foo', 3), 1)
        self.assertEqual(self.redis.getbit('foo', 4), 0)
        self.assertEqual(self.redis.getbit('foo', 5), 1)
        self.assertEqual(self.redis.getbit('foo', 6), 0)
    def test_unset_bits(self):
        self.redis.setbit('foo', 1, 1)
        self.redis.setbit('foo', 2, 0)
        self.redis.setbit('foo', 3, 1)
        self.assertEqual(self.redis.getbit('foo', 1), 1)
        self.redis.setbit('foo', 1, 0)
        self.assertEqual(self.redis.getbit('foo', 1), 0)
        self.redis.setbit('foo', 3, 0)
        self.assertEqual(self.redis.getbit('foo', 3), 0)
    def test_setbits_and_getkeys(self):
        # The bit operations and the get commands
        # should play nicely with each other.
        self.redis.setbit('foo', 1, 1)
        self.assertEqual(self.redis.get('foo'), b'@')
        self.redis.setbit('foo', 2, 1)
        self.assertEqual(self.redis.get('foo'), b'`')
        self.redis.setbit('foo', 3, 1)
        self.assertEqual(self.redis.get('foo'), b'p')
        self.redis.setbit('foo', 9, 1)
        self.assertEqual(self.redis.get('foo'), b'p@')
        self.redis.setbit('foo', 54, 1)
        self.assertEqual(self.redis.get('foo'), b'p@\x00\x00\x00\x00\x02')
    def test_bitcount(self):
        self.redis.delete('foo')
        self.assertEqual(self.redis.bitcount('foo'), 0)
        self.redis.setbit('foo', 1, 1)
        self.assertEqual(self.redis.bitcount('foo'), 1)
        self.redis.setbit('foo', 8, 1)
        self.assertEqual(self.redis.bitcount('foo'), 2)
        # start/end arguments are byte ranges, not bit ranges.
        self.assertEqual(self.redis.bitcount('foo', 1, 1), 1)
        self.redis.setbit('foo', 57, 1)
        self.assertEqual(self.redis.bitcount('foo'), 3)
        self.redis.set('foo', ' ')
        self.assertEqual(self.redis.bitcount('foo'), 1)
    # --- GETSET, string helpers, INCR/DECR family ---
    def test_getset_not_exist(self):
        # GETSET returns the old value (None for a missing key).
        val = self.redis.getset('foo', 'bar')
        self.assertEqual(val, None)
        self.assertEqual(self.redis.get('foo'), b'bar')
    def test_getset_exists(self):
        self.redis.set('foo', 'bar')
        val = self.redis.getset('foo', 'baz')
        self.assertEqual(val, b'bar')
    def test_setitem_getitem(self):
        self.assertEqual(self.redis.keys(), [])
        self.redis['foo'] = 'bar'
        self.assertEqual(self.redis['foo'], b'bar')
    def test_strlen(self):
        self.redis['foo'] = 'bar'
        self.assertEqual(self.redis.strlen('foo'), 3)
        self.assertEqual(self.redis.strlen('noexists'), 0)
    def test_substr(self):
        # SUBSTR uses inclusive indices and supports negative offsets.
        self.redis['foo'] = 'one_two_three'
        self.assertEqual(self.redis.substr('foo', 0), b'one_two_three')
        self.assertEqual(self.redis.substr('foo', 0, 2), b'one')
        self.assertEqual(self.redis.substr('foo', 4, 6), b'two')
        self.assertEqual(self.redis.substr('foo', -5), b'three')
    def test_substr_noexist_key(self):
        self.assertEqual(self.redis.substr('foo', 0), b'')
        self.assertEqual(self.redis.substr('foo', 10), b'')
        self.assertEqual(self.redis.substr('foo', -5, -1), b'')
    def test_append(self):
        self.assertTrue(self.redis.set('foo', 'bar'))
        self.assertEqual(self.redis.append('foo', 'baz'), 6)
        self.assertEqual(self.redis.get('foo'), b'barbaz')
    def test_incr_with_no_preexisting_key(self):
        self.assertEqual(self.redis.incr('foo'), 1)
        self.assertEqual(self.redis.incr('bar', 2), 2)
    def test_incr_by(self):
        self.assertEqual(self.redis.incrby('foo'), 1)
        self.assertEqual(self.redis.incrby('bar', 2), 2)
    def test_incr_preexisting_key(self):
        self.redis.set('foo', 15)
        self.assertEqual(self.redis.incr('foo', 5), 20)
        self.assertEqual(self.redis.get('foo'), b'20')
    def test_incr_bad_type(self):
        # INCR on a non-integer value must raise.
        self.redis.set('foo', 'bar')
        with self.assertRaises(redis.ResponseError):
            self.redis.incr('foo', 15)
    def test_incrbyfloat(self):
        self.redis.set('foo', 0)
        self.assertEqual(self.redis.incrbyfloat('foo', 1.0), 1.0)
        self.assertEqual(self.redis.incrbyfloat('foo', 1.0), 2.0)
    def test_incrbyfloat_with_noexist(self):
        self.assertEqual(self.redis.incrbyfloat('foo', 1.0), 1.0)
        self.assertEqual(self.redis.incrbyfloat('foo', 1.0), 2.0)
    def test_incrbyfloat_bad_type(self):
        self.redis.set('foo', 'bar')
        with self.assertRaisesRegexp(redis.ResponseError, 'not a valid float'):
            self.redis.incrbyfloat('foo', 1.0)
    def test_decr(self):
        self.redis.set('foo', 10)
        self.assertEqual(self.redis.decr('foo'), 9)
        self.assertEqual(self.redis.get('foo'), b'9')
    def test_decr_newkey(self):
        # DECR on a missing key starts from 0.
        self.redis.decr('foo')
        self.assertEqual(self.redis.get('foo'), b'-1')
    def test_decr_badtype(self):
        self.redis.set('foo', 'bar')
        with self.assertRaises(redis.ResponseError):
            self.redis.decr('foo', 15)
def test_exists(self):
self.assertFalse('foo' in self.redis)
self.redis.set('foo', 'bar')
self.assertTrue('foo' in self.redis)
def test_contains(self):
self.assertFalse(self.redis.exists('foo'))
self.redis.set('foo', 'bar')
self.assertTrue(self.redis.exists('foo'))
    # --- RENAME/RENAMENX, MGET/MSET, SETEX/SETNX, DELETE, ECHO ---
    def test_rename(self):
        self.redis.set('foo', 'unique value')
        self.assertTrue(self.redis.rename('foo', 'bar'))
        self.assertEqual(self.redis.get('foo'), None)
        self.assertEqual(self.redis.get('bar'), b'unique value')
    def test_rename_nonexistent_key(self):
        with self.assertRaises(redis.ResponseError):
            self.redis.rename('foo', 'bar')
    def test_renamenx_doesnt_exist(self):
        self.redis.set('foo', 'unique value')
        self.assertTrue(self.redis.renamenx('foo', 'bar'))
        self.assertEqual(self.redis.get('foo'), None)
        self.assertEqual(self.redis.get('bar'), b'unique value')
    def test_rename_does_exist(self):
        # NOTE(review): despite the name, this covers RENAMENX when the
        # destination already exists (it must refuse to overwrite).
        self.redis.set('foo', 'unique value')
        self.redis.set('bar', 'unique value2')
        self.assertFalse(self.redis.renamenx('foo', 'bar'))
        self.assertEqual(self.redis.get('foo'), b'unique value')
        self.assertEqual(self.redis.get('bar'), b'unique value2')
    def test_mget(self):
        self.redis.set('foo', 'one')
        self.redis.set('bar', 'two')
        self.assertEqual(self.redis.mget(['foo', 'bar']), [b'one', b'two'])
        self.assertEqual(self.redis.mget(['foo', 'bar', 'baz']),
                         [b'one', b'two', None])
        self.assertEqual(self.redis.mget('foo', 'bar'), [b'one', b'two'])
        self.assertEqual(self.redis.mget('foo', 'bar', None),
                         [b'one', b'two', None])
    def test_mgset_with_no_keys_raises_error(self):
        # NOTE(review): "mgset" looks like a typo for "mget".
        with self.assertRaisesRegexp(
                redis.ResponseError, 'wrong number of arguments'):
            self.redis.mget([])
    def test_mset_with_no_keys_raises_error(self):
        with self.assertRaisesRegexp(
                redis.RedisError, 'MSET requires'):
            self.redis.mset([])
    def test_mset(self):
        self.assertEqual(self.redis.mset({'foo': 'one', 'bar': 'two'}), True)
        self.assertEqual(self.redis.mset({'foo': 'one', 'bar': 'two'}), True)
        self.assertEqual(self.redis.mget('foo', 'bar'), [b'one', b'two'])
    def test_mset_accepts_kwargs(self):
        self.assertEqual(
            self.redis.mset(foo='one', bar='two'), True)
        self.assertEqual(
            self.redis.mset(foo='one', baz='three'), True)
        self.assertEqual(self.redis.mget('foo', 'bar', 'baz'),
                         [b'one', b'two', b'three'])
    def test_msetnx(self):
        # MSETNX is all-or-nothing: a single existing key aborts the set.
        self.assertEqual(self.redis.msetnx({'foo': 'one', 'bar': 'two'}),
                         True)
        self.assertEqual(self.redis.msetnx({'bar': 'two', 'baz': 'three'}),
                         False)
        self.assertEqual(self.redis.mget('foo', 'bar', 'baz'),
                         [b'one', b'two', None])
    def test_setex(self):
        self.assertEqual(self.redis.setex('foo', 100, 'bar'), True)
        self.assertEqual(self.redis.get('foo'), b'bar')
    def test_setex_using_timedelta(self):
        self.assertEqual(
            self.redis.setex('foo', timedelta(seconds=100), 'bar'), True)
        self.assertEqual(self.redis.get('foo'), b'bar')
    def test_setnx(self):
        self.assertEqual(self.redis.setnx('foo', 'bar'), True)
        self.assertEqual(self.redis.get('foo'), b'bar')
        self.assertEqual(self.redis.setnx('foo', 'baz'), False)
        self.assertEqual(self.redis.get('foo'), b'bar')
    def test_delete(self):
        self.redis['foo'] = 'bar'
        self.assertEqual(self.redis.delete('foo'), True)
        self.assertEqual(self.redis.get('foo'), None)
    def test_echo(self):
        self.assertEqual(self.redis.echo(b'hello'), b'hello')
        self.assertEqual(self.redis.echo('hello'), b'hello')
    @attr('slow')
    def test_delete_expire(self):
        # Re-setting a deleted key must clear the original TTL.
        self.redis.set("foo", "bar", ex=1)
        self.redis.delete("foo")
        self.redis.set("foo", "bar")
        sleep(2)
        self.assertEqual(self.redis.get("foo"), b'bar')
    def test_delete_multiple(self):
        self.redis['one'] = 'one'
        self.redis['two'] = 'two'
        self.redis['three'] = 'three'
        # Since redis>=2.7.6 returns number of deleted items.
        self.assertEqual(self.redis.delete('one', 'two'), 2)
        self.assertEqual(self.redis.get('one'), None)
        self.assertEqual(self.redis.get('two'), None)
        self.assertEqual(self.redis.get('three'), b'three')
        self.assertEqual(self.redis.delete('one', 'two'), False)
        # If any keys are deleted, True is returned.
        self.assertEqual(self.redis.delete('two', 'three'), True)
        self.assertEqual(self.redis.get('three'), None)
    def test_delete_nonexistent_key(self):
        self.assertEqual(self.redis.delete('foo'), False)
# Tests for the list type.
    # Non-string values pushed onto a list are stringified; the expected
    # repr differs between Python 2 (`12345L`) and Python 3, hence the
    # PY2 conditionals. `long` is the py2/py3 compat alias used file-wide.
    def test_rpush_then_lrange_with_nested_list1(self):
        self.assertEqual(self.redis.rpush('foo', [long(12345), long(6789)]), 1)
        self.assertEqual(self.redis.rpush('foo', [long(54321), long(9876)]), 2)
        self.assertEqual(self.redis.lrange(
            'foo', 0, -1), ['[12345L, 6789L]', '[54321L, 9876L]'] if PY2 else
            [b'[12345, 6789]', b'[54321, 9876]'])
        self.redis.flushall()
    def test_rpush_then_lrange_with_nested_list2(self):
        self.assertEqual(self.redis.rpush('foo', [long(12345), 'banana']), 1)
        self.assertEqual(self.redis.rpush('foo', [long(54321), 'elephant']), 2)
        self.assertEqual(self.redis.lrange(
            'foo', 0, -1),
            ['[12345L, \'banana\']', '[54321L, \'elephant\']'] if PY2 else
            [b'[12345, \'banana\']', b'[54321, \'elephant\']'])
        self.redis.flushall()
    def test_rpush_then_lrange_with_nested_list3(self):
        self.assertEqual(self.redis.rpush('foo', [long(12345), []]), 1)
        self.assertEqual(self.redis.rpush('foo', [long(54321), []]), 2)
        self.assertEqual(self.redis.lrange(
            'foo', 0, -1), ['[12345L, []]', '[54321L, []]'] if PY2 else
            [b'[12345, []]', b'[54321, []]'])
        self.redis.flushall()
    # LPUSH prepends, so lrange returns items in reverse push order.
    def test_lpush_then_lrange_all(self):
        self.assertEqual(self.redis.lpush('foo', 'bar'), 1)
        self.assertEqual(self.redis.lpush('foo', 'baz'), 2)
        # Multi-value lpush returns the new length of the list.
        self.assertEqual(self.redis.lpush('foo', 'bam', 'buzz'), 4)
        self.assertEqual(self.redis.lrange('foo', 0, -1),
                         [b'buzz', b'bam', b'baz', b'bar'])
    def test_lpush_then_lrange_portion(self):
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'two')
        self.redis.lpush('foo', 'three')
        self.redis.lpush('foo', 'four')
        # lrange's end index is inclusive.
        self.assertEqual(self.redis.lrange('foo', 0, 2),
                         [b'four', b'three', b'two'])
        self.assertEqual(self.redis.lrange('foo', 0, 3),
                         [b'four', b'three', b'two', b'one'])
    def test_lpush_key_does_not_exist(self):
        self.assertEqual(self.redis.lrange('foo', 0, -1), [])
    def test_lpush_with_nonstr_key(self):
        # Integer and string forms of the same key must address one list.
        self.redis.lpush(1, 'one')
        self.redis.lpush(1, 'two')
        self.redis.lpush(1, 'three')
        self.assertEqual(self.redis.lrange(1, 0, 2),
                         [b'three', b'two', b'one'])
        self.assertEqual(self.redis.lrange('1', 0, 2),
                         [b'three', b'two', b'one'])
    def test_llen(self):
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'two')
        self.redis.lpush('foo', 'three')
        self.assertEqual(self.redis.llen('foo'), 3)
    def test_llen_no_exist(self):
        # llen on a missing key is 0, not an error.
        self.assertEqual(self.redis.llen('foo'), 0)
    # LREM: positive count removes from the head, negative from the tail,
    # zero removes all occurrences.
    def test_lrem_postitive_count(self):
        # NOTE(review): "postitive" is a typo for "positive" in the test
        # name; left unchanged to preserve the test's identity.
        self.redis.lpush('foo', 'same')
        self.redis.lpush('foo', 'same')
        self.redis.lpush('foo', 'different')
        self.redis.lrem('foo', 2, 'same')
        self.assertEqual(self.redis.lrange('foo', 0, -1), [b'different'])
    def test_lrem_negative_count(self):
        self.redis.lpush('foo', 'removeme')
        self.redis.lpush('foo', 'three')
        self.redis.lpush('foo', 'two')
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'removeme')
        self.redis.lrem('foo', -1, 'removeme')
        # Should remove it from the end of the list,
        # leaving the 'removeme' from the front of the list alone.
        self.assertEqual(self.redis.lrange('foo', 0, -1),
                         [b'removeme', b'one', b'two', b'three'])
    def test_lrem_zero_count(self):
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'one')
        self.redis.lrem('foo', 0, 'one')
        self.assertEqual(self.redis.lrange('foo', 0, -1), [])
    def test_lrem_default_value(self):
        # NOTE(review): identical to test_lrem_zero_count above — it passes
        # count=0 explicitly, so the *default* count is never exercised.
        # Presumably the count argument was meant to be omitted here;
        # confirm against the lrem signature of the targeted redis-py
        # version before changing.
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'one')
        self.redis.lrem('foo', 0, 'one')
        self.assertEqual(self.redis.lrange('foo', 0, -1), [])
    def test_lrem_does_not_exist(self):
        self.redis.lpush('foo', 'one')
        self.redis.lrem('foo', 0, 'one')
        # These should be noops.
        self.redis.lrem('foo', -2, 'one')
        self.redis.lrem('foo', 2, 'one')
    def test_lrem_return_value(self):
        # lrem returns the number of elements actually removed.
        self.redis.lpush('foo', 'one')
        count = self.redis.lrem('foo', 0, 'one')
        self.assertEqual(count, 1)
        self.assertEqual(self.redis.lrem('foo', 0, 'one'), 0)
    def test_rpush(self):
        # RPUSH appends; multi-value push appends in argument order.
        self.redis.rpush('foo', 'one')
        self.redis.rpush('foo', 'two')
        self.redis.rpush('foo', 'three')
        self.redis.rpush('foo', 'four', 'five')
        self.assertEqual(self.redis.lrange('foo', 0, -1),
                         [b'one', b'two', b'three', b'four', b'five'])
    def test_lpop(self):
        self.assertEqual(self.redis.rpush('foo', 'one'), 1)
        self.assertEqual(self.redis.rpush('foo', 'two'), 2)
        self.assertEqual(self.redis.rpush('foo', 'three'), 3)
        self.assertEqual(self.redis.lpop('foo'), b'one')
        self.assertEqual(self.redis.lpop('foo'), b'two')
        self.assertEqual(self.redis.lpop('foo'), b'three')
    def test_lpop_empty_list(self):
        self.redis.rpush('foo', 'one')
        self.redis.lpop('foo')
        self.assertEqual(self.redis.lpop('foo'), None)
        # Verify what happens if we try to pop from a key
        # we've never seen before.
        self.assertEqual(self.redis.lpop('noexists'), None)
    def test_lset(self):
        # lset supports negative indices (counted from the tail).
        self.redis.rpush('foo', 'one')
        self.redis.rpush('foo', 'two')
        self.redis.rpush('foo', 'three')
        self.redis.lset('foo', 0, 'four')
        self.redis.lset('foo', -2, 'five')
        self.assertEqual(self.redis.lrange('foo', 0, -1),
                         [b'four', b'five', b'three'])
    def test_lset_index_out_of_range(self):
        self.redis.rpush('foo', 'one')
        with self.assertRaises(redis.ResponseError):
            self.redis.lset('foo', 3, 'three')
    def test_rpushx(self):
        # rpushx only appends when the key already holds a list.
        self.redis.rpush('foo', 'one')
        self.redis.rpushx('foo', 'two')
        self.redis.rpushx('bar', 'three')
        self.assertEqual(self.redis.lrange('foo', 0, -1), [b'one', b'two'])
        self.assertEqual(self.redis.lrange('bar', 0, -1), [])
    def test_ltrim(self):
        # ltrim keeps only the inclusive [start, end] slice of the list.
        self.redis.rpush('foo', 'one')
        self.redis.rpush('foo', 'two')
        self.redis.rpush('foo', 'three')
        self.redis.rpush('foo', 'four')
        self.assertTrue(self.redis.ltrim('foo', 1, 3))
        self.assertEqual(self.redis.lrange('foo', 0, -1), [b'two', b'three',
                                                           b'four'])
        self.assertTrue(self.redis.ltrim('foo', 1, -1))
        self.assertEqual(self.redis.lrange('foo', 0, -1), [b'three', b'four'])
    def test_ltrim_with_non_existent_key(self):
        # Trimming a missing key still replies OK.
        self.assertTrue(self.redis.ltrim('foo', 0, -1))
    def test_lindex(self):
        # Out-of-range index or missing key yields None.
        self.redis.rpush('foo', 'one')
        self.redis.rpush('foo', 'two')
        self.assertEqual(self.redis.lindex('foo', 0), b'one')
        self.assertEqual(self.redis.lindex('foo', 4), None)
        self.assertEqual(self.redis.lindex('bar', 4), None)
    def test_lpushx(self):
        # lpushx only prepends when the key already holds a list.
        self.redis.lpush('foo', 'two')
        self.redis.lpushx('foo', 'one')
        self.redis.lpushx('bar', 'one')
        self.assertEqual(self.redis.lrange('foo', 0, -1), [b'one', b'two'])
        self.assertEqual(self.redis.lrange('bar', 0, -1), [])
    def test_rpop(self):
        self.assertEqual(self.redis.rpop('foo'), None)
        self.redis.rpush('foo', 'one')
        self.redis.rpush('foo', 'two')
        self.assertEqual(self.redis.rpop('foo'), b'two')
        self.assertEqual(self.redis.rpop('foo'), b'one')
        self.assertEqual(self.redis.rpop('foo'), None)
    def test_linsert(self):
        self.redis.rpush('foo', 'hello')
        self.redis.rpush('foo', 'world')
        self.redis.linsert('foo', 'before', 'world', 'there')
        self.assertEqual(self.redis.lrange('foo', 0, -1),
                         [b'hello', b'there', b'world'])
    def test_rpoplpush(self):
        # Atomically pops the tail of 'foo' and pushes it onto the head
        # of 'bar'; no-op (None) when the source is missing.
        self.assertEqual(self.redis.rpoplpush('foo', 'bar'), None)
        self.assertEqual(self.redis.lpop('bar'), None)
        self.redis.rpush('foo', 'one')
        self.redis.rpush('foo', 'two')
        self.redis.rpush('bar', 'one')
        self.assertEqual(self.redis.rpoplpush('foo', 'bar'), b'two')
        self.assertEqual(self.redis.lrange('foo', 0, -1), [b'one'])
        self.assertEqual(self.redis.lrange('bar', 0, -1), [b'two', b'one'])
    def test_rpoplpush_to_nonexistent_destination(self):
        self.redis.rpush('foo', 'one')
        self.assertEqual(self.redis.rpoplpush('foo', 'bar'), b'one')
        self.assertEqual(self.redis.rpop('bar'), b'one')
    # Blocking list ops. When data is already available they return a
    # (key, value) tuple immediately; with nothing available they block
    # up to `timeout` seconds and then return None.
    def test_blpop_single_list(self):
        self.redis.rpush('foo', 'one')
        self.redis.rpush('foo', 'two')
        self.redis.rpush('foo', 'three')
        self.assertEqual(self.redis.blpop(['foo'], timeout=1),
                         (b'foo', b'one'))
    def test_blpop_test_multiple_lists(self):
        # Keys are checked in the order given; the first non-empty wins.
        self.redis.rpush('baz', 'zero')
        self.assertEqual(self.redis.blpop(['foo', 'baz'], timeout=1),
                         (b'baz', b'zero'))
        self.redis.rpush('foo', 'one')
        self.redis.rpush('foo', 'two')
        # bar has nothing, so the returned value should come
        # from foo.
        self.assertEqual(self.redis.blpop(['bar', 'foo'], timeout=1),
                         (b'foo', b'one'))
        self.redis.rpush('bar', 'three')
        # bar now has something, so the returned value should come
        # from bar.
        self.assertEqual(self.redis.blpop(['bar', 'foo'], timeout=1),
                         (b'bar', b'three'))
        self.assertEqual(self.redis.blpop(['bar', 'foo'], timeout=1),
                         (b'foo', b'two'))
    def test_blpop_allow_single_key(self):
        # blpop converts single key arguments to a one element list.
        self.redis.rpush('foo', 'one')
        self.assertEqual(self.redis.blpop('foo', timeout=1), (b'foo', b'one'))
    def test_brpop_test_multiple_lists(self):
        self.redis.rpush('baz', 'zero')
        self.assertEqual(self.redis.brpop(['foo', 'baz'], timeout=1),
                         (b'baz', b'zero'))
        self.redis.rpush('foo', 'one')
        self.redis.rpush('foo', 'two')
        self.assertEqual(self.redis.brpop(['bar', 'foo'], timeout=1),
                         (b'foo', b'two'))
    def test_brpop_single_key(self):
        self.redis.rpush('foo', 'one')
        self.redis.rpush('foo', 'two')
        self.assertEqual(self.redis.brpop('foo', timeout=1),
                         (b'foo', b'two'))
    def test_brpoplpush_multi_keys(self):
        self.assertEqual(self.redis.lpop('bar'), None)
        self.redis.rpush('foo', 'one')
        self.redis.rpush('foo', 'two')
        self.assertEqual(self.redis.brpoplpush('foo', 'bar', timeout=1),
                         b'two')
        self.assertEqual(self.redis.lrange('bar', 0, -1), [b'two'])
    @attr('slow')
    def test_blocking_operations_when_empty(self):
        # Each call blocks for the full 1s timeout, hence the 'slow' tag.
        self.assertEqual(self.redis.blpop(['foo'], timeout=1),
                         None)
        self.assertEqual(self.redis.blpop(['bar', 'foo'], timeout=1),
                         None)
        self.assertEqual(self.redis.brpop('foo', timeout=1),
                         None)
        self.assertEqual(self.redis.brpoplpush('foo', 'bar', timeout=1),
                         None)
# Tests for the hash type.
    def test_hset_then_hget(self):
        # hset returns 1 when a new field is created.
        self.assertEqual(self.redis.hset('foo', 'key', 'value'), 1)
        self.assertEqual(self.redis.hget('foo', 'key'), b'value')
    def test_hset_update(self):
        # ...and 0 when an existing field is overwritten.
        self.assertEqual(self.redis.hset('foo', 'key', 'value'), 1)
        self.assertEqual(self.redis.hset('foo', 'key', 'value'), 0)
    def test_hgetall(self):
        self.assertEqual(self.redis.hset('foo', 'k1', 'v1'), 1)
        self.assertEqual(self.redis.hset('foo', 'k2', 'v2'), 1)
        self.assertEqual(self.redis.hset('foo', 'k3', 'v3'), 1)
        self.assertEqual(self.redis.hgetall('foo'), {b'k1': b'v1',
                                                     b'k2': b'v2',
                                                     b'k3': b'v3'})
    def test_hgetall_with_tuples(self):
        # Non-string fields/values are stringified on the way in.
        self.assertEqual(self.redis.hset('foo', (1, 2), (1, 2, 3)), 1)
        self.assertEqual(self.redis.hgetall('foo'), {b'(1, 2)': b'(1, 2, 3)'})
    def test_hgetall_empty_key(self):
        self.assertEqual(self.redis.hgetall('foo'), {})
    def test_hexists(self):
        self.redis.hset('foo', 'bar', 'v1')
        self.assertEqual(self.redis.hexists('foo', 'bar'), 1)
        self.assertEqual(self.redis.hexists('foo', 'baz'), 0)
        self.assertEqual(self.redis.hexists('bar', 'bar'), 0)
    def test_hkeys(self):
        self.redis.hset('foo', 'k1', 'v1')
        self.redis.hset('foo', 'k2', 'v2')
        self.assertEqual(set(self.redis.hkeys('foo')), set([b'k1', b'k2']))
        self.assertEqual(set(self.redis.hkeys('bar')), set([]))
    def test_hlen(self):
        self.redis.hset('foo', 'k1', 'v1')
        self.redis.hset('foo', 'k2', 'v2')
        self.assertEqual(self.redis.hlen('foo'), 2)
    def test_hvals(self):
        self.redis.hset('foo', 'k1', 'v1')
        self.redis.hset('foo', 'k2', 'v2')
        self.assertEqual(set(self.redis.hvals('foo')), set([b'v1', b'v2']))
        self.assertEqual(set(self.redis.hvals('bar')), set([]))
    def test_hmget(self):
        # hmget accepts fields either as a list or as varargs; missing
        # fields map to None positionally.
        self.redis.hset('foo', 'k1', 'v1')
        self.redis.hset('foo', 'k2', 'v2')
        self.redis.hset('foo', 'k3', 'v3')
        # Normal case.
        self.assertEqual(self.redis.hmget('foo', ['k1', 'k3']), [b'v1', b'v3'])
        self.assertEqual(self.redis.hmget('foo', 'k1', 'k3'), [b'v1', b'v3'])
        # Key does not exist.
        self.assertEqual(self.redis.hmget('bar', ['k1', 'k3']), [None, None])
        self.assertEqual(self.redis.hmget('bar', 'k1', 'k3'), [None, None])
        # Some keys in the hash do not exist.
        self.assertEqual(self.redis.hmget('foo', ['k1', 'k500']),
                         [b'v1', None])
        self.assertEqual(self.redis.hmget('foo', 'k1', 'k500'),
                         [b'v1', None])
    def test_hdel(self):
        self.redis.hset('foo', 'k1', 'v1')
        self.redis.hset('foo', 'k2', 'v2')
        self.redis.hset('foo', 'k3', 'v3')
        self.assertEqual(self.redis.hget('foo', 'k1'), b'v1')
        self.assertEqual(self.redis.hdel('foo', 'k1'), True)
        self.assertEqual(self.redis.hget('foo', 'k1'), None)
        self.assertEqual(self.redis.hdel('foo', 'k1'), False)
        # Since redis>=2.7.6 returns number of deleted items.
        self.assertEqual(self.redis.hdel('foo', 'k2', 'k3'), 2)
        self.assertEqual(self.redis.hget('foo', 'k2'), None)
        self.assertEqual(self.redis.hget('foo', 'k3'), None)
        self.assertEqual(self.redis.hdel('foo', 'k2', 'k3'), False)
    def test_hincrby(self):
        self.redis.hset('foo', 'counter', 0)
        # Default increment is 1.
        self.assertEqual(self.redis.hincrby('foo', 'counter'), 1)
        self.assertEqual(self.redis.hincrby('foo', 'counter'), 2)
        self.assertEqual(self.redis.hincrby('foo', 'counter'), 3)
    def test_hincrby_with_no_starting_value(self):
        # A missing field is treated as 0.
        self.assertEqual(self.redis.hincrby('foo', 'counter'), 1)
        self.assertEqual(self.redis.hincrby('foo', 'counter'), 2)
        self.assertEqual(self.redis.hincrby('foo', 'counter'), 3)
    def test_hincrby_with_range_param(self):
        self.assertEqual(self.redis.hincrby('foo', 'counter', 2), 2)
        self.assertEqual(self.redis.hincrby('foo', 'counter', 2), 4)
        self.assertEqual(self.redis.hincrby('foo', 'counter', 2), 6)
    def test_hincrbyfloat(self):
        self.redis.hset('foo', 'counter', 0.0)
        self.assertEqual(self.redis.hincrbyfloat('foo', 'counter'), 1.0)
        self.assertEqual(self.redis.hincrbyfloat('foo', 'counter'), 2.0)
        self.assertEqual(self.redis.hincrbyfloat('foo', 'counter'), 3.0)
    def test_hincrbyfloat_with_no_starting_value(self):
        self.assertEqual(self.redis.hincrbyfloat('foo', 'counter'), 1.0)
        self.assertEqual(self.redis.hincrbyfloat('foo', 'counter'), 2.0)
        self.assertEqual(self.redis.hincrbyfloat('foo', 'counter'), 3.0)
    def test_hincrbyfloat_with_range_param(self):
        # assertAlmostEqual guards against float rounding drift.
        self.assertAlmostEqual(
            self.redis.hincrbyfloat('foo', 'counter', 0.1), 0.1)
        self.assertAlmostEqual(
            self.redis.hincrbyfloat('foo', 'counter', 0.1), 0.2)
        self.assertAlmostEqual(
            self.redis.hincrbyfloat('foo', 'counter', 0.1), 0.3)
    def test_hincrbyfloat_on_non_float_value_raises_error(self):
        self.redis.hset('foo', 'counter', 'cat')
        with self.assertRaises(redis.ResponseError):
            self.redis.hincrbyfloat('foo', 'counter')
    def test_hincrbyfloat_with_non_float_amount_raises_error(self):
        with self.assertRaises(redis.ResponseError):
            self.redis.hincrbyfloat('foo', 'counter', 'cat')
    def test_hsetnx(self):
        self.assertEqual(self.redis.hsetnx('foo', 'newkey', 'v1'), True)
        self.assertEqual(self.redis.hsetnx('foo', 'newkey', 'v1'), False)
        self.assertEqual(self.redis.hget('foo', 'newkey'), b'v1')
    def test_hmsetset_empty_raises_error(self):
        # NOTE(review): "hmsetset" looks like a typo for "hmset" in these
        # two test names; left unchanged to preserve test identity.
        with self.assertRaises(redis.DataError):
            self.redis.hmset('foo', {})
    def test_hmsetset(self):
        self.redis.hset('foo', 'k1', 'v1')
        self.assertEqual(self.redis.hmset('foo', {'k2': 'v2', 'k3': 'v3'}),
                         True)
    def test_hmset_convert_values(self):
        self.redis.hmset('foo', {'k1': True, 'k2': 1})
        self.assertEqual(
            self.redis.hgetall('foo'), {b'k1': b'True', b'k2': b'1'})
    def test_hmset_does_not_mutate_input_params(self):
        original = {'key': [123, 456]}
        self.redis.hmset('foo', original)
        self.assertEqual(original, {'key': [123, 456]})
    def test_sadd(self):
        # sadd returns the number of members actually added (duplicates
        # are not counted).
        self.assertEqual(self.redis.sadd('foo', 'member1'), 1)
        self.assertEqual(self.redis.sadd('foo', 'member1'), 0)
        self.assertEqual(self.redis.smembers('foo'), set([b'member1']))
        self.assertEqual(self.redis.sadd('foo', 'member2', 'member3'), 2)
        self.assertEqual(self.redis.smembers('foo'),
                         set([b'member1', b'member2', b'member3']))
        self.assertEqual(self.redis.sadd('foo', 'member3', 'member4'), 1)
        self.assertEqual(self.redis.smembers('foo'),
                         set([b'member1', b'member2', b'member3', b'member4']))
    def test_sadd_as_str_type(self):
        # Integer members are stringified.
        self.assertEqual(self.redis.sadd('foo', *range(3)), 3)
        self.assertEqual(self.redis.smembers('foo'), set([b'0', b'1', b'2']))
    # SCAN: cursor 0 in the reply means iteration is complete.
    def test_scan_single(self):
        self.redis.set('foo1', 'bar1')
        self.assertEqual(self.redis.scan(match="foo*"), (0, [b'foo1']))
    def test_scan_iter_single_page(self):
        self.redis.set('foo1', 'bar1')
        self.redis.set('foo2', 'bar2')
        self.assertEqual(set(self.redis.scan_iter(match="foo*")),
                         set([b'foo1', b'foo2']))
    def test_scan_iter_multiple_pages(self):
        all_keys = key_val_dict(size=100)
        self.assertTrue(
            all(self.redis.set(k, v) for k, v in all_keys.items()))
        self.assertEqual(
            set(self.redis.scan_iter()),
            set(all_keys))
    def test_scan_iter_multiple_pages_with_match(self):
        all_keys = key_val_dict(size=100)
        self.assertTrue(
            all(self.redis.set(k, v) for k, v in all_keys.items()))
        # Now add a few keys that don't match the key:<number> pattern.
        self.redis.set('otherkey', 'foo')
        self.redis.set('andanother', 'bar')
        actual = set(self.redis.scan_iter(match='key:*'))
        self.assertEqual(actual, set(all_keys))
    def test_scan_multiple_pages_with_count_arg(self):
        all_keys = key_val_dict(size=100)
        self.assertTrue(
            all(self.redis.set(k, v) for k, v in all_keys.items()))
        self.assertEqual(
            set(self.redis.scan_iter(count=1000)),
            set(all_keys))
    def test_scan_all_in_single_call(self):
        all_keys = key_val_dict(size=100)
        self.assertTrue(
            all(self.redis.set(k, v) for k, v in all_keys.items()))
        # Specify way more than the 100 keys we've added.
        actual = self.redis.scan(count=1000)
        self.assertEqual(set(actual[1]), set(all_keys))
        self.assertEqual(actual[0], 0)
def test_scard(self):
self.redis.sadd('foo', 'member1')
self.redis.sadd('foo', 'member2')
self.redis.sadd('foo', 'member2')
self.assertEqual(self.redis.scard('foo'), 2)
    def test_sdiff(self):
        self.redis.sadd('foo', 'member1')
        self.redis.sadd('foo', 'member2')
        self.redis.sadd('bar', 'member2')
        self.redis.sadd('bar', 'member3')
        self.assertEqual(self.redis.sdiff('foo', 'bar'), set([b'member1']))
        # Original sets shouldn't be modified.
        self.assertEqual(self.redis.smembers('foo'),
                         set([b'member1', b'member2']))
        self.assertEqual(self.redis.smembers('bar'),
                         set([b'member2', b'member3']))
    def test_sdiff_one_key(self):
        # sdiff with a single key is just that set's members.
        self.redis.sadd('foo', 'member1')
        self.redis.sadd('foo', 'member2')
        self.assertEqual(self.redis.sdiff('foo'),
                         set([b'member1', b'member2']))
    def test_sdiff_empty(self):
        self.assertEqual(self.redis.sdiff('foo'), set())
    def test_sdiffstore(self):
        # *store variants return the cardinality of the stored result.
        self.redis.sadd('foo', 'member1')
        self.redis.sadd('foo', 'member2')
        self.redis.sadd('bar', 'member2')
        self.redis.sadd('bar', 'member3')
        self.assertEqual(self.redis.sdiffstore('baz', 'foo', 'bar'), 1)
    def test_sinter(self):
        self.redis.sadd('foo', 'member1')
        self.redis.sadd('foo', 'member2')
        self.redis.sadd('bar', 'member2')
        self.redis.sadd('bar', 'member3')
        self.assertEqual(self.redis.sinter('foo', 'bar'), set([b'member2']))
        self.assertEqual(self.redis.sinter('foo'),
                         set([b'member1', b'member2']))
    def test_sinterstore(self):
        self.redis.sadd('foo', 'member1')
        self.redis.sadd('foo', 'member2')
        self.redis.sadd('bar', 'member2')
        self.redis.sadd('bar', 'member3')
        self.assertEqual(self.redis.sinterstore('baz', 'foo', 'bar'), 1)
    def test_sismember(self):
        self.assertEqual(self.redis.sismember('foo', 'member1'), False)
        self.redis.sadd('foo', 'member1')
        self.assertEqual(self.redis.sismember('foo', 'member1'), True)
    def test_smembers(self):
        self.assertEqual(self.redis.smembers('foo'), set())
    def test_smove(self):
        self.redis.sadd('foo', 'member1')
        self.redis.sadd('foo', 'member2')
        self.assertEqual(self.redis.smove('foo', 'bar', 'member1'), True)
        self.assertEqual(self.redis.smembers('bar'), set([b'member1']))
    def test_smove_non_existent_key(self):
        self.assertEqual(self.redis.smove('foo', 'bar', 'member1'), False)
    def test_spop(self):
        # This is tricky because it pops a random element.
        self.redis.sadd('foo', 'member1')
        self.assertEqual(self.redis.spop('foo'), b'member1')
        self.assertEqual(self.redis.spop('foo'), None)
    def test_srandmember(self):
        self.redis.sadd('foo', 'member1')
        self.assertEqual(self.redis.srandmember('foo'), b'member1')
        # Shouldn't be removed from the set.
        self.assertEqual(self.redis.srandmember('foo'), b'member1')
    def test_srem(self):
        self.redis.sadd('foo', 'member1', 'member2', 'member3', 'member4')
        self.assertEqual(self.redis.smembers('foo'),
                         set([b'member1', b'member2', b'member3', b'member4']))
        self.assertEqual(self.redis.srem('foo', 'member1'), True)
        self.assertEqual(self.redis.smembers('foo'),
                         set([b'member2', b'member3', b'member4']))
        self.assertEqual(self.redis.srem('foo', 'member1'), False)
        # Since redis>=2.7.6 returns number of deleted items.
        self.assertEqual(self.redis.srem('foo', 'member2', 'member3'), 2)
        self.assertEqual(self.redis.smembers('foo'), set([b'member4']))
        self.assertEqual(self.redis.srem('foo', 'member3', 'member4'), True)
        self.assertEqual(self.redis.smembers('foo'), set([]))
        self.assertEqual(self.redis.srem('foo', 'member3', 'member4'), False)
    def test_sunion(self):
        self.redis.sadd('foo', 'member1')
        self.redis.sadd('foo', 'member2')
        self.redis.sadd('bar', 'member2')
        self.redis.sadd('bar', 'member3')
        self.assertEqual(self.redis.sunion('foo', 'bar'),
                         set([b'member1', b'member2', b'member3']))
    def test_sunionstore(self):
        self.redis.sadd('foo', 'member1')
        self.redis.sadd('foo', 'member2')
        self.redis.sadd('bar', 'member2')
        self.redis.sadd('bar', 'member3')
        self.assertEqual(self.redis.sunionstore('baz', 'foo', 'bar'), 3)
        self.assertEqual(self.redis.smembers('baz'),
                         set([b'member1', b'member2', b'member3']))
    # ZADD here uses the pre-redis-py-3.0 calling convention: positional
    # args alternate score, member; kwargs are member=score. The mixed
    # positional/keyword forms below are part of what is being tested.
    def test_zadd(self):
        self.redis.zadd('foo', four=4)
        self.redis.zadd('foo', three=3)
        # Return value counts only newly-added members.
        self.assertEqual(self.redis.zadd('foo', 2, 'two', 1, 'one', zero=0), 3)
        self.assertEqual(self.redis.zrange('foo', 0, -1),
                         [b'zero', b'one', b'two', b'three', b'four'])
        # Re-adding 'zero' with a new score updates it but isn't counted.
        self.assertEqual(self.redis.zadd('foo', 7, 'zero', one=1, five=5), 1)
        self.assertEqual(self.redis.zrange('foo', 0, -1),
                         [b'one', b'two', b'three', b'four', b'five', b'zero'])
    def test_zadd_uses_str(self):
        # Non-string members are stringified.
        self.redis.zadd('foo', 12345, (1, 2, 3))
        self.assertEqual(self.redis.zrange('foo', 0, 0), [b'(1, 2, 3)'])
    def test_zadd_errors(self):
        # The args are backwards, it should be 2, "two", so we
        # expect an exception to be raised.
        with self.assertRaises(redis.ResponseError):
            self.redis.zadd('foo', 'two', 2)
        with self.assertRaises(redis.ResponseError):
            self.redis.zadd('foo', two='two')
        # It's expected an equal number of values and scores
        with self.assertRaises(redis.RedisError):
            self.redis.zadd('foo', 'two')
    def test_zadd_multiple(self):
        self.redis.zadd('foo', 1, 'one', 2, 'two')
        self.assertEqual(self.redis.zrange('foo', 0, 0),
                         [b'one'])
        self.assertEqual(self.redis.zrange('foo', 1, 1),
                         [b'two'])
    def test_zrange_same_score(self):
        # Equal scores fall back to lexicographic ordering of members.
        self.redis.zadd('foo', two_a=2)
        self.redis.zadd('foo', two_b=2)
        self.redis.zadd('foo', two_c=2)
        self.redis.zadd('foo', two_d=2)
        self.redis.zadd('foo', two_e=2)
        self.assertEqual(self.redis.zrange('foo', 2, 3),
                         [b'two_c', b'two_d'])
    def test_zcard(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.assertEqual(self.redis.zcard('foo'), 2)
    def test_zcard_non_existent_key(self):
        self.assertEqual(self.redis.zcard('foo'), 0)
    def test_zcount(self):
        # Member names here are arbitrary labels; note 'three' deliberately
        # carries score 2 — only the scores matter to zcount.
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', three=2)
        self.redis.zadd('foo', five=5)
        self.assertEqual(self.redis.zcount('foo', 2, 4), 1)
        self.assertEqual(self.redis.zcount('foo', 1, 4), 2)
        self.assertEqual(self.redis.zcount('foo', 0, 5), 3)
        self.assertEqual(self.redis.zcount('foo', 4, '+inf'), 1)
        self.assertEqual(self.redis.zcount('foo', '-inf', 4), 2)
        self.assertEqual(self.redis.zcount('foo', '-inf', '+inf'), 3)
    def test_zcount_exclusive(self):
        # A '(' prefix makes that bound exclusive.
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', three=2)
        self.redis.zadd('foo', five=5)
        self.assertEqual(self.redis.zcount('foo', '-inf', '(2'), 1)
        self.assertEqual(self.redis.zcount('foo', '-inf', 2), 2)
        self.assertEqual(self.redis.zcount('foo', '(5', '+inf'), 0)
        self.assertEqual(self.redis.zcount('foo', '(1', 5), 2)
        self.assertEqual(self.redis.zcount('foo', '(2', '(5'), 0)
        self.assertEqual(self.redis.zcount('foo', '(1', '(5'), 1)
        self.assertEqual(self.redis.zcount('foo', 2, '(5'), 1)
    def test_zincrby(self):
        # Old redis-py argument order: (name, member, amount).
        self.redis.zadd('foo', one=1)
        self.assertEqual(self.redis.zincrby('foo', 'one', 10), 11)
        self.assertEqual(self.redis.zrange('foo', 0, -1, withscores=True),
                         [(b'one', 11)])
    def test_zrange_descending(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrange('foo', 0, -1, desc=True),
                         [b'three', b'two', b'one'])
    def test_zrange_descending_with_scores(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrange('foo', 0, -1, desc=True,
                                           withscores=True),
                         [(b'three', 3), (b'two', 2), (b'one', 1)])
    def test_zrange_descending_with_scores_cast_func(self):
        # score_cast_func is applied to the raw score string, so str()
        # yields "3.0" etc. rather than ints.
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrange('foo', 0, -1, desc=True,
                                           withscores=True, score_cast_func=str),
                         [('three', "3.0"), ('two', "2.0"), ('one', "1.0")])
    def test_zrange_with_positive_indices(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrange('foo', 0, 1), [b'one', b'two'])
    def test_zrank(self):
        # Ranks are 0-based, ordered by ascending score.
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrank('foo', 'one'), 0)
        self.assertEqual(self.redis.zrank('foo', 'two'), 1)
        self.assertEqual(self.redis.zrank('foo', 'three'), 2)
    def test_zrank_non_existent_member(self):
        self.assertEqual(self.redis.zrank('foo', 'one'), None)
    def test_zrem(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.redis.zadd('foo', four=4)
        self.assertEqual(self.redis.zrem('foo', 'one'), True)
        self.assertEqual(self.redis.zrange('foo', 0, -1),
                         [b'two', b'three', b'four'])
        # Since redis>=2.7.6 returns number of deleted items.
        self.assertEqual(self.redis.zrem('foo', 'two', 'three'), 2)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [b'four'])
        self.assertEqual(self.redis.zrem('foo', 'three', 'four'), True)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [])
        self.assertEqual(self.redis.zrem('foo', 'three', 'four'), False)
    def test_zrem_non_existent_member(self):
        self.assertFalse(self.redis.zrem('foo', 'one'))
    def test_zrem_numeric_member(self):
        # Members given as ints must match their string-stored form.
        self.redis.zadd('foo', **{'128': 13.0, '129': 12.0})
        self.assertEqual(self.redis.zrem('foo', 128), True)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [b'129'])
    def test_zscore(self):
        self.redis.zadd('foo', one=54)
        self.assertEqual(self.redis.zscore('foo', 'one'), 54)
    def test_zscore_non_existent_member(self):
        self.assertIsNone(self.redis.zscore('foo', 'one'))
    def test_zrevrank(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrevrank('foo', 'one'), 2)
        self.assertEqual(self.redis.zrevrank('foo', 'two'), 1)
        self.assertEqual(self.redis.zrevrank('foo', 'three'), 0)
    def test_zrevrank_non_existent_member(self):
        self.assertEqual(self.redis.zrevrank('foo', 'one'), None)
    def test_zrevrange(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrevrange('foo', 0, 1), [b'three', b'two'])
        self.assertEqual(self.redis.zrevrange('foo', 0, -1),
                         [b'three', b'two', b'one'])
    def test_zrevrange_sorted_keys(self):
        # Equal scores tie-break lexicographically, reversed here.
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', 2, 'two_b')
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrevrange('foo', 0, 2),
                         [b'three', b'two_b', b'two'])
        self.assertEqual(self.redis.zrevrange('foo', 0, -1),
                         [b'three', b'two_b', b'two', b'one'])
    def test_zrangebyscore(self):
        # Score bounds are inclusive; '-inf'/'+inf' are open-ended.
        self.redis.zadd('foo', zero=0)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', two_a_also=2)
        self.redis.zadd('foo', two_b_also=2)
        self.redis.zadd('foo', four=4)
        self.assertEqual(self.redis.zrangebyscore('foo', 1, 3),
                         [b'two', b'two_a_also', b'two_b_also'])
        self.assertEqual(self.redis.zrangebyscore('foo', 2, 3),
                         [b'two', b'two_a_also', b'two_b_also'])
        self.assertEqual(self.redis.zrangebyscore('foo', 0, 4),
                         [b'zero', b'two', b'two_a_also', b'two_b_also',
                          b'four'])
        self.assertEqual(self.redis.zrangebyscore('foo', '-inf', 1),
                         [b'zero'])
        self.assertEqual(self.redis.zrangebyscore('foo', 2, '+inf'),
                         [b'two', b'two_a_also', b'two_b_also', b'four'])
        self.assertEqual(self.redis.zrangebyscore('foo', '-inf', '+inf'),
                         [b'zero', b'two', b'two_a_also', b'two_b_also',
                          b'four'])
    def test_zrangebysore_exclusive(self):
        # NOTE(review): "zrangebysore" is a typo for "zrangebyscore" in the
        # test name; left unchanged to preserve test identity.
        # A '(' prefix on a bound makes it exclusive.
        self.redis.zadd('foo', zero=0)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', four=4)
        self.redis.zadd('foo', five=5)
        self.assertEqual(self.redis.zrangebyscore('foo', '(0', 6),
                         [b'two', b'four', b'five'])
        self.assertEqual(self.redis.zrangebyscore('foo', '(2', '(5'),
                         [b'four'])
        self.assertEqual(self.redis.zrangebyscore('foo', 0, '(4'),
                         [b'zero', b'two'])
    def test_zrangebyscore_raises_error(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        with self.assertRaises(redis.ResponseError):
            self.redis.zrangebyscore('foo', 'one', 2)
        with self.assertRaises(redis.ResponseError):
            self.redis.zrangebyscore('foo', 2, 'three')
        with self.assertRaises(redis.ResponseError):
            self.redis.zrangebyscore('foo', 2, '3)')
        with self.assertRaises(redis.RedisError):
            # start/num must be given together.
            self.redis.zrangebyscore('foo', 2, '3)', 0, None)
    def test_zrangebyscore_slice(self):
        # The trailing (start, num) pair paginates the score-filtered range.
        self.redis.zadd('foo', two_a=2)
        self.redis.zadd('foo', two_b=2)
        self.redis.zadd('foo', two_c=2)
        self.redis.zadd('foo', two_d=2)
        self.assertEqual(self.redis.zrangebyscore('foo', 0, 4, 0, 2),
                         [b'two_a', b'two_b'])
        self.assertEqual(self.redis.zrangebyscore('foo', 0, 4, 1, 3),
                         [b'two_b', b'two_c', b'two_d'])
    def test_zrangebyscore_withscores(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrangebyscore('foo', 1, 3, 0, 2, True),
                         [(b'one', 1), (b'two', 2)])
    def test_zrangebyscore_withscores_cast_func(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrangebyscore('foo', 1, 3, 0, 2, True, score_cast_func=str),
                         [('one', "1.0"), ('two', "2.0")])
    def test_zrevrangebyscore(self):
        # Reversed variant: max bound comes first, results descend.
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrevrangebyscore('foo', 3, 1),
                         [b'three', b'two', b'one'])
        self.assertEqual(self.redis.zrevrangebyscore('foo', 3, 2),
                         [b'three', b'two'])
        self.assertEqual(self.redis.zrevrangebyscore('foo', 3, 1, 0, 1),
                         [b'three'])
        self.assertEqual(self.redis.zrevrangebyscore('foo', 3, 1, 1, 2),
                         [b'two', b'one'])
    def test_zrevrangebyscore_exclusive(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zrevrangebyscore('foo', '(3', 1),
                         [b'two', b'one'])
        self.assertEqual(self.redis.zrevrangebyscore('foo', 3, '(2'),
                         [b'three'])
        self.assertEqual(self.redis.zrevrangebyscore('foo', '(3', '(1'),
                         [b'two'])
        self.assertEqual(self.redis.zrevrangebyscore('foo', '(2', 1, 0, 1),
                         [b'one'])
        self.assertEqual(self.redis.zrevrangebyscore('foo', '(2', '(1', 0, 1),
                         [])
        self.assertEqual(self.redis.zrevrangebyscore('foo', '(3', '(0', 1, 2),
                         [b'one'])
    def test_zrevrangebyscore_raises_error(self):
        # Non-numeric or malformed bounds must raise.
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        with self.assertRaises(redis.ResponseError):
            self.redis.zrevrangebyscore('foo', 'three', 1)
        with self.assertRaises(redis.ResponseError):
            self.redis.zrevrangebyscore('foo', 3, 'one')
        with self.assertRaises(redis.ResponseError):
            self.redis.zrevrangebyscore('foo', 3, '1)')
        with self.assertRaises(redis.ResponseError):
            self.redis.zrevrangebyscore('foo', '((3', '1)')
    def test_zremrangebyrank(self):
        # Removes by 0-based rank range; returns number removed.
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zremrangebyrank('foo', 0, 1), 2)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [b'three'])
    def test_zremrangebyrank_negative_indices(self):
        self.redis.zadd('foo', one=1)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', three=3)
        self.assertEqual(self.redis.zremrangebyrank('foo', -2, -1), 2)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [b'one'])
    def test_zremrangebyrank_out_of_bounds(self):
        self.redis.zadd('foo', one=1)
        self.assertEqual(self.redis.zremrangebyrank('foo', 1, 3), 0)
    def test_zremrangebyscore(self):
        self.redis.zadd('foo', zero=0)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', four=4)
        # Outside of range.
        self.assertEqual(self.redis.zremrangebyscore('foo', 5, 10), 0)
        self.assertEqual(self.redis.zrange('foo', 0, -1),
                         [b'zero', b'two', b'four'])
        # Middle of range.
        self.assertEqual(self.redis.zremrangebyscore('foo', 1, 3), 1)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [b'zero', b'four'])
        self.assertEqual(self.redis.zremrangebyscore('foo', 1, 3), 0)
        # Entire range.
        self.assertEqual(self.redis.zremrangebyscore('foo', 0, 4), 2)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [])
    def test_zremrangebyscore_exclusive(self):
        # '(' prefix makes a bound exclusive, as for zrangebyscore.
        self.redis.zadd('foo', zero=0)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', four=4)
        self.assertEqual(self.redis.zremrangebyscore('foo', '(0', 1), 0)
        self.assertEqual(self.redis.zrange('foo', 0, -1),
                         [b'zero', b'two', b'four'])
        self.assertEqual(self.redis.zremrangebyscore('foo', '-inf', '(0'), 0)
        self.assertEqual(self.redis.zrange('foo', 0, -1),
                         [b'zero', b'two', b'four'])
        self.assertEqual(self.redis.zremrangebyscore('foo', '(2', 5), 1)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [b'zero', b'two'])
        self.assertEqual(self.redis.zremrangebyscore('foo', 0, '(2'), 1)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [b'two'])
        self.assertEqual(self.redis.zremrangebyscore('foo', '(1', '(3'), 1)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [])
    def test_zremrangebyscore_raises_error(self):
        self.redis.zadd('foo', zero=0)
        self.redis.zadd('foo', two=2)
        self.redis.zadd('foo', four=4)
        with self.assertRaises(redis.ResponseError):
            self.redis.zremrangebyscore('foo', 'three', 1)
        with self.assertRaises(redis.ResponseError):
            self.redis.zremrangebyscore('foo', 3, 'one')
        with self.assertRaises(redis.ResponseError):
            self.redis.zremrangebyscore('foo', 3, '1)')
        with self.assertRaises(redis.ResponseError):
            self.redis.zremrangebyscore('foo', '((3', '1)')
    def test_zremrangebyscore_badkey(self):
        self.assertEqual(self.redis.zremrangebyscore('foo', 0, 2), 0)
def test_zunionstore(self):
self.redis.zadd('foo', one=1)
self.redis.zadd('foo', two=2)
self.redis.zadd('bar', one=1)
self.redis.zadd('bar', two=2)
self.redis.zadd('bar', three=3)
self.redis.zunionstore('baz', ['foo', 'bar'])
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 2), (b'three', 3), (b'two', 4)])
def test_zunionstore_sum(self):
self.redis.zadd('foo', one=1)
self.redis.zadd('foo', two=2)
self.redis.zadd('bar', one=1)
self.redis.zadd('bar', two=2)
self.redis.zadd('bar', three=3)
self.redis.zunionstore('baz', ['foo', 'bar'], aggregate='SUM')
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 2), (b'three', 3), (b'two', 4)])
def test_zunionstore_max(self):
self.redis.zadd('foo', one=0)
self.redis.zadd('foo', two=0)
self.redis.zadd('bar', one=1)
self.redis.zadd('bar', two=2)
self.redis.zadd('bar', three=3)
self.redis.zunionstore('baz', ['foo', 'bar'], aggregate='MAX')
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 1), (b'two', 2), (b'three', 3)])
def test_zunionstore_min(self):
self.redis.zadd('foo', one=1)
self.redis.zadd('foo', two=2)
self.redis.zadd('bar', one=0)
self.redis.zadd('bar', two=0)
self.redis.zadd('bar', three=3)
self.redis.zunionstore('baz', ['foo', 'bar'], aggregate='MIN')
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 0), (b'two', 0), (b'three', 3)])
def test_zunionstore_weights(self):
self.redis.zadd('foo', one=1)
self.redis.zadd('foo', two=2)
self.redis.zadd('bar', one=1)
self.redis.zadd('bar', two=2)
self.redis.zadd('bar', four=4)
self.redis.zunionstore('baz', {'foo': 1, 'bar': 2}, aggregate='SUM')
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 3), (b'two', 6), (b'four', 8)])
def test_zunionstore_mixed_set_types(self):
# No score, redis will use 1.0.
self.redis.sadd('foo', 'one')
self.redis.sadd('foo', 'two')
self.redis.zadd('bar', one=1)
self.redis.zadd('bar', two=2)
self.redis.zadd('bar', three=3)
self.redis.zunionstore('baz', ['foo', 'bar'], aggregate='SUM')
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 2), (b'three', 3), (b'two', 3)])
def test_zunionstore_badkey(self):
self.redis.zadd('foo', one=1)
self.redis.zadd('foo', two=2)
self.redis.zunionstore('baz', ['foo', 'bar'], aggregate='SUM')
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 1), (b'two', 2)])
self.redis.zunionstore('baz', {'foo': 1, 'bar': 2}, aggregate='SUM')
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 1), (b'two', 2)])
def test_zinterstore(self):
self.redis.zadd('foo', one=1)
self.redis.zadd('foo', two=2)
self.redis.zadd('bar', one=1)
self.redis.zadd('bar', two=2)
self.redis.zadd('bar', three=3)
self.redis.zinterstore('baz', ['foo', 'bar'])
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 2), (b'two', 4)])
def test_zinterstore_mixed_set_types(self):
self.redis.sadd('foo', 'one')
self.redis.sadd('foo', 'two')
self.redis.zadd('bar', one=1)
self.redis.zadd('bar', two=2)
self.redis.zadd('bar', three=3)
self.redis.zinterstore('baz', ['foo', 'bar'], aggregate='SUM')
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 2), (b'two', 3)])
def test_zinterstore_max(self):
self.redis.zadd('foo', one=0)
self.redis.zadd('foo', two=0)
self.redis.zadd('bar', one=1)
self.redis.zadd('bar', two=2)
self.redis.zadd('bar', three=3)
self.redis.zinterstore('baz', ['foo', 'bar'], aggregate='MAX')
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 1), (b'two', 2)])
def test_zinterstore_onekey(self):
self.redis.zadd('foo', one=1)
self.redis.zinterstore('baz', ['foo'], aggregate='MAX')
self.assertEqual(self.redis.zrange('baz', 0, -1, withscores=True),
[(b'one', 1)])
def test_zinterstore_nokey(self):
with self.assertRaises(redis.ResponseError):
self.redis.zinterstore('baz', [], aggregate='MAX')
def test_zunionstore_nokey(self):
with self.assertRaises(redis.ResponseError):
self.redis.zunionstore('baz', [], aggregate='MAX')
def test_multidb(self):
r1 = self.create_redis(db=0)
r2 = self.create_redis(db=1)
r1['r1'] = 'r1'
r2['r2'] = 'r2'
self.assertTrue('r2' not in r1)
self.assertTrue('r1' not in r2)
self.assertEqual(r1['r1'], b'r1')
self.assertEqual(r2['r2'], b'r2')
r1.flushall()
self.assertTrue('r1' not in r1)
self.assertTrue('r2' not in r2)
def test_basic_sort(self):
self.redis.rpush('foo', '2')
self.redis.rpush('foo', '1')
self.redis.rpush('foo', '3')
self.assertEqual(self.redis.sort('foo'), [b'1', b'2', b'3'])
def test_empty_sort(self):
self.assertEqual(self.redis.sort('foo'), [])
def test_sort_range_offset_range(self):
self.redis.rpush('foo', '2')
self.redis.rpush('foo', '1')
self.redis.rpush('foo', '4')
self.redis.rpush('foo', '3')
self.assertEqual(self.redis.sort('foo', start=0, num=2), [b'1', b'2'])
def test_sort_range_offset_range_and_desc(self):
self.redis.rpush('foo', '2')
self.redis.rpush('foo', '1')
self.redis.rpush('foo', '4')
self.redis.rpush('foo', '3')
self.assertEqual(self.redis.sort("foo", start=0, num=1, desc=True),
[b"4"])
def test_sort_range_offset_norange(self):
with self.assertRaises(redis.RedisError):
self.redis.sort('foo', start=1)
def test_sort_range_with_large_range(self):
self.redis.rpush('foo', '2')
self.redis.rpush('foo', '1')
self.redis.rpush('foo', '4')
self.redis.rpush('foo', '3')
# num=20 even though len(foo) is 4.
self.assertEqual(self.redis.sort('foo', start=1, num=20),
[b'2', b'3', b'4'])
def test_sort_descending(self):
self.redis.rpush('foo', '1')
self.redis.rpush('foo', '2')
self.redis.rpush('foo', '3')
self.assertEqual(self.redis.sort('foo', desc=True), [b'3', b'2', b'1'])
def test_sort_alpha(self):
self.redis.rpush('foo', '2a')
self.redis.rpush('foo', '1b')
self.redis.rpush('foo', '2b')
self.redis.rpush('foo', '1a')
self.assertEqual(self.redis.sort('foo', alpha=True),
[b'1a', b'1b', b'2a', b'2b'])
    def test_foo(self):
        # NOTE(review): misleadingly named -- this actually verifies that
        # SORT on non-numeric values without alpha=True raises an error.
        # A name like test_sort_non_numeric_without_alpha_raises would be
        # clearer; kept as-is to preserve the test's identity in reports.
        self.redis.rpush('foo', '2a')
        self.redis.rpush('foo', '1b')
        self.redis.rpush('foo', '2b')
        self.redis.rpush('foo', '1a')
        with self.assertRaises(redis.ResponseError):
            self.redis.sort('foo', alpha=False)
def test_sort_with_store_option(self):
self.redis.rpush('foo', '2')
self.redis.rpush('foo', '1')
self.redis.rpush('foo', '4')
self.redis.rpush('foo', '3')
self.assertEqual(self.redis.sort('foo', store='bar'), 4)
self.assertEqual(self.redis.lrange('bar', 0, -1),
[b'1', b'2', b'3', b'4'])
def test_sort_with_by_and_get_option(self):
self.redis.rpush('foo', '2')
self.redis.rpush('foo', '1')
self.redis.rpush('foo', '4')
self.redis.rpush('foo', '3')
self.redis['weight_1'] = '4'
self.redis['weight_2'] = '3'
self.redis['weight_3'] = '2'
self.redis['weight_4'] = '1'
self.redis['data_1'] = 'one'
self.redis['data_2'] = 'two'
self.redis['data_3'] = 'three'
self.redis['data_4'] = 'four'
self.assertEqual(self.redis.sort('foo', by='weight_*', get='data_*'),
[b'four', b'three', b'two', b'one'])
self.assertEqual(self.redis.sort('foo', by='weight_*', get='#'),
[b'4', b'3', b'2', b'1'])
self.assertEqual(
self.redis.sort('foo', by='weight_*', get=('data_*', '#')),
[b'four', b'4', b'three', b'3', b'two', b'2', b'one', b'1'])
self.assertEqual(self.redis.sort('foo', by='weight_*', get='data_1'),
[None, None, None, None])
def test_sort_with_hash(self):
self.redis.rpush('foo', 'middle')
self.redis.rpush('foo', 'eldest')
self.redis.rpush('foo', 'youngest')
self.redis.hset('record_youngest', 'age', 1)
self.redis.hset('record_youngest', 'name', 'baby')
self.redis.hset('record_middle', 'age', 10)
self.redis.hset('record_middle', 'name', 'teen')
self.redis.hset('record_eldest', 'age', 20)
self.redis.hset('record_eldest', 'name', 'adult')
self.assertEqual(self.redis.sort('foo', by='record_*->age'),
[b'youngest', b'middle', b'eldest'])
self.assertEqual(
self.redis.sort('foo', by='record_*->age', get='record_*->name'),
[b'baby', b'teen', b'adult'])
def test_sort_with_set(self):
self.redis.sadd('foo', '3')
self.redis.sadd('foo', '1')
self.redis.sadd('foo', '2')
self.assertEqual(self.redis.sort('foo'), [b'1', b'2', b'3'])
    def test_pipeline(self):
        """Pipeline buffers commands (with chaining), returns all results
        from execute() as a list, and empties its buffer afterwards."""
        # The pipeline method returns an object for
        # issuing multiple commands in a batch.
        p = self.redis.pipeline()
        p.watch('bam')
        p.multi()
        p.set('foo', 'bar').get('foo')
        p.lpush('baz', 'quux')
        p.lpush('baz', 'quux2').lrange('baz', 0, -1)
        res = p.execute()
        # Check return values returned as list.
        self.assertEqual([True, b'bar', 1, 2, [b'quux2', b'quux']], res)
        # Check side effects happened as expected.
        self.assertEqual([b'quux2', b'quux'], self.redis.lrange('baz', 0, -1))
        # Check that the command buffer has been emptied.
        self.assertEqual([], p.execute())
def test_multiple_successful_watch_calls(self):
p = self.redis.pipeline()
p.watch('bam')
p.multi()
p.set('foo', 'bar')
# Check that the watched keys buffer has been emptied.
p.execute()
# bam is no longer being watched, so it's ok to modify
# it now.
p.watch('foo')
self.redis.set('bam', 'boo')
p.multi()
p.set('foo', 'bats')
self.assertEqual(p.execute(), [True])
def test_pipeline_non_transactional(self):
# For our simple-minded model I don't think
# there is any observable difference.
p = self.redis.pipeline(transaction=False)
res = p.set('baz', 'quux').get('baz').execute()
self.assertEqual([True, b'quux'], res)
    def test_pipeline_raises_when_watched_key_changed(self):
        """A WATCHed key modified before EXEC aborts the transaction
        with WatchError."""
        self.redis.set('foo', 'bar')
        self.redis.rpush('greet', 'hello')
        p = self.redis.pipeline()
        self.addCleanup(p.reset)
        p.watch('greet', 'foo')
        nextf = p.get('foo') + b'baz'
        # Simulate change happening on another thread.
        self.redis.rpush('greet', 'world')
        # Begin pipelining.
        p.multi()
        p.set('foo', nextf)
        with self.assertRaises(redis.WatchError):
            p.execute()
def test_pipeline_succeeds_despite_unwatched_key_changed(self):
# Same setup as before except for the params to the WATCH command.
self.redis.set('foo', 'bar')
self.redis.rpush('greet', 'hello')
p = self.redis.pipeline()
try:
# Only watch one of the 2 keys.
p.watch('foo')
nextf = p.get('foo') + b'baz'
# Simulate change happening on another thread.
self.redis.rpush('greet', 'world')
p.multi()
p.set('foo', nextf)
p.execute()
# Check the commands were executed.
self.assertEqual(b'barbaz', self.redis.get('foo'))
finally:
p.reset()
def test_pipeline_succeeds_when_watching_nonexistent_key(self):
self.redis.set('foo', 'bar')
self.redis.rpush('greet', 'hello')
p = self.redis.pipeline()
try:
# Also watch a nonexistent key.
p.watch('foo', 'bam')
nextf = p.get('foo') + b'baz'
# Simulate change happening on another thread.
self.redis.rpush('greet', 'world')
p.multi()
p.set('foo', nextf)
p.execute()
# Check the commands were executed.
self.assertEqual(b'barbaz', self.redis.get('foo'))
finally:
p.reset()
def test_watch_state_is_cleared_across_multiple_watches(self):
self.redis.set('foo', 'one')
self.redis.set('bar', 'baz')
p = self.redis.pipeline()
self.addCleanup(p.reset)
p.watch('foo')
# Simulate change happening on another thread.
self.redis.set('foo', 'three')
p.multi()
p.set('foo', 'three')
with self.assertRaises(redis.WatchError):
p.execute()
# Now watch another key. It should be ok to change
# foo as we're no longer watching it.
p.watch('bar')
self.redis.set('foo', 'four')
p.multi()
p.set('bar', 'five')
self.assertEqual(p.execute(), [True])
def test_pipeline_proxies_to_redis_object(self):
p = self.redis.pipeline()
self.assertTrue(hasattr(p, 'zadd'))
with self.assertRaises(AttributeError):
p.non_existent_attribute
def test_pipeline_as_context_manager(self):
self.redis.set('foo', 'bar')
with self.redis.pipeline() as p:
p.watch('foo')
self.assertTrue(isinstance(p, redis.client.BasePipeline)
or p.need_reset)
p.multi()
p.set('foo', 'baz')
p.execute()
# Usually you would consider the pipeline to
# have been destroyed
# after the with statement, but we need to check
# it was reset properly:
self.assertTrue(isinstance(p, redis.client.BasePipeline)
or not p.need_reset)
    def test_pipeline_transaction_shortcut(self):
        """redis.transaction() retries the callable until the watched key
        is unchanged; here the first two attempts are deliberately
        invalidated, so the callable runs exactly three times."""
        # This example taken pretty much from the redis-py documentation.
        self.redis.set('OUR-SEQUENCE-KEY', 13)
        calls = []
        def client_side_incr(pipe):
            # Record each invocation so we can assert the retry count.
            calls.append((pipe,))
            current_value = pipe.get('OUR-SEQUENCE-KEY')
            next_value = int(current_value) + 1
            if len(calls) < 3:
                # Simulate a change from another thread.
                self.redis.set('OUR-SEQUENCE-KEY', next_value)
            pipe.multi()
            pipe.set('OUR-SEQUENCE-KEY', next_value)
        res = self.redis.transaction(client_side_incr, 'OUR-SEQUENCE-KEY')
        self.assertEqual([True], res)
        self.assertEqual(16, int(self.redis.get('OUR-SEQUENCE-KEY')))
        self.assertEqual(3, len(calls))
    def test_key_patterns(self):
        """KEYS glob patterns: '*' wildcard, '?' single char, bare '*'
        and no-argument forms all match as redis does.

        NOTE(review): assertItemsEqual is Python-2-only unittest API; on
        Python 3 this would need assertCountEqual -- TODO confirm the
        suite's target interpreter."""
        self.redis.mset({'one': 1, 'two': 2, 'three': 3, 'four': 4})
        self.assertItemsEqual(self.redis.keys('*o*'),
                              [b'four', b'one', b'two'])
        self.assertItemsEqual(self.redis.keys('t??'), [b'two'])
        self.assertItemsEqual(self.redis.keys('*'),
                              [b'four', b'one', b'two', b'three'])
        self.assertItemsEqual(self.redis.keys(),
                              [b'four', b'one', b'two', b'three'])
    def test_ping(self):
        """PING returns a truthy acknowledgement."""
        self.assertTrue(self.redis.ping())
    def test_type(self):
        """TYPE reports the correct byte-string tag for each of the five
        core value types."""
        self.redis.set('string_key', "value")
        self.redis.lpush("list_key", "value")
        self.redis.sadd("set_key", "value")
        self.redis.zadd("zset_key", 1, "value")
        self.redis.hset('hset_key', 'key', 'value')
        self.assertEqual(self.redis.type('string_key'), b'string')
        self.assertEqual(self.redis.type('list_key'), b'list')
        self.assertEqual(self.redis.type('set_key'), b'set')
        self.assertEqual(self.redis.type('zset_key'), b'zset')
        self.assertEqual(self.redis.type('hset_key'), b'hash')
class TestFakeRedis(unittest.TestCase):
    """Tests exercising the legacy (non-strict) FakeRedis API, whose
    argument order for setex/lrem/zadd differs from StrictRedis.

    Subclasses override create_redis() to run the same suite against a
    real server."""
    def setUp(self):
        self.redis = self.create_redis()
    def tearDown(self):
        # Wipe all databases so tests stay independent.
        self.redis.flushall()
        del self.redis
    def assertInRange(self, value, start, end, msg=None):
        # Inclusive-bounds helper used for the millisecond TTL checks.
        self.assertGreaterEqual(value, start, msg)
        self.assertLessEqual(value, end, msg)
    def create_redis(self, db=0):
        # Overridden by TestRealRedis to target a live server.
        return fakeredis.FakeRedis(db=db)
    def test_setex(self):
        # Legacy Redis class takes (name, value, time) -- note the order.
        self.assertEqual(self.redis.setex('foo', 'bar', 100), True)
        self.assertEqual(self.redis.get('foo'), b'bar')
    def test_setex_using_timedelta(self):
        self.assertEqual(
            self.redis.setex('foo', 'bar', timedelta(seconds=100)), True)
        self.assertEqual(self.redis.get('foo'), b'bar')
    def test_lrem_postitive_count(self):
        # NOTE(review): "postitive" is a typo for "positive"; kept to
        # preserve the test's identity in reports.
        self.redis.lpush('foo', 'same')
        self.redis.lpush('foo', 'same')
        self.redis.lpush('foo', 'different')
        self.redis.lrem('foo', 'same', 2)
        self.assertEqual(self.redis.lrange('foo', 0, -1), [b'different'])
    def test_lrem_negative_count(self):
        self.redis.lpush('foo', 'removeme')
        self.redis.lpush('foo', 'three')
        self.redis.lpush('foo', 'two')
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'removeme')
        self.redis.lrem('foo', 'removeme', -1)
        # Should remove it from the end of the list,
        # leaving the 'removeme' from the front of the list alone.
        self.assertEqual(self.redis.lrange('foo', 0, -1),
                         [b'removeme', b'one', b'two', b'three'])
    def test_lrem_zero_count(self):
        # NOTE(review): identical body to test_lrem_default_value below;
        # this one never passes an explicit count of 0, so the two tests
        # are duplicates.
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'one')
        self.redis.lrem('foo', 'one')
        self.assertEqual(self.redis.lrange('foo', 0, -1), [])
    def test_lrem_default_value(self):
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'one')
        self.redis.lpush('foo', 'one')
        self.redis.lrem('foo', 'one')
        self.assertEqual(self.redis.lrange('foo', 0, -1), [])
    def test_lrem_does_not_exist(self):
        self.redis.lpush('foo', 'one')
        self.redis.lrem('foo', 'one')
        # These should be noops.
        self.redis.lrem('foo', 'one', -2)
        self.redis.lrem('foo', 'one', 2)
    def test_lrem_return_value(self):
        # LREM returns the number of elements removed.
        self.redis.lpush('foo', 'one')
        count = self.redis.lrem('foo', 'one', 0)
        self.assertEqual(count, 1)
        self.assertEqual(self.redis.lrem('foo', 'one'), 0)
    def test_zadd_deprecated(self):
        # Legacy positional form: zadd(name, value, score).
        self.redis.zadd('foo', 'one', 1)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [b'one'])
    def test_zadd_missing_required_params(self):
        with self.assertRaises(redis.RedisError):
            # Missing the 'score' param.
            self.redis.zadd('foo', 'one')
        with self.assertRaises(redis.RedisError):
            # Missing the 'value' param.
            self.redis.zadd('foo', None, score=1)
    def test_zadd_with_single_keypair(self):
        self.redis.zadd('foo', bar=1)
        self.assertEqual(self.redis.zrange('foo', 0, -1), [b'bar'])
    def test_set_nx_doesnt_set_value_twice(self):
        # SET ... NX succeeds only when the key does not exist.
        self.assertEqual(self.redis.set('foo', 'bar', nx=True), True)
        self.assertEqual(self.redis.set('foo', 'bar', nx=True), None)
    def test_set_xx_set_value_when_exists(self):
        # SET ... XX succeeds only when the key already exists.
        self.assertEqual(self.redis.set('foo', 'bar', xx=True), None)
        self.redis.set('foo', 'bar')
        self.assertEqual(self.redis.set('foo', 'bar', xx=True), True)
    @attr('slow')
    def test_set_ex_should_expire_value(self):
        # ex=0 is accepted here (contrast with psetex, which rejects 0).
        self.redis.set('foo', 'bar', ex=0)
        self.assertEqual(self.redis.get('foo'), b'bar')
        self.redis.set('foo', 'bar', ex=1)
        sleep(2)
        self.assertEqual(self.redis.get('foo'), None)
    @attr('slow')
    def test_set_px_should_expire_value(self):
        self.redis.set('foo', 'bar', px=500)
        sleep(1.5)
        self.assertEqual(self.redis.get('foo'), None)
    @attr('slow')
    def test_psetex_expire_value(self):
        # PSETEX with a 0 timeout is an error.
        with self.assertRaises(ResponseError):
            self.redis.psetex('foo', 0, 'bar')
        self.redis.psetex('foo', 500, 'bar')
        sleep(1.5)
        self.assertEqual(self.redis.get('foo'), None)
    @attr('slow')
    def test_psetex_expire_value_using_timedelta(self):
        with self.assertRaises(ResponseError):
            self.redis.psetex('foo', timedelta(seconds=0), 'bar')
        self.redis.psetex('foo', timedelta(seconds=0.5), 'bar')
        sleep(1.5)
        self.assertEqual(self.redis.get('foo'), None)
    @attr('slow')
    def test_expire_should_expire_key(self):
        self.redis.set('foo', 'bar')
        self.assertEqual(self.redis.get('foo'), b'bar')
        self.redis.expire('foo', 1)
        sleep(1.5)
        self.assertEqual(self.redis.get('foo'), None)
        # EXPIRE on a missing key reports False.
        self.assertEqual(self.redis.expire('bar', 1), False)
    def test_expire_should_return_true_for_existing_key(self):
        self.redis.set('foo', 'bar')
        rv = self.redis.expire('foo', 1)
        self.assertIs(rv, True)
    def test_expire_should_return_false_for_missing_key(self):
        rv = self.redis.expire('missing', 1)
        self.assertIs(rv, False)
    @attr('slow')
    def test_expire_should_expire_key_using_timedelta(self):
        self.redis.set('foo', 'bar')
        self.assertEqual(self.redis.get('foo'), b'bar')
        self.redis.expire('foo', timedelta(seconds=1))
        sleep(1.5)
        self.assertEqual(self.redis.get('foo'), None)
        self.assertEqual(self.redis.expire('bar', 1), False)
    @attr('slow')
    def test_expireat_should_expire_key_by_datetime(self):
        self.redis.set('foo', 'bar')
        self.assertEqual(self.redis.get('foo'), b'bar')
        self.redis.expireat('foo', datetime.now() + timedelta(seconds=1))
        sleep(1.5)
        self.assertEqual(self.redis.get('foo'), None)
        self.assertEqual(self.redis.expireat('bar', datetime.now()), False)
    @attr('slow')
    def test_expireat_should_expire_key_by_timestamp(self):
        self.redis.set('foo', 'bar')
        self.assertEqual(self.redis.get('foo'), b'bar')
        self.redis.expireat('foo', int(time() + 1))
        sleep(1.5)
        self.assertEqual(self.redis.get('foo'), None)
        self.assertEqual(self.redis.expire('bar', 1), False)
    def test_expireat_should_return_true_for_existing_key(self):
        self.redis.set('foo', 'bar')
        rv = self.redis.expireat('foo', int(time() + 1))
        self.assertIs(rv, True)
    def test_expireat_should_return_false_for_missing_key(self):
        rv = self.redis.expireat('missing', int(time() + 1))
        self.assertIs(rv, False)
    def test_ttl_should_return_none_for_non_expiring_key(self):
        # redis-py 2.x returns None (not -1) for keys without a TTL.
        self.redis.set('foo', 'bar')
        self.assertEqual(self.redis.get('foo'), b'bar')
        self.assertEqual(self.redis.ttl('foo'), None)
    def test_ttl_should_return_value_for_expiring_key(self):
        self.redis.set('foo', 'bar')
        self.redis.expire('foo', 1)
        self.assertEqual(self.redis.ttl('foo'), 1)
        self.redis.expire('foo', 2)
        self.assertEqual(self.redis.ttl('foo'), 2)
        long_long_c_max = 100000000000
        # See https://github.com/antirez/redis/blob/unstable/src/db.c#L632
        self.redis.expire('foo', long_long_c_max)
        self.assertEqual(self.redis.ttl('foo'), long_long_c_max)
    def test_pttl_should_return_none_for_non_expiring_key(self):
        self.redis.set('foo', 'bar')
        self.assertEqual(self.redis.get('foo'), b'bar')
        self.assertEqual(self.redis.pttl('foo'), None)
    def test_pttl_should_return_value_for_expiring_key(self):
        # d: allowed millisecond drift between expire() and pttl().
        d = 100
        self.redis.set('foo', 'bar')
        self.redis.expire('foo', 1)
        self.assertInRange(self.redis.pttl('foo'), 1000 - d, 1000)
        self.redis.expire('foo', 2)
        self.assertInRange(self.redis.pttl('foo'), 2000 - d, 2000)
        long_long_c_max = 100000000000
        # See https://github.com/antirez/redis/blob/unstable/src/db.c#L632
        self.redis.expire('foo', long_long_c_max)
        self.assertInRange(self.redis.pttl('foo'),
                           long_long_c_max * 1000 - d,
                           long_long_c_max * 1000)
@redis_must_be_running
class TestRealRedis(TestFakeRedis):
    """Re-runs the full TestFakeRedis suite against an actual redis
    server (the decorator presumably skips it when none is running --
    verify against its definition)."""
    def create_redis(self, db=0):
        return redis.Redis('localhost', port=6379, db=db)
@redis_must_be_running
class TestRealStrictRedis(TestFakeStrictRedis):
    """Re-runs the StrictRedis suite against an actual redis server
    (the decorator presumably skips it when none is running -- verify
    against its definition)."""
    def create_redis(self, db=0):
        return redis.StrictRedis('localhost', port=6379, db=db)
class TestInitArgs(unittest.TestCase):
    """Constructor / from_url() behaviour of the fake clients."""
    def test_can_accept_any_kwargs(self):
        # Unknown keyword arguments must be silently tolerated, matching
        # redis-py's permissive constructor.
        fakeredis.FakeRedis(foo='bar', bar='baz')
        fakeredis.FakeStrictRedis(foo='bar', bar='baz')
    def test_from_url(self):
        db = fakeredis.FakeStrictRedis.from_url(
            'redis://username:password@localhost:6379/0')
        db.set('foo', 'bar')
        self.assertEqual(db.get('foo'), b'bar')
    def test_from_url_with_db_arg(self):
        # The db can come from the URL path or the explicit db= kwarg;
        # each must select an independent database.
        db = fakeredis.FakeStrictRedis.from_url(
            'redis://username:password@localhost:6379/0')
        db1 = fakeredis.FakeStrictRedis.from_url(
            'redis://username:password@localhost:6379/1')
        db2 = fakeredis.FakeStrictRedis.from_url(
            'redis://username:password@localhost:6379/',
            db=2)
        db.set('foo', 'foo0')
        db1.set('foo', 'foo1')
        db2.set('foo', 'foo2')
        self.assertEqual(db.get('foo'), b'foo0')
        self.assertEqual(db1.get('foo'), b'foo1')
        self.assertEqual(db2.get('foo'), b'foo2')
    def test_from_url_db_value_error(self):
        # In ValueError, should default to 0
        db = fakeredis.FakeStrictRedis.from_url(
            'redis://username:password@localhost:6379/a')
        # Reaches into the private _db_num attribute of the fake client.
        self.assertEqual(db._db_num, 0)
# Allow running this test module directly: `python test_fakeredis.py`.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "2749ab01971592c448f1e39d670dd95a",
"timestamp": "",
"source": "github",
"line_count": 2032,
"max_line_length": 96,
"avg_line_length": 40.57381889763779,
"alnum_prop": 0.5650607670475221,
"repo_name": "sam-untapt/fakeredis",
"id": "9b8fe79f25e47ab59d05c28b241c38215df61e08",
"size": "82468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_fakeredis.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "130828"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import re
import urlparse
import scrapy
from scrapy.exceptions import CloseSpider
from scrapy.loader import ItemLoader
from ..items import Link
def should_follow(url):
    """Decide whether *url* is worth crawling.

    Off-site links are always followed; on baseballprospectus.com only
    article pages (``/a/...`` short links or ``/article.php``) qualify.
    """
    parsed = urlparse.urlparse(url)
    bp_hosts = ('baseballprospectus.com', 'www.baseballprospectus.com')
    if parsed.hostname not in bp_hosts:
        return True
    # On-site: limit only to article URLs.
    return parsed.path.startswith(('/a/', '/article.php'))
def normalize_url(url):
    """Clean up a BPro URL.

    - ``/article.php`` links keep only their ``articleid`` query
      parameter (tracking/extra params are dropped).
    - ``/a/<id>`` short links are rewritten to the canonical
      ``/article.php?articleid=<id>`` form.

    Returns the normalized URL string.
    """
    bits = list(urlparse.urlparse(url, allow_fragments=False))
    # bits[2] is the path component, bits[4] the query string.
    qs = urlparse.parse_qs(bits[4])
    # normalize article links
    if '/article.php' in bits[2]:
        # Dict comprehension instead of the old `lambda (k, v)` filter,
        # whose tuple-unpacking parameter is a SyntaxError on Python 3.
        qs = {k: v for k, v in qs.items() if k == 'articleid'}
    # revert /a/ links to article.php links
    elif '/a/' in bits[2]:
        # BUGFIX: extract the article id *before* overwriting the path.
        # Previously bits[2] was replaced with '/article.php' first, so
        # the id pulled out of it was always the literal 'article.php'.
        article_id = bits[2].rstrip('/').split('/')[-1]
        bits[2] = '/article.php'
        qs = {'articleid': [article_id]}
    bits[4] = '&'.join(
        '{}={}'.format(k, v[0])
        for (k, v)
        in qs.items()
    )
    return urlparse.urlunparse(bits)
class BproSpider(scrapy.Spider):
    """Crawls Baseball Prospectus article pages, logging in first, and
    emits Link items describing the article-to-article link graph."""

    name = 'bpro'
    allowed_domains = ('baseballprospectus.com', 'google.com', 'bbp.cx', )
    start_urls = (
        'http://www.baseballprospectus.com/articles/',
    )

    def __init__(self, *a, **kw):
        super(BproSpider, self).__init__(*a, **kw)
        # BUGFIX: scrapy Settings are dict-like; plain getattr() on them
        # raised AttributeError instead of returning the value, which made
        # the credential check below unreachable.  Use the documented
        # Settings.get() API instead.
        # NOTE(review): assumes self.settings is available at __init__
        # time -- confirm against the scrapy version in use.
        self.username = self.settings.get('BPRO_USERNAME')
        self.password = self.settings.get('BPRO_PASSWORD')
        if not self.username or not self.password:
            raise CloseSpider(
                'Please set BPRO_USERNAME and BPRO_PASSWORD settings '
                'with valid Baseball Prospectus account credentials')

    def start_requests(self):
        """Log in through the BPro AJAX session handler before crawling."""
        login_request = scrapy.FormRequest(
            'http://www.baseballprospectus.com/ajax/session_handler.php',
            formdata={'username': self.username,
                      'password': self.password,
                      'type': 'login',
                      'action': 'muffinklezmer'},
            callback=self.post_login)
        return [login_request]

    def post_login(self, response):
        """Kick off the crawl of the article index once authenticated."""
        yield scrapy.Request('http://www.baseballprospectus.com/articles/',
                             callback=self.parse)

    def parse(self, response):
        """Extracts links from "All Articles" list.

        Links are then sent to `self.extract_article_links`
        for processing; pagination links respawn `parse`.
        """
        # find links in list
        article_links = (
            response
            .css('.articleHead table tr td:first-of-type a::attr(href)')
            .extract())
        for link in article_links:
            url = urlparse.urljoin(response.url, link)
            url = normalize_url(url)
            yield scrapy.Request(url, callback=self.extract_article_links)
        # respawn if pagination controls are present
        pagination = (
            response
            .xpath('//a[contains(., "Previous Article Entries")]/@href')
            .extract())
        if len(pagination) > 0:
            href = pagination.pop(0)
            url = urlparse.urljoin(response.url, href)
            self.logger.info('Requesting next article list URL: ' + url)
            yield scrapy.Request(url, callback=self.parse)

    def extract_article_links(self, response):
        """Extracts links from an article page, then follows each
        same-domain URL to further link extraction.
        """
        paywalled = (
            response
            .xpath('//h1[contains(., "The rest of this article is '
                   'restricted to Baseball Prospectus Subscribers.")]')
            .extract())
        if len(paywalled) > 0:
            # BUGFIX: was `raise StopIteration()`, which inside a
            # generator becomes a RuntimeError under PEP 479 (Python
            # 3.7+); a bare return ends the generator cleanly on both
            # Python 2 and 3.
            return
        links = response.css('.article a::attr(href)').extract()
        for link in links:
            dest_url = urlparse.urljoin(response.url, link)
            if not should_follow(dest_url):
                self.logger.warning('Skipping {}'.format(dest_url))
                continue
            dest_url = normalize_url(dest_url)
            # NOTE(review): IGNORE is not defined anywhere in this
            # module; this loop raises NameError at runtime unless the
            # blacklist is defined/imported elsewhere -- TODO fix.
            for pattern in IGNORE:
                if pattern.search(dest_url) is not None:
                    self.logger.debug('Skipping blacklisted URL: '
                                      + dest_url)
                    break
            else:
                # BUGFIX: the emit code previously sat *after* this
                # for/else, so blacklisted URLs were yielded anyway.
                # Moving it into the else-clause makes `break` actually
                # skip them.
                self.logger.info('Found relationship: ' + dest_url)
                # emit relationship
                yield Link(src_url=response.url, dest_url=dest_url)
                # extract destination page's outbound links
                yield scrapy.Request(dest_url,
                                     callback=self.extract_article_links)
| {
"content_hash": "db5a2b5ef0f7053c3804b83ba357188b",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 75,
"avg_line_length": 31.34640522875817,
"alnum_prop": 0.5565054211843202,
"repo_name": "mattdennewitz/baseball-pagerank",
"id": "f91f00a000606fcc3d2189656c896f894330bbb3",
"size": "4821",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "bbpr/bbpr/spiders/bpro.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14196"
}
],
"symlink_target": ""
} |
"""distutils.msvccompiler
Contains MSVCCompiler, an implementation of the abstract CCompiler class
for the Microsoft Visual Studio.
"""
# Written by Perry Stoll
# hacked by Robin Becker and Thomas Heller to do a better job of
# finding DevStudio (through the registry)
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id: msvccompiler.py 50979 2006-07-30 13:27:31Z martin.v.loewis $"
import sys, os, string
from distutils.errors import \
DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils import log
# Registry access setup: prefer the stdlib _winreg module, falling back
# to the pywin32 equivalents (win32api/win32con).  _can_read_reg records
# whether either import succeeded; on non-Windows platforms both fail and
# compiler settings cannot be auto-detected from the registry.
_can_read_reg = 0
try:
    import _winreg
    _can_read_reg = 1
    hkey_mod = _winreg
    RegOpenKeyEx = _winreg.OpenKeyEx
    RegEnumKey = _winreg.EnumKey
    RegEnumValue = _winreg.EnumValue
    RegError = _winreg.error
except ImportError:
    try:
        import win32api
        import win32con
        _can_read_reg = 1
        hkey_mod = win32con
        RegOpenKeyEx = win32api.RegOpenKeyEx
        RegEnumKey = win32api.RegEnumKey
        RegEnumValue = win32api.RegEnumValue
        RegError = win32api.error
    except ImportError:
        log.info("Warning: Can't read registry to find the "
                 "necessary compiler setting\n"
                 "Make sure that Python modules _winreg, "
                 "win32api or win32con are installed.")
        pass
if _can_read_reg:
    # Registry hives searched, in order, by read_keys/read_values below.
    HKEYS = (hkey_mod.HKEY_USERS,
             hkey_mod.HKEY_CURRENT_USER,
             hkey_mod.HKEY_LOCAL_MACHINE,
             hkey_mod.HKEY_CLASSES_ROOT)
def read_keys(base, key):
    """Return list of registry subkey names of *key* under hive *base*.

    Returns None when the key cannot be opened.
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    subkeys = []
    index = 0
    while 1:  # `while 1` kept for Python 2.1 compatibility
        try:
            name = RegEnumKey(handle, index)
        except RegError:
            # RegEnumKey raises once there are no more subkeys.
            return subkeys
        subkeys.append(name)
        index = index + 1
def read_values(base, key):
    """Return dict of registry value names -> values for *key* under
    hive *base*.

    All names are converted to lowercase.  Returns None when the key
    cannot be opened.
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    values = {}
    index = 0
    while 1:  # `while 1` kept for Python 2.1 compatibility
        try:
            # `value_type` (REG_SZ etc.) is unused; renamed from `type`
            # to avoid shadowing the builtin.
            name, value, value_type = RegEnumValue(handle, index)
        except RegError:
            # RegEnumValue raises once there are no more values.
            return values
        values[convert_mbcs(name.lower())] = convert_mbcs(value)
        index = index + 1
def convert_mbcs(s):
    """Encode *s* through Windows' "mbcs" codec when it supports it.

    Objects without an ``encode`` method, and values whose encoding
    fails with a UnicodeError, are returned unchanged.
    """
    encoder = getattr(s, "encode", None)
    if encoder is None:
        return s
    try:
        return encoder("mbcs")
    except UnicodeError:
        return s
class MacroExpander:
    """Expands $(Name)-style Visual Studio macros by looking their
    values up in the Windows registry.

    NOTE: Python 2 only -- the `except E, e` syntax below is a
    SyntaxError on Python 3.
    """
    def __init__(self, version):
        # Maps "$(MacroName)" -> value read from the registry.
        self.macros = {}
        self.load_macros(version)
    def set_macro(self, macro, path, key):
        # Search each hive in HKEYS; first hit wins.
        for base in HKEYS:
            d = read_values(base, path)
            if d:
                self.macros["$(%s)" % macro] = d[key]
                break
    def load_macros(self, version):
        """Populate the macro table for the given VS *version* (float)."""
        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
        net = r"Software\Microsoft\.NETFramework"
        self.set_macro("FrameworkDir", net, "installroot")
        try:
            if version > 7.0:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
            else:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
        except KeyError, exc: #
            raise DistutilsPlatformError, \
                  ("""Python was built with Visual Studio 2003;
extensions must be built with a compiler than can generate compatible binaries.
Visual Studio 2003 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")
        p = r"Software\Microsoft\NET Framework Setup\Product"
        for base in HKEYS:
            try:
                h = RegOpenKeyEx(base, p)
            except RegError:
                continue
            key = RegEnumKey(h, 0)
            d = read_values(base, r"%s\%s" % (p, key))
            self.macros["$(FrameworkVersion)"] = d["version"]
    def sub(self, s):
        """Return *s* with every known $(Macro) replaced by its value."""
        for k, v in self.macros.items():
            s = string.replace(s, k, v)
        return s
def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.
    Returns None when the MSC version string is present but predates
    MSVC 6 (unknown compiler).
    """
    prefix = "MSC v."
    # str.find has been available since Python 2.0 and replaces the
    # deprecated string.find module function used previously.
    i = sys.version.find(prefix)
    if i == -1:
        # Not built with MSVC (or too old to record it): assume MSVC 6.
        return 6
    i = i + len(prefix)
    s, rest = sys.version[i:].split(" ", 1)
    # "MSC v.1400" -> major 14 - 6 = 8, minor digits follow.
    majorVersion = int(s[:-2]) - 6
    minorVersion = int(s[2:3]) / 10.0
    # I don't think paths are affected by minor version in version 6
    if majorVersion == 6:
        minorVersion = 0
    if majorVersion >= 6:
        return majorVersion + minorVersion
    # else we don't know what version of the compiler this is
    return None
def get_build_architecture():
    """Return the processor architecture Python was built for.

    Possible results are "Intel", "Itanium", or "AMD64"; "Intel" is the
    default when sys.version carries no architecture marker.
    """
    prefix = " bit ("
    # str.find replaces the deprecated string.find module function.
    i = sys.version.find(prefix)
    if i == -1:
        return "Intel"
    j = sys.version.find(")", i)
    return sys.version[i+len(prefix):j]
class MSVCCompiler (CCompiler) :
    """Concrete class that implements an interface to Microsoft Visual C++,
       as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__ (self, verbose=0, dry_run=0, force=0):
        # Record which MSVC version/architecture built Python itself;
        # extensions must be built with a matching compiler.
        CCompiler.__init__ (self, verbose, dry_run, force)
        self.__version = get_build_version()
        self.__arch = get_build_architecture()
        if self.__arch == "Intel":
            # x86
            if self.__version >= 7:
                self.__root = r"Software\Microsoft\VisualStudio"
                self.__macros = MacroExpander(self.__version)
            else:
                self.__root = r"Software\Microsoft\Devstudio"
            self.__product = "Visual Studio version %s" % self.__version
        else:
            # Win64. Assume this was built with the platform SDK
            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)

        # Tool discovery is deferred to initialize(); it touches the
        # registry and os.environ.
        self.initialized = False

    def initialize(self):
        """Locate the MSVC tool executables and build the option lists.

        Run once per instance before the first compile/link operation.
        """
        self.__paths = []
        if os.environ.has_key("DISTUTILS_USE_SDK") and os.environ.has_key("MSSdk") and self.find_exe("cl.exe"):
            # Assume that the SDK set up everything alright; don't try to be
            # smarter
            self.cc = "cl.exe"
            self.linker = "link.exe"
            self.lib = "lib.exe"
            self.rc = "rc.exe"
            self.mc = "mc.exe"
        else:
            self.__paths = self.get_msvc_paths("path")

            if len (self.__paths) == 0:
                raise DistutilsPlatformError, \
                      ("Python was built with %s, "
                       "and extensions need to be built with the same "
                       "version of the compiler, but it isn't installed." % self.__product)

            self.cc = self.find_exe("cl.exe")
            self.linker = self.find_exe("link.exe")
            self.lib = self.find_exe("lib.exe")
            self.rc = self.find_exe("rc.exe")   # resource compiler
            self.mc = self.find_exe("mc.exe")   # message compiler
            self.set_path_env_var('lib')
            self.set_path_env_var('include')

            # extend the MSVC path with the current path
            try:
                for p in string.split(os.environ['path'], ';'):
                    self.__paths.append(p)
            except KeyError:
                pass
            os.environ['path'] = string.join(self.__paths, ';')

        self.preprocess_options = None
        if self.__arch == "Intel":
            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GX' ,
                                     '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
                                          '/Z7', '/D_DEBUG']
        else:
            # Win64
            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' ,
                                     '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
                                          '/Z7', '/D_DEBUG']

        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
        if self.__version >= 7:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
                ]
        else:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
                ]
        self.ldflags_static = [ '/nologo']

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        """Map source file names to the object files they will produce."""
        # Copied from ccompiler.py, extended to return .res as 'object'-file
        # for .rc input file
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            (base, ext) = os.path.splitext (src_name)
            base = os.path.splitdrive(base)[1] # Chop off the drive
            base = base[os.path.isabs(base):]  # If abs, chop off leading /
            if ext not in self.src_extensions:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError ("Don't know how to compile %s" % src_name)
            if strip_dir:
                base = os.path.basename (base)
            if ext in self._rc_extensions:
                obj_names.append (os.path.join (output_dir,
                                                base + self.res_extension))
            elif ext in self._mc_extensions:
                obj_names.append (os.path.join (output_dir,
                                                base + self.res_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names

    # object_filenames ()

    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):
        """Compile the given sources, dispatching per file extension:
        C/C++ through cl.exe, .rc through the resource compiler, .mc
        through the message compiler (then the resource compiler).
        """
        if not self.initialized: self.initialize()
        macros, objects, extra_postargs, pp_opts, build = \
                self._setup_compile(output_dir, macros, include_dirs, sources,
                                    depends, extra_postargs)

        compile_opts = extra_preargs or []
        compile_opts.append ('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn ([self.rc] + pp_opts +
                                [output_opt] + [input_opt])
                except DistutilsExecError, msg:
                    raise CompileError, msg
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname (src)
                rc_dir = os.path.dirname (obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn ([self.mc] +
                                ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext (os.path.basename (src))
                    rc_file = os.path.join (rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn ([self.rc] +
                                ["/fo" + obj] + [rc_file])
                except DistutilsExecError, msg:
                    raise CompileError, msg
                continue
            else:
                # how to handle this file?
                raise CompileError (
                    "Don't know how to compile %s to %s" % \
                    (src, obj))

            output_opt = "/Fo" + obj
            try:
                self.spawn ([self.cc] + compile_opts + pp_opts +
                            [input_opt, output_opt] +
                            extra_postargs)
            except DistutilsExecError, msg:
                raise CompileError, msg

        return objects

    # compile ()

    def create_static_lib (self,
                           objects,
                           output_libname,
                           output_dir=None,
                           debug=0,
                           target_lang=None):
        """Bundle *objects* into a static library with lib.exe."""
        if not self.initialized: self.initialize()
        (objects, output_dir) = self._fix_object_args (objects, output_dir)
        output_filename = \
            self.library_filename (output_libname, output_dir=output_dir)

        if self._need_link (objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass                    # XXX what goes here?
            try:
                self.spawn ([self.lib] + lib_args)
            except DistutilsExecError, msg:
                raise LibError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # create_static_lib ()

    def link (self,
              target_desc,
              objects,
              output_filename,
              output_dir=None,
              libraries=None,
              library_dirs=None,
              runtime_library_dirs=None,
              export_symbols=None,
              debug=0,
              extra_preargs=None,
              extra_postargs=None,
              build_temp=None,
              target_lang=None):
        """Link *objects* into an executable or DLL with link.exe."""
        if not self.initialized: self.initialize()
        (objects, output_dir) = self._fix_object_args (objects, output_dir)
        (libraries, library_dirs, runtime_library_dirs) = \
            self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)

        if runtime_library_dirs:
            self.warn ("I don't know what to do with 'runtime_library_dirs': "
                       + str (runtime_library_dirs))

        lib_opts = gen_lib_options (self,
                                    library_dirs, runtime_library_dirs,
                                    libraries)
        if output_dir is not None:
            output_filename = os.path.join (output_dir, output_filename)

        if self._need_link (objects, output_filename):

            if target_desc == CCompiler.EXECUTABLE:
                if debug:
                    ldflags = self.ldflags_shared_debug[1:]
                else:
                    ldflags = self.ldflags_shared[1:]
            else:
                if debug:
                    ldflags = self.ldflags_shared_debug
                else:
                    ldflags = self.ldflags_shared

            export_opts = []
            for sym in (export_symbols or []):
                export_opts.append("/EXPORT:" + sym)

            ld_args = (ldflags + lib_opts + export_opts +
                       objects + ['/OUT:' + output_filename])

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            if export_symbols is not None:
                (dll_name, dll_ext) = os.path.splitext(
                    os.path.basename(output_filename))
                implib_file = os.path.join(
                    os.path.dirname(objects[0]),
                    self.library_filename(dll_name))
                ld_args.append ('/IMPLIB:' + implib_file)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath (os.path.dirname (output_filename))
            try:
                self.spawn ([self.linker] + ld_args)
            except DistutilsExecError, msg:
                raise LinkError, msg

        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # link ()

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option (self, dir):
        """Return the linker flag adding *dir* to the library search path."""
        return "/LIBPATH:" + dir

    def runtime_library_dir_option (self, dir):
        # MSVC has no rpath equivalent; always an error to ask for one.
        raise DistutilsPlatformError, \
              "don't know how to set runtime library search path for MSVC++"

    def library_option (self, lib):
        """Return the library file name to pass to the linker."""
        return self.library_filename (lib)

    def find_library_file (self, dirs, lib, debug=0):
        """Search *dirs* for *lib* (preferring the "_d" debug variant)."""
        # Prefer a debugging library if found (and requested), but deal
        # with it if we don't have one.
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename (name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # find_library_file ()

    # Helper methods for using the MSVC registry settings

    def find_exe(self, exe):
        """Return path to an MSVC executable program.

        Tries to find the program in several places: first, one of the
        MSVC program search paths from the registry; next, the directories
        in the PATH environment variable.  If any of those work, return an
        absolute path that is known to exist.  If none of them work, just
        return the original program name, 'exe'.
        """
        for p in self.__paths:
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        # didn't find it; try existing path
        for p in string.split(os.environ['Path'],';'):
            fn = os.path.join(os.path.abspath(p),exe)
            if os.path.isfile(fn):
                return fn

        return exe

    def get_msvc_paths(self, path, platform='x86'):
        """Get a list of devstudio directories (include, lib or path).

        Return a list of strings.  The list will be empty if unable to
        access the registry or appropriate registry keys not found.
        """
        if not _can_read_reg:
            return []

        path = path + " dirs"
        if self.__version >= 7:
            key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
                   % (self.__root, self.__version))
        else:
            key = (r"%s\6.0\Build System\Components\Platforms"
                   r"\Win32 (%s)\Directories" % (self.__root, platform))

        for base in HKEYS:
            d = read_values(base, key)
            if d:
                if self.__version >= 7:
                    return string.split(self.__macros.sub(d[path]), ";")
                else:
                    return string.split(d[path], ";")
        # MSVC 6 seems to create the registry entries we need only when
        # the GUI is run.
        if self.__version == 6:
            for base in HKEYS:
                if read_values(base, r"%s\6.0" % self.__root) is not None:
                    self.warn("It seems you have Visual Studio 6 installed, "
                        "but the expected registry settings are not present.\n"
                        "You must at least run the Visual Studio GUI once "
                        "so that these entries are created.")
                break
        return []

    def set_path_env_var(self, name):
        """Set environment variable 'name' to an MSVC path type value.

        This is equivalent to a SET command prior to execution of spawned
        commands.
        """
        if name == "lib":
            p = self.get_msvc_paths("library")
        else:
            p = self.get_msvc_paths(name)
        if p:
            os.environ[name] = string.join(p, ';')
| {
"content_hash": "01b930954fb153119838c428f001f568",
"timestamp": "",
"source": "github",
"line_count": 638,
"max_line_length": 111,
"avg_line_length": 35.88871473354232,
"alnum_prop": 0.5243918417259903,
"repo_name": "MalloyPower/parsing-python",
"id": "d2064a29f34b69827f422e1fbb9b88ebf5d81ebf",
"size": "22897",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-2.5/Lib/distutils/msvccompiler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
} |
import io
import json
import sqlite3
# Load the FCBH volume list (JSON) from disk.
# NOTE(review): 'input' shadows the builtin of the same name.
input = io.open("metadata/FCBH/volume_list.txt", mode="r", encoding="utf-8")
data = input.read()
try:
    volumes = json.loads(data)
except Exception, err:
    # NOTE(review): on parse failure 'volumes' stays undefined and the
    # loop below will raise NameError -- confirm this is acceptable.
    print "Could not parse volume_list.txt", str(err)
input.close()

# Collect the set of all dam_ids (asset ids) present in the volume list.
assetIds = set()
for volume in volumes:
    damId = volume["dam_id"]
    assetIds.add(damId)
print len(assetIds)

# Compare each Bible's OT/NT damId in Versions.db against the volume
# list and report which ones are missing.
db = sqlite3.connect('Versions.db')
cursor = db.cursor()
sql = "SELECT bibleId, code, abbr, iso3, otDamId, ntDamId FROM Bible ORDER BY code"
values = ()
cursor.execute(sql, values)
rows = cursor.fetchall()
for row in rows:
    if row[4] != None:
        # otDamId/ntDamId columns hold '/'-separated paths; the damId is
        # taken as the third component -- TODO confirm against the schema.
        otDamId = row[4].split("/")[2]
        if otDamId not in assetIds:
            print "missing", otDamId
        else:
            print "ok", otDamId
    if row[5] != None:
        ntDamId = row[5].split("/")[2]
        if ntDamId not in assetIds:
            print "missing", ntDamId
        else:
            print "ok", ntDamId
| {
"content_hash": "0b2aefdf477cd974632b0252ea9f0b13",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 83,
"avg_line_length": 20.46511627906977,
"alnum_prop": 0.675,
"repo_name": "garygriswold/Bible.js",
"id": "4c3d406d984eb2ec91154bf0d34bf00a6fca91d4",
"size": "1070",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Versions/Release.2.x.x/py/OBSOLETE/BibleRemoveNonPublic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "363488"
},
{
"name": "C#",
"bytes": "10971"
},
{
"name": "C++",
"bytes": "33464"
},
{
"name": "CSS",
"bytes": "150680"
},
{
"name": "HTML",
"bytes": "117756"
},
{
"name": "Java",
"bytes": "587962"
},
{
"name": "JavaScript",
"bytes": "1391307"
},
{
"name": "Objective-C",
"bytes": "857597"
},
{
"name": "PLpgSQL",
"bytes": "865"
},
{
"name": "Python",
"bytes": "55084"
},
{
"name": "Shell",
"bytes": "60975"
},
{
"name": "Swift",
"bytes": "846467"
}
],
"symlink_target": ""
} |
import requests
import shutil
import tempfile
import os
class Restclient(object):
    """Small REST client for the GWA-portal provider API.

    Every request authenticates with HTTP basic auth using the
    credentials supplied to the constructor.
    """

    def __init__(self, host, username, password):
        # host is the service base URL; auth is reused on every request.
        self.host = host
        self.auth = (username, password)

    def download_study_file(self, studyid, directory=None):
        """Download the HDF5 pvalue file for *studyid* into a temp file.

        Returns the path of the created file.  Raises Exception with the
        server's response text on any non-200 status.
        """
        headers = {'Accept': 'application/x-hdf'}
        URL = '%s/provider/study/%s/pvalues.hdf5' % (self.host, studyid)
        r = requests.get(URL, headers=headers, auth=self.auth, stream=True)
        if r.status_code == 200:
            fd, path = tempfile.mkstemp(suffix='.hdf5', dir=directory)
            # HDF5 is binary data: the descriptor must be opened in binary
            # mode ('wb').  The previous text mode ('w') corrupts the
            # payload on Windows and fails outright on Python 3.
            with os.fdopen(fd, 'wb') as f:
                r.raw.decode_content = True
                shutil.copyfileobj(r.raw, f)
            return path
        raise Exception(r.text)

    def get_candidate_genes(self, candidate_gene_id):
        """Return the decoded JSON gene list for *candidate_gene_id*."""
        headers = {'Accept': 'application/json'}
        URL = '%s/provider/candidategenelist/%s/genes' % (self.host, candidate_gene_id)
        r = requests.get(URL, headers=headers, auth=self.auth)
        if r.status_code == 200:
            return r.json()
        raise Exception(r.text)

    def get_phenotype_data(self, studyid):
        """Return the decoded JSON GWAS data for *studyid*."""
        headers = {'Accept': 'application/json'}
        URL = '%s/provider/study/%s/studygwasdata' % (self.host, studyid)
        r = requests.get(URL, headers=headers, auth=self.auth)
        if r.status_code == 200:
            return r.json()
        raise Exception(r.text)

    def upload_study_file(self, hdf5_file, studyid):
        """Upload *hdf5_file* as the stored data file of *studyid*.

        Returns None on success; raises Exception with the server's
        response text otherwise.
        """
        headers = {'Accept': 'application/json'}
        URL = '%s/provider/study/%s/store' % (self.host, studyid)
        # Open the upload inside a with-block so the file handle is always
        # closed (the original implementation leaked it).
        with open(hdf5_file, 'rb') as fh:
            files = {'file': ('%s.hdf5' % studyid, fh, 'application/x-hdf')}
            r = requests.post(URL, headers=headers, auth=self.auth, files=files)
        if r.status_code == 200:
            if r.json() == studyid:
                return
        raise Exception(r.text)
| {
"content_hash": "a73b4b63406dcc14925697320571e787",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 87,
"avg_line_length": 37.12,
"alnum_prop": 0.6007543103448276,
"repo_name": "timeu/gwaportal-analysis-pipeline",
"id": "bb742885db07fddd6ea8e1fb49114c9d60a7023b",
"size": "1856",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gwaportalpipeline/rest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16912"
}
],
"symlink_target": ""
} |
"""
Upload handlers to test the upload API.
"""
from django.core.files.uploadhandler import FileUploadHandler, StopUpload
class QuotaUploadHandler(FileUploadHandler):
    """Test upload handler that terminates the connection as soon as
    more than a fixed quota (5 MB) has been uploaded.
    """

    QUOTA = 5 * 2 ** 20  # quota in bytes: 5 MB

    def __init__(self, request=None):
        super(QuotaUploadHandler, self).__init__(request)
        # Running total of bytes received across all chunks.
        self.total_upload = 0

    def receive_data_chunk(self, raw_data, start):
        # Accumulate the chunk size; pass data through while under quota,
        # otherwise cut the connection.
        self.total_upload = self.total_upload + len(raw_data)
        if self.total_upload < self.QUOTA:
            return raw_data
        raise StopUpload(connection_reset=True)

    def file_complete(self, file_size):
        # Chunks were already forwarded; nothing to assemble here.
        return None
class CustomUploadError(Exception):
    """Exception raised by ErroringUploadHandler to simulate a failure."""
    pass
class ErroringUploadHandler(FileUploadHandler):
    """A handler that raises an exception."""

    def receive_data_chunk(self, raw_data, start):
        # Fail on the very first chunk to exercise error propagation.
        raise CustomUploadError("Oops!")
| {
"content_hash": "996cd93b3070f0660e12e71c060ae745",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 75,
"avg_line_length": 26.83783783783784,
"alnum_prop": 0.6404833836858006,
"repo_name": "yephper/django",
"id": "4fd6d1e1e426b9d8a1e1f2f818056e3f80913061",
"size": "993",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/file_uploads/uploadhandler.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "1538"
},
{
"name": "CSS",
"bytes": "1697381"
},
{
"name": "HTML",
"bytes": "390772"
},
{
"name": "Java",
"bytes": "588"
},
{
"name": "JavaScript",
"bytes": "3172126"
},
{
"name": "Makefile",
"bytes": "134"
},
{
"name": "PHP",
"bytes": "19336"
},
{
"name": "Python",
"bytes": "13365273"
},
{
"name": "Shell",
"bytes": "837"
},
{
"name": "Smarty",
"bytes": "133"
}
],
"symlink_target": ""
} |
"""
flask.helpers
~~~~~~~~~~~~~
Implements various helpers.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import pkgutil
import posixpath
import mimetypes
from time import time
from zlib import adler32
from threading import RLock
from werkzeug.routing import BuildError
from functools import update_wrapper
try:
from werkzeug.urls import url_quote
except ImportError:
from urlparse import quote as url_quote
from werkzeug.datastructures import Headers
from werkzeug.exceptions import NotFound
# this was moved in 0.7
try:
from werkzeug.wsgi import wrap_file
except ImportError:
from werkzeug.utils import wrap_file
from jinja2 import FileSystemLoader
from .signals import message_flashed
from .globals import session, _request_ctx_stack, _app_ctx_stack, \
current_app, request
from ._compat import string_types, text_type
# Sentinel object used to detect "no value given" without colliding with
# legitimate None values.
_missing = object()


# what separators does this operating system provide that are not a slash?
# this is used by the send_from_directory function to ensure that nobody is
# able to access files from outside the filesystem.
_os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep]
                    if sep not in (None, '/'))
def _endpoint_from_view_func(view_func):
"""Internal helper that returns the default endpoint for a given
function. This always is the function name.
"""
assert view_func is not None, 'expected view func if endpoint ' \
'is not provided.'
return view_func.__name__
def stream_with_context(generator_or_function):
    """Request contexts disappear when the response is started on the server.
    This is done for efficiency reasons and to make it less likely to encounter
    memory leaks with badly written WSGI middlewares.  The downside is that if
    you are using streamed responses, the generator cannot access request bound
    information any more.

    This function however can help you keep the context around for longer::

        from flask import stream_with_context, request, Response

        @app.route('/stream')
        def streamed_response():
            @stream_with_context
            def generate():
                yield 'Hello '
                yield request.args['name']
                yield '!'
            return Response(generate())

    Alternatively it can also be used around a specific generator::

        from flask import stream_with_context, request, Response

        @app.route('/stream')
        def streamed_response():
            def generate():
                yield 'Hello '
                yield request.args['name']
                yield '!'
            return Response(stream_with_context(generate()))

    .. versionadded:: 0.9
    """
    try:
        gen = iter(generator_or_function)
    except TypeError:
        # Used as a decorator on a function: defer the call and wrap the
        # resulting generator on invocation instead.
        def decorator(*args, **kwargs):
            gen = generator_or_function()
            return stream_with_context(gen)
        return update_wrapper(decorator, generator_or_function)

    def generator():
        ctx = _request_ctx_stack.top
        if ctx is None:
            raise RuntimeError('Attempted to stream with context but '
                'there was no context in the first place to keep around.')
        with ctx:
            # Dummy sentinel.  Has to be inside the context block or we're
            # not actually keeping the context around.
            yield None

            # The try/finally is here so that if someone passes a WSGI level
            # iterator in we're still running the cleanup logic.  Generators
            # don't need that because they are closed on their destruction
            # automatically.
            try:
                for item in gen:
                    yield item
            finally:
                if hasattr(gen, 'close'):
                    gen.close()

    # The trick is to start the generator.  Then the code execution runs until
    # the first dummy None is yielded at which point the context was already
    # pushed.  This item is discarded.  Then when the iteration continues the
    # real generator is executed.
    wrapped_g = generator()
    next(wrapped_g)
    return wrapped_g
def make_response(*args):
    """Convert whatever a view may return into a real response object.

    Views can return plain values that Flask converts to responses; this
    helper performs the same conversion eagerly so additional headers can
    be attached before returning::

        def index():
            response = make_response(render_template('index.html', foo=42))
            response.headers['X-Parachutes'] = 'parachutes are cool'
            return response

    It accepts the very same arguments a view function may return:

    - with no arguments, a new empty response is created,
    - with a single argument, that value is converted via
      :meth:`flask.Flask.make_response`,
    - with several arguments, they are forwarded as a tuple.

    This also forces the return value of a view function into a proper
    response, which is helpful with view decorators::

        response = make_response(view_function())
        response.headers['X-Parachutes'] = 'parachutes are cool'

    .. versionadded:: 0.6
    """
    if len(args) == 0:
        return current_app.response_class()
    payload = args[0] if len(args) == 1 else args
    return current_app.make_response(payload)
def url_for(endpoint, **values):
    """Generates a URL to the given endpoint with the method provided.

    Variable arguments that are unknown to the target endpoint are appended
    to the generated URL as query arguments.  If the value of a query argument
    is `None`, the whole pair is skipped.  In case blueprints are active
    you can shortcut references to the same blueprint by prefixing the
    local endpoint with a dot (``.``).

    This will reference the index function local to the current blueprint::

        url_for('.index')

    For more information, head over to the :ref:`Quickstart <url-building>`.

    To integrate applications, :class:`Flask` has a hook to intercept URL build
    errors through :attr:`Flask.url_build_error_handlers`.  The `url_for`
    function results in a :exc:`~werkzeug.routing.BuildError` when the current
    app does not have a URL for the given endpoint and values.  When it does, the
    :data:`~flask.current_app` calls its :attr:`~Flask.url_build_error_handlers` if
    it is not `None`, which can return a string to use as the result of
    `url_for` (instead of `url_for`'s default to raise the
    :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception.
    An example::

        def external_url_handler(error, endpoint, values):
            "Looks up an external URL when `url_for` cannot build a URL."
            # This is an example of hooking the build_error_handler.
            # Here, lookup_url is some utility function you've built
            # which looks up the endpoint in some external URL registry.
            url = lookup_url(endpoint, **values)
            if url is None:
                # External lookup did not have a URL.
                # Re-raise the BuildError, in context of original traceback.
                exc_type, exc_value, tb = sys.exc_info()
                if exc_value is error:
                    raise exc_type, exc_value, tb
                else:
                    raise error
            # url_for will use this result, instead of raising BuildError.
            return url

        app.url_build_error_handlers.append(external_url_handler)

    Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and
    `endpoint` and `values` are the arguments passed into `url_for`.  Note
    that this is for building URLs outside the current application, and not for
    handling 404 NotFound errors.

    .. versionadded:: 0.10
       The `_scheme` parameter was added.

    .. versionadded:: 0.9
       The `_anchor` and `_method` parameters were added.

    .. versionadded:: 0.9
       Calls :meth:`Flask.handle_build_error` on
       :exc:`~werkzeug.routing.BuildError`.

    :param endpoint: the endpoint of the URL (name of the function)
    :param values: the variable arguments of the URL rule
    :param _external: if set to `True`, an absolute URL is generated. Server
      address can be changed via `SERVER_NAME` configuration variable which
      defaults to `localhost`.
    :param _scheme: a string specifying the desired URL scheme. The `_external`
      parameter must be set to `True` or a `ValueError` is raised. The default
      behavior uses the same scheme as the current request, or
      ``PREFERRED_URL_SCHEME`` from the :ref:`app configuration <config>` if no
      request context is available. As of Werkzeug 0.10, this also can be set
      to an empty string to build protocol-relative URLs.
    :param _anchor: if provided this is added as anchor to the URL.
    :param _method: if provided this explicitly specifies an HTTP method.
    """
    appctx = _app_ctx_stack.top
    reqctx = _request_ctx_stack.top
    if appctx is None:
        raise RuntimeError('Attempted to generate a URL without the '
                           'application context being pushed. This has to be '
                           'executed when application context is available.')

    # If request specific information is available we have some extra
    # features that support "relative" urls.
    if reqctx is not None:
        url_adapter = reqctx.url_adapter
        blueprint_name = request.blueprint
        if not reqctx.request._is_old_module:
            if endpoint[:1] == '.':
                # Dotted shortcut: resolve relative to the active blueprint.
                if blueprint_name is not None:
                    endpoint = blueprint_name + endpoint
                else:
                    endpoint = endpoint[1:]
        else:
            # TODO: get rid of this deprecated functionality in 1.0
            if '.' not in endpoint:
                if blueprint_name is not None:
                    endpoint = blueprint_name + '.' + endpoint
            elif endpoint.startswith('.'):
                endpoint = endpoint[1:]
        external = values.pop('_external', False)

    # Otherwise go with the url adapter from the appctx and make
    # the urls external by default.
    else:
        url_adapter = appctx.url_adapter
        if url_adapter is None:
            raise RuntimeError('Application was not able to create a URL '
                               'adapter for request independent URL generation. '
                               'You might be able to fix this by setting '
                               'the SERVER_NAME config variable.')
        external = values.pop('_external', True)

    # Reserved keyword arguments are removed before the remaining values
    # are handed to the URL adapter as route/query parameters.
    anchor = values.pop('_anchor', None)
    method = values.pop('_method', None)
    scheme = values.pop('_scheme', None)
    appctx.app.inject_url_defaults(endpoint, values)
    if scheme is not None:
        if not external:
            raise ValueError('When specifying _scheme, _external must be True')
        url_adapter.url_scheme = scheme

    try:
        rv = url_adapter.build(endpoint, values, method=method,
                               force_external=external)
    except BuildError as error:
        # We need to inject the values again so that the app callback can
        # deal with that sort of stuff.
        values['_external'] = external
        values['_anchor'] = anchor
        values['_method'] = method
        return appctx.app.handle_url_build_error(error, endpoint, values)

    if anchor is not None:
        rv += '#' + url_quote(anchor)
    return rv
def get_template_attribute(template_name, attribute):
    """Load a macro (or variable) that a template exports.

    This makes it possible to invoke a template macro from Python code.
    Given a template ``_cider.html`` containing:

    .. sourcecode:: html+jinja

        {% macro hello(name) %}Hello {{ name }}!{% endmacro %}

    the macro can be fetched and called like this::

        hello = get_template_attribute('_cider.html', 'hello')
        return hello('World')

    .. versionadded:: 0.2

    :param template_name: the name of the template
    :param attribute: the name of the variable of macro to access
    """
    template_module = current_app.jinja_env.get_template(template_name).module
    return getattr(template_module, attribute)
def flash(message, category='message'):
    """Queue *message* in the session so the next request can display it.

    Templates retrieve and clear queued messages by calling
    :func:`get_flashed_messages`.

    .. versionchanged:: 0.3
       `category` parameter added.

    :param message: the message to be flashed.
    :param category: the category for the message.  The following values
                     are recommended: ``'message'`` for any kind of message,
                     ``'error'`` for errors, ``'info'`` for information
                     messages and ``'warning'`` for warnings.  However any
                     kind of string can be used as category.
    """
    # The '_flashes' key is re-assigned (rather than mutated in place)
    # because session implementations that keep their values in external
    # storage only notice changes on assignment.
    queued = session.get('_flashes', [])
    session['_flashes'] = queued + [(category, message)]
    message_flashed.send(current_app._get_current_object(),
                         message=message, category=category)
def get_flashed_messages(with_categories=False, category_filter=[]):
    """Pull all flashed messages from the session and return them.

    Repeated calls within the same request return the same messages,
    because the list is cached on the request context the first time.
    By default just the message texts are returned; with
    ``with_categories=True`` each entry is a ``(category, message)``
    tuple instead.  ``category_filter`` restricts the result to the
    given categories, which allows rendering categories in separate
    html blocks.  The two arguments are independent of each other:

    * `with_categories` controls whether categories are returned with message
      text (`True` gives a tuple, where `False` gives just the message text).
    * `category_filter` filters the messages down to only those matching the
      provided categories.

    See :ref:`message-flashing-pattern` for examples.

    .. versionchanged:: 0.3
       `with_categories` parameter added.

    .. versionchanged:: 0.9
       `category_filter` parameter added.

    :param with_categories: set to `True` to also receive categories.
    :param category_filter: whitelist of categories to limit return values
    """
    ctx = _request_ctx_stack.top
    if ctx.flashes is None:
        # Drain the session exactly once per request and cache the result
        # on the request context for any later calls.
        ctx.flashes = session.pop('_flashes') if '_flashes' in session else []
    messages = ctx.flashes
    if category_filter:
        messages = [entry for entry in messages if entry[0] in category_filter]
    if with_categories:
        return messages
    return [entry[1] for entry in messages]
def send_file(filename_or_fp, mimetype=None, as_attachment=False,
              attachment_filename=None, add_etags=True,
              cache_timeout=None, conditional=False):
    """Sends the contents of a file to the client. This will use the
    most efficient method available and configured. By default it will
    try to use the WSGI server's file_wrapper support. Alternatively
    you can set the application's :attr:`~Flask.use_x_sendfile` attribute
    to ``True`` to directly emit an `X-Sendfile` header. This however
    requires support of the underlying webserver for `X-Sendfile`.
    By default it will try to guess the mimetype for you, but you can
    also explicitly provide one. For extra security you probably want
    to send certain files as attachment (HTML for instance). The mimetype
    guessing requires a `filename` or an `attachment_filename` to be
    provided.
    Please never pass filenames to this function from user sources without
    checking them first. Something like this is usually sufficient to
    avoid security problems::
        if '..' in filename or filename.startswith('/'):
            abort(404)
    .. versionadded:: 0.2
    .. versionadded:: 0.5
       The `add_etags`, `cache_timeout` and `conditional` parameters were
       added. The default behavior is now to attach etags.
    .. versionchanged:: 0.7
       mimetype guessing and etag support for file objects was
       deprecated because it was unreliable. Pass a filename if you are
       able to, otherwise attach an etag yourself. This functionality
       will be removed in Flask 1.0
    .. versionchanged:: 0.9
       cache_timeout pulls its default from application config, when None.
    :param filename_or_fp: the filename of the file to send. This is
                           relative to the :attr:`~Flask.root_path` if a
                           relative path is specified.
                           Alternatively a file object might be provided
                           in which case `X-Sendfile` might not work and
                           fall back to the traditional method. Make sure
                           that the file pointer is positioned at the start
                           of data to send before calling :func:`send_file`.
    :param mimetype: the mimetype of the file if provided, otherwise
                     auto detection happens.
    :param as_attachment: set to `True` if you want to send this file with
                          a ``Content-Disposition: attachment`` header.
    :param attachment_filename: the filename for the attachment if it
                                differs from the file's filename.
    :param add_etags: set to `False` to disable attaching of etags.
    :param conditional: set to `True` to enable conditional responses.
    :param cache_timeout: the timeout in seconds for the headers. When `None`
                          (default), this value is set by
                          :meth:`~Flask.get_send_file_max_age` of
                          :data:`~flask.current_app`.
    """
    # Modification time of the underlying file, if known; feeds the
    # Last-Modified header further below.
    mtime = None
    if isinstance(filename_or_fp, string_types):
        filename = filename_or_fp
        file = None
    else:
        from warnings import warn
        file = filename_or_fp
        filename = getattr(file, 'name', None)
        # XXX: this behavior is now deprecated because it was unreliable.
        # removed in Flask 1.0
        if not attachment_filename and not mimetype \
           and isinstance(filename, string_types):
            warn(DeprecationWarning('The filename support for file objects '
                'passed to send_file is now deprecated. Pass an '
                'attach_filename if you want mimetypes to be guessed.'),
                stacklevel=2)
        if add_etags:
            warn(DeprecationWarning('In future flask releases etags will no '
                'longer be generated for file objects passed to the send_file '
                'function because this behavior was unreliable. Pass '
                'filenames instead if possible, otherwise attach an etag '
                'yourself based on another value'), stacklevel=2)
    # Relative filenames are resolved against the application root.
    if filename is not None:
        if not os.path.isabs(filename):
            filename = os.path.join(current_app.root_path, filename)
    if mimetype is None and (filename or attachment_filename):
        mimetype = mimetypes.guess_type(filename or attachment_filename)[0]
    if mimetype is None:
        mimetype = 'application/octet-stream'
    headers = Headers()
    if as_attachment:
        if attachment_filename is None:
            if filename is None:
                raise TypeError('filename unavailable, required for '
                                'sending as attachment')
            attachment_filename = os.path.basename(filename)
        headers.add('Content-Disposition', 'attachment',
                    filename=attachment_filename)
    # X-Sendfile hands the actual transfer over to the web server; we only
    # emit headers, so any open file object is closed here.
    if current_app.use_x_sendfile and filename:
        if file is not None:
            file.close()
        headers['X-Sendfile'] = filename
        headers['Content-Length'] = os.path.getsize(filename)
        data = None
    else:
        if file is None:
            file = open(filename, 'rb')
            mtime = os.path.getmtime(filename)
            headers['Content-Length'] = os.path.getsize(filename)
        data = wrap_file(request.environ, file)
    rv = current_app.response_class(data, mimetype=mimetype, headers=headers,
                                    direct_passthrough=True)
    # if we know the file modification date, we can store it as the
    # the time of the last modification.
    if mtime is not None:
        rv.last_modified = int(mtime)
    rv.cache_control.public = True
    if cache_timeout is None:
        cache_timeout = current_app.get_send_file_max_age(filename)
    if cache_timeout is not None:
        rv.cache_control.max_age = cache_timeout
        rv.expires = int(time() + cache_timeout)
    if add_etags and filename is not None:
        try:
            # NOTE: the etag is derived from path metadata (mtime, size and
            # an adler32 of the file *name*), not from the file contents.
            rv.set_etag('flask-%s-%s-%s' % (
                os.path.getmtime(filename),
                os.path.getsize(filename),
                adler32(
                    filename.encode('utf-8') if isinstance(filename, text_type)
                    else filename
                ) & 0xffffffff
            ))
        except OSError:
            warn('Access %s failed, maybe it does not exist, so ignore etags in '
                 'headers' % filename, stacklevel=2)
    if conditional:
        rv = rv.make_conditional(request)
        # make sure we don't send x-sendfile for servers that
        # ignore the 304 status code for x-sendfile.
        if rv.status_code == 304:
            rv.headers.pop('x-sendfile', None)
    return rv
def safe_join(directory, filename):
    """Safely join `directory` and an untrusted `filename`.

    Example usage::

        @app.route('/wiki/<path:filename>')
        def wiki_page(filename):
            filename = safe_join(app.config['WIKI_FOLDER'], filename)
            with open(filename, 'rb') as fd:
                content = fd.read()  # Read and process the file content...

    :param directory: the base directory.
    :param filename: the untrusted filename relative to that directory.
    :raises: :class:`~werkzeug.exceptions.NotFound` if the resulting path
             would fall out of `directory`.
    """
    normalized = posixpath.normpath(filename)
    # After normpath a well-formed relative path contains only '/';
    # any alternative platform separator is rejected outright.
    if any(sep in normalized for sep in _os_alt_seps):
        raise NotFound()
    escapes_base = (os.path.isabs(normalized)
                    or normalized == '..'
                    or normalized.startswith('../'))
    if escapes_base:
        raise NotFound()
    return os.path.join(directory, normalized)
def send_from_directory(directory, filename, **options):
    """Send a file from a given directory with :func:`send_file`.

    This is a secure way to quickly expose static files from an upload
    folder or something similar.

    Example usage::

        @app.route('/uploads/<path:filename>')
        def download_file(filename):
            return send_from_directory(app.config['UPLOAD_FOLDER'],
                                       filename, as_attachment=True)

    .. admonition:: Sending files and Performance

       It is strongly recommended to activate either `X-Sendfile` support in
       your webserver or (if no authentication happens) to tell the webserver
       to serve files for the given path on its own without calling into the
       web application for improved performance.

    .. versionadded:: 0.5

    :param directory: the directory where all the files are stored.
    :param filename: the filename relative to that directory to
                     download.
    :param options: optional keyword arguments that are directly
                    forwarded to :func:`send_file`.
    """
    # safe_join rejects any path that would escape the directory.
    path = safe_join(directory, filename)
    if not os.path.isabs(path):
        path = os.path.join(current_app.root_path, path)
    if not os.path.isfile(path):
        raise NotFound()
    # Conditional (304) responses are the sensible default for static files.
    options.setdefault('conditional', True)
    return send_file(path, **options)
def get_root_path(import_name):
    """Return the filesystem path of a package or module, falling back
    to the current working directory when it cannot be determined.

    Not to be confused with the package path returned by :func:`find_package`.
    """
    # Cheapest case: the module is already imported and knows its file.
    module = sys.modules.get(import_name)
    if module is not None and hasattr(module, '__file__'):
        return os.path.dirname(os.path.abspath(module.__file__))

    # Otherwise ask the import system for a loader.
    loader = pkgutil.get_loader(import_name)

    # No loader, or an (interactive) main module without path information:
    # fall back to the current working directory.
    if loader is None or import_name == '__main__':
        return os.getcwd()

    if hasattr(loader, 'get_filename'):
        filepath = loader.get_filename(import_name)
    else:
        # For .egg, zipimporter does not have get_filename until Python 2.7;
        # some other loaders behave the same.  Fall back to importing.
        __import__(import_name)
        filepath = getattr(sys.modules[import_name], '__file__', None)

    # A missing filepath usually means a namespace package or an import
    # hook that provides no file name information.
    if filepath is None:
        raise RuntimeError('No root path can be found for the provided '
                           'module "%s". This can happen because the '
                           'module came from an import hook that does '
                           'not provide file name information or because '
                           'it\'s a namespace package. In this case '
                           'the root path needs to be explicitly '
                           'provided.' % import_name)

    # filepath is import_name.py for a module, or __init__.py for a package.
    return os.path.dirname(os.path.abspath(filepath))
def _matching_loader_thinks_module_is_package(loader, mod_name):
    """Figure out whether *mod_name*, as loaded by *loader*, is a package."""
    # The straightforward route: PEP 302 loaders can answer directly.
    if hasattr(loader, 'is_package'):
        return loader.is_package(mod_name)
    # importlib's namespace loaders do not implement is_package, but every
    # module they load is a package by definition.
    loader_cls = loader.__class__
    if (loader_cls.__module__ == '_frozen_importlib'
            and loader_cls.__name__ == 'NamespaceLoader'):
        return True
    # Anything else is an unsupported import hook; fail loudly.
    raise AttributeError(
        ('%s.is_package() method is missing but is required by Flask of '
         'PEP 302 import hooks. If you do not use import hooks and '
         'you encounter this error please file a bug against Flask.') %
        loader_cls.__name__)
def find_package(import_name):
    """Finds a package and returns the prefix (or None if the package is
    not installed) as well as the folder that contains the package or
    module as a tuple. The package path returned is the module that would
    have to be added to the pythonpath in order to make it possible to
    import the module. The prefix is the path below which a UNIX like
    folder structure exists (lib, share etc.).
    """
    # Only the root of a dotted import name determines where the package
    # lives on disk.
    root_mod_name = import_name.split('.')[0]
    loader = pkgutil.get_loader(root_mod_name)
    if loader is None or import_name == '__main__':
        # import name is not found, or interactive/main module
        package_path = os.getcwd()
    else:
        # For .egg, zipimporter does not have get_filename until Python 2.7.
        if hasattr(loader, 'get_filename'):
            filename = loader.get_filename(root_mod_name)
        elif hasattr(loader, 'archive'):
            # zipimporter's loader.archive points to the .egg or .zip
            # archive filename is dropped in call to dirname below.
            filename = loader.archive
        else:
            # At least one loader is missing both get_filename and archive:
            # Google App Engine's HardenedModulesHook
            #
            # Fall back to imports.
            __import__(import_name)
            filename = sys.modules[import_name].__file__
        package_path = os.path.abspath(os.path.dirname(filename))
        # In case the root module is a package we need to chop of the
        # rightmost part. This needs to go through a helper function
        # because of python 3.3 namespace packages.
        if _matching_loader_thinks_module_is_package(
                loader, root_mod_name):
            package_path = os.path.dirname(package_path)
    site_parent, site_folder = os.path.split(package_path)
    py_prefix = os.path.abspath(sys.prefix)
    # Installed straight under the interpreter prefix (system install).
    if package_path.startswith(py_prefix):
        return py_prefix, package_path
    elif site_folder.lower() == 'site-packages':
        parent, folder = os.path.split(site_parent)
        # Windows like installations
        if folder.lower() == 'lib':
            base_dir = parent
        # UNIX like installations
        elif os.path.basename(parent).lower() == 'lib':
            base_dir = os.path.dirname(parent)
        else:
            base_dir = site_parent
        return base_dir, package_path
    # Not under a recognised prefix (e.g. a source checkout): no prefix.
    return None, package_path
class locked_cached_property(object):
    """A decorator turning a method into a lazily computed, cached
    property.  The wrapped function runs once on first access; the result
    is stored on the instance and served on every later access.  Works
    like the one in Werkzeug but has a lock for thread safety.
    """

    def __init__(self, func, name=None, doc=None):
        self.__name__ = name or func.__name__
        self.__module__ = func.__module__
        self.__doc__ = doc or func.__doc__
        self.func = func
        self.lock = RLock()

    def __get__(self, obj, type=None):
        # Class-level access returns the descriptor itself.
        if obj is None:
            return self
        with self.lock:
            cached = obj.__dict__.get(self.__name__, _missing)
            if cached is _missing:
                # First access: compute and stash on the instance.
                cached = self.func(obj)
                obj.__dict__[self.__name__] = cached
            return cached
class _PackageBoundObject(object):
    """Base for objects that are bound to a Python package or module,
    giving them a root path plus template/static folder handling
    relative to that root.
    """

    def __init__(self, import_name, template_folder=None, root_path=None):
        #: The name of the package or module. Do not change this once
        #: it was set by the constructor.
        self.import_name = import_name
        #: location of the templates. `None` if templates should not be
        #: exposed.
        self.template_folder = template_folder
        if root_path is None:
            root_path = get_root_path(self.import_name)
        #: Where is the app root located?
        self.root_path = root_path
        # Backing fields for the static_folder / static_url_path properties.
        self._static_folder = None
        self._static_url_path = None

    def _get_static_folder(self):
        # Absolute path of the static folder, or None when unset.
        if self._static_folder is not None:
            return os.path.join(self.root_path, self._static_folder)
    def _set_static_folder(self, value):
        self._static_folder = value
    static_folder = property(_get_static_folder, _set_static_folder)
    del _get_static_folder, _set_static_folder

    def _get_static_url_path(self):
        # Defaults to '/<basename of the static folder>' when not set
        # explicitly.
        if self._static_url_path is None:
            if self.static_folder is None:
                return None
            return '/' + os.path.basename(self.static_folder)
        return self._static_url_path
    def _set_static_url_path(self, value):
        self._static_url_path = value
    static_url_path = property(_get_static_url_path, _set_static_url_path)
    del _get_static_url_path, _set_static_url_path

    @property
    def has_static_folder(self):
        """This is `True` if the package bound object's container has a
        folder named ``'static'``.
        .. versionadded:: 0.5
        """
        return self.static_folder is not None

    @locked_cached_property
    def jinja_loader(self):
        """The Jinja loader for this package bound object.
        .. versionadded:: 0.5
        """
        # No template folder configured means no loader (returns None).
        if self.template_folder is not None:
            return FileSystemLoader(os.path.join(self.root_path,
                                                 self.template_folder))

    def get_send_file_max_age(self, filename):
        """Provides default cache_timeout for the :func:`send_file` functions.
        By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from
        the configuration of :data:`~flask.current_app`.
        Static file functions such as :func:`send_from_directory` use this
        function, and :func:`send_file` calls this function on
        :data:`~flask.current_app` when the given cache_timeout is `None`. If a
        cache_timeout is given in :func:`send_file`, that timeout is used;
        otherwise, this method is called.
        This allows subclasses to change the behavior when sending files based
        on the filename. For example, to set the cache timeout for .js files
        to 60 seconds::
            class MyFlask(flask.Flask):
                def get_send_file_max_age(self, name):
                    if name.lower().endswith('.js'):
                        return 60
                    return flask.Flask.get_send_file_max_age(self, name)
        .. versionadded:: 0.9
        """
        return current_app.config['SEND_FILE_MAX_AGE_DEFAULT']

    def send_static_file(self, filename):
        """Function used internally to send static files from the static
        folder to the browser.
        .. versionadded:: 0.5
        """
        if not self.has_static_folder:
            raise RuntimeError('No static folder for this object')
        # Ensure get_send_file_max_age is called in all cases.
        # Here, we ensure get_send_file_max_age is called for Blueprints.
        cache_timeout = self.get_send_file_max_age(filename)
        return send_from_directory(self.static_folder, filename,
                                   cache_timeout=cache_timeout)

    def open_resource(self, resource, mode='rb'):
        """Opens a resource from the application's resource folder. To see
        how this works, consider the following folder structure::
            /myapplication.py
            /schema.sql
            /static
                /style.css
            /templates
                /layout.html
                /index.html
        If you want to open the `schema.sql` file you would do the
        following::
            with app.open_resource('schema.sql') as f:
                contents = f.read()
                do_something_with(contents)
        :param resource: the name of the resource. To access resources within
                         subfolders use forward slashes as separator.
        :param mode: resource file opening mode, default is 'rb'.
        """
        # Only read modes make sense for packaged resources.
        if mode not in ('r', 'rb'):
            raise ValueError('Resources can only be opened for reading')
        return open(os.path.join(self.root_path, resource), mode)
| {
"content_hash": "fde7e0942f6e5b74c531d970af8aae3c",
"timestamp": "",
"source": "github",
"line_count": 902,
"max_line_length": 83,
"avg_line_length": 40.45676274944567,
"alnum_prop": 0.628740545873068,
"repo_name": "FinixLei/flask",
"id": "9c85fa00e4a5b602b529ee81bd710e1312d11ef8",
"size": "36516",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "flask/helpers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from django.core.management.base import BaseCommand, CommandError
from ctd.models import CtdVariable, CtdSampleVolume, CtdBottleTrigger
from main.models import CtdCast, Person, Event, Leg
import csv
from django.db.utils import IntegrityError
from django.core.exceptions import ObjectDoesNotExist
import glob
import os
import re
# This file is part of https://github.com/cpina/science-cruise-data-management
#
# This project was programmed in a hurry without any prior Django experience,
# while circumnavigating the Antarctic on the ACE expedition, without proper
# Internet access, with 150 scientists using the system and doing at the same
# cruise other data management and system administration tasks.
#
# Sadly there aren't unit tests and we didn't have time to refactor the code
# during the cruise, which is really needed.
#
# Carles Pina (carles@pina.cat) and Jen Thomas (jenny_t152@yahoo.co.uk), 2016-2017.
class Command(BaseCommand):
    """Import one CTD sample sheet, or every sheet inside a directory."""
    help = 'Import CTD sheets'

    def add_arguments(self, parser):
        # Path to either a directory of sheets or a single sheet file.
        parser.add_argument('basedirectory', type=str)

    def handle(self, *args, **options):
        target = options['basedirectory']
        if not os.path.isdir(target):
            # A single sheet was given instead of a directory.
            import_ctd_sheet(target)
            return
        for sheet_path in glob.glob(os.path.join(target, "*")):
            import_ctd_sheet(sheet_path)
def read_all(csv_file):
    """Materialize an iterable of CSV rows into a list.

    The sheet is scanned several times during import, but a ``csv.reader``
    is single-pass, so it is drained into a list up front.

    :param csv_file: any iterable of rows (e.g. a ``csv.reader``).
    :returns: list of rows, in order.
    """
    # list() drains the iterator at C speed instead of a manual append loop.
    return list(csv_file)
def col_letter_to_index(column):
    """Convert a single spreadsheet column letter ('A'..'Z', any case)
    to a zero-based index.  Only single letters are supported.
    """
    return ord(column.upper()) - ord('A')
def import_ctd_variables(all_file):
    """Create a CtdVariable row for every variable column of the sheet.

    Walks the header row rightwards from the first data column until the
    "available vol (l)" column, which marks the end of the variables.
    NOTE(review): relies on that sentinel column being present; otherwise
    str_row_col would eventually raise IndexError — confirm against sheets.
    """
    column = first_column_data(all_file)
    row = row_for_depth(all_file)
    while True:
        variable_name = str_row_col(all_file, row, column).lower()
        if variable_name == "available vol (l)" or variable_name == "available volume (l)":
            break
        ctd_variable = CtdVariable()
        ctd_variable.name = variable_name
        try:
            ctd_variable.save()
        except IntegrityError:
            # Variable already imported from an earlier sheet; skip it.
            pass
        # Advance one column letter; like col_letter_to_index this only
        # works up to column 'Z'.
        column = chr(ord(column)+1)
def row_for_depth(all_file):
    """Return the 1-based row number of the sheet's header row.

    The header row is identified by a depth label ("Depth (m)",
    "Trigger depth (m)" or "Actual depth (m)") in its first cell,
    compared case- and whitespace-insensitively.

    :param all_file: the sheet as a list of rows.
    :raises ValueError: if no header row is found (previously a bare
        ``assert False``, which is silently stripped under ``python -O``).
    """
    depth_labels = ("depth(m)", "triggerdepth(m)", "actualdepth(m)")
    for row_number, row in enumerate(all_file, start=1):
        if row[0].replace(" ", "").lower() in depth_labels:
            return row_number
    raise ValueError("no depth header row found in CTD sheet")
def calculate_column_index_for_comment(all_file):
    """Return the 0-based column index of the "Comments" cell in the
    header row (the row located by :func:`row_for_depth`).

    :raises ValueError: if the header row has no "Comments" cell.
        (Previously this printed a message and hit a bare ``assert
        False``, which is silently stripped under ``python -O``.)
    """
    headers_row = row_for_depth(all_file)
    header_cells = all_file[headers_row - 1]
    for index, cell in enumerate(header_cells):
        if cell == "Comments":
            return index
    raise ValueError("Comments column not found in CTD sheet header")
def column_for_niskin_number(all_file):
    """Return the column letter of the "Niskin #" header cell.

    If the header row has no such cell, the letter just past the last
    header column is returned (the original fall-through behaviour).
    """
    header_cells = all_file[row_for_depth(all_file) - 1]
    letter = 'A'
    for cell in header_cells:
        if cell == "Niskin #":
            break
        letter = chr(ord(letter) + 1)
    return letter
def first_column_data(all_file):
    """Return the column letter of the first data column.

    Heuristic on the header row: the first non-empty cell directly
    following an empty one, or the column right after a "Bottle
    Integrity" header.  Falls through to the letter one past the last
    header column.
    """
    header_cells = all_file[row_for_depth(all_file) - 1]
    previous = None
    for index, cell in enumerate(header_cells):
        # Order matters: a non-empty cell after an empty one wins even if
        # that cell happens to be "Bottle Integrity".
        if cell != "" and previous == "":
            return chr(ord('A') + index)
        if cell == "Bottle Integrity":
            return chr(ord('A') + index + 1)
        previous = cell
    return chr(ord('A') + len(header_cells))
def import_ctd_sample_variables(all_file, ctd_cast):
    """Import bottle triggers and per-variable sample volumes for a cast.

    Starting from the first data row, walks down the sheet one Niskin
    bottle per row, creating a CtdBottleTrigger per bottle and a
    CtdSampleVolume per variable column ('x' = sampled, volume unknown;
    '' = not sampled for that variable).
    """
    column_niskin_numbers = column_for_niskin_number(all_file)
    # Niskin number of the first data row; rows are assumed to advance by
    # one bottle each (row = header row + niskin).
    niskin = last_int_row_col(all_file, row_for_depth(all_file)+1, column_niskin_numbers)
    while True:
        row = row_for_depth(all_file) + niskin
        # NOTE(review): this cell access happens before the
        # `row >= len(all_file)` bounds check below, so a sheet ending
        # right after the last bottle row may raise IndexError here —
        # confirm against real sheets.
        niskin_cell_contents = str_row_col(all_file, row, column_niskin_numbers)
        # some niskin bottles might have comments
        niskin_cell_contents = re.sub("[^0-9]", "", niskin_cell_contents)
        assert str(niskin) == str(niskin_cell_contents)
        # NOTE(review): the chained comparison `a != a != b` below expands
        # to `(a != a) and (a != b)` and is therefore always False, so only
        # the bounds check and the empty-cell check can end the loop.
        # Presumably `last_str_row_col(...) != str(niskin)` was intended —
        # left unchanged pending confirmation.
        if row >= len(all_file) or \
           last_str_row_col(all_file, row, 'C') != last_str_row_col(all_file, row, 'C') != str(niskin) or \
           last_str_row_col(all_file, row, 'C') == "":
            break
        ctd_bottle_trigger = CtdBottleTrigger()
        ctd_bottle_trigger.ctd_cast = ctd_cast
        # Column A: depth actually triggered; column B: planned depth.
        depth_triggered = all_file[row][col_letter_to_index('A')]
        depth_planned = all_file[row][col_letter_to_index('B')]
        column_index_for_comment = calculate_column_index_for_comment(all_file)
        comment = all_file[row][column_index_for_comment]
        try:
            depth = int(depth_triggered)
        except ValueError:
            try:
                depth = int(depth_planned)
            except ValueError:
                # end of sample sheet
                break
        ctd_bottle_trigger.depth = depth
        ctd_bottle_trigger.niskin = niskin
        if comment != "":
            ctd_bottle_trigger.comment = comment
        try:
            ctd_bottle_trigger.save()
        except IntegrityError:
            # Trigger imported earlier: reuse it and refresh the comment.
            ctd_bottle_trigger = CtdBottleTrigger.objects.filter(ctd_cast=ctd_cast).filter(depth=depth).filter(niskin=niskin)[0]
            ctd_bottle_trigger.comment = comment
            ctd_bottle_trigger.save()
        col_index = col_letter_to_index(first_column_data(all_file))
        while True:
            volume = all_file[row][col_index]
            variable = all_file[row_for_depth(all_file)-1][col_index]
            lowercase_variable = variable.lower()
            # The "available vol" column marks the end of the variables.
            if lowercase_variable == "available vol (l)" or lowercase_variable == "available volume (l)":
                break
            ctd_sample_volume = CtdSampleVolume()
            ctd_sample_volume.ctd_bottle_trigger = ctd_bottle_trigger
            ctd_variable = CtdVariable.objects.get(name=variable)
            ctd_sample_volume.ctd_variable = ctd_variable
            if volume == 'x':
                # Sampled, but volume not recorded.
                volume = None
                ctd_sample_volume_to_be_saved = True
            elif volume == '':
                # Not sampled for this variable.
                volume = None
                ctd_sample_volume_to_be_saved = False
            else:
                volume = float(volume)
                ctd_sample_volume_to_be_saved = True
            ctd_sample_volume.volume = volume
            if ctd_sample_volume_to_be_saved:
                ctd_sample_volume.ctd_bottle_trigger = ctd_bottle_trigger
                try:
                    ctd_sample_volume.save()
                except IntegrityError:
                    # NOTE(review): this rebinds a *queryset* (not a row)
                    # and sets .volume on it without saving — looks like a
                    # latent bug; confirm intended behaviour.
                    ctd_sample_volume = CtdSampleVolume.objects.filter(ctd_bottle_trigger=ctd_bottle_trigger).filter(ctd_variable=ctd_variable)
                    ctd_sample_volume.volume = volume
            col_index += 1
        niskin += 1
def text_to_person(text):
    """Stub that always returns ``None``; free-text person lookup was
    never implemented.  The *text* argument is ignored.
    """
    return None
def last_str_row_col(all_file, row, col):
    """Return the last space-separated token of the cell at 1-based
    *row* and single-letter column *col* (e.g. "Cast # 7" -> "7").
    """
    assert row - 1 >= 0
    col_index = ord(col.upper()) - ord('A')  # inlined col_letter_to_index
    assert col_index >= 0
    cell = all_file[row - 1][col_index]
    return cell.split(" ")[-1]
def str_row_col(all_file, row, col):
    """Return the raw cell at 1-based *row* and single-letter column *col*."""
    assert row - 1 >= 0
    col_index = ord(col.upper()) - ord('A')  # inlined col_letter_to_index
    assert col_index >= 0
    return all_file[row - 1][col_index]
def last_int_row_col(all_file, row, col):
    """Return the last space-separated token of the cell at *row*/*col*
    as an integer.

    :raises AssertionError: if the token is not a valid integer (matching
        the original's assert-on-failure behaviour, but with a message
        and without converting the cell twice).
    """
    token = last_str_row_col(all_file, row, col)
    try:
        return int(token)
    except ValueError:
        raise AssertionError(
            "expected an integer at row %s column %s, got %r" % (row, col, token))
def name_to_person(name):
    """Map a hand-written operator name from a CTD sheet to a Person.

    The name is normalised (spaces removed, lowercased) before matching.
    Returns ``None`` for blank cells or unfilled underscore placeholders.

    :raises AssertionError: for any name with no known mapping.
    """
    name = name.replace(" ", "").lower()
    print(name)
    if "_________" in name:
        # Placeholder line left unfilled on the sheet.
        return None
    elif name == "thalia" or name == "tahlia":
        name_last = "Henry"
    elif name == "mnh":
        name_last = "Houssais"
    elif name == "jenny":
        # BUG FIX: this previously compared against "Jenny", which could
        # never match a string that was just lowercased.
        name_last = "Hutchings"
    elif name == "":
        return None
    else:
        print("Can't find the name _{}_".format(name))
        assert False
    return Person.objects.get(name_last=name_last)
def find_string(all_csv, label, none_if_not_found=False):
    """Scan every cell for one starting with *label* and return the rest
    of that cell, stripped of surrounding whitespace.

    :param all_csv: the sheet as a sequence of rows of string cells.
    :param label: prefix to look for, e.g. ``"Event #"``.
    :param none_if_not_found: if True, return ``None`` instead of raising
        when no cell matches.
    :raises ValueError: if nothing matches and *none_if_not_found* is
        False.  (Previously a bare ``assert False``, which is silently
        stripped under ``python -O``.)
    """
    for row in all_csv:
        for cell in row:
            if cell.startswith(label):
                return cell[len(label):].strip()
    if none_if_not_found:
        return None
    raise ValueError("label %r not found in CTD sheet" % label)
def create_ctd_cast(all_file, filename):
    """Create and save a CtdCast from the sheet's header fields.

    The leg number is inferred from the sheet's filename ("leg1",
    "leg2" or "leg3", case-insensitively).

    :param all_file: the sheet as a list of rows.
    :param filename: path of the sheet, used to infer the leg.
    :returns: the (saved) CtdCast instance.
    :raises ValueError: when no leg marker appears in the filename.
    """
    ctd_cast = CtdCast()
    ctd_cast.ctd_cast_number = find_string(all_file, 'Cast #')
    ctd_cast.event_number = Event.objects.all().get(number=find_string(all_file, "Event #"))
    ctd_cast.ctd_operator = name_to_person(find_string(all_file, "CTD Operator"))
    ctd_cast.ctd_file_name = find_string(all_file, "CTD file:", none_if_not_found=True)
    lowered = filename.lower()
    # BUG FIX: the first test used a bare `if`, so "leg1" filenames also
    # fell through the following if/elif/else chain and hit the old
    # `assert False`.  The whole thing is now one if/elif/else chain, and
    # the bare assert is replaced by an explicit exception.
    if "leg1" in lowered:
        ctd_cast.leg_number = Leg.objects.all().get(number=1)
    elif "leg2" in lowered:
        ctd_cast.leg_number = Leg.objects.all().get(number=2)
    elif "leg3" in lowered:
        ctd_cast.leg_number = Leg.objects.all().get(number=3)
    else:
        raise ValueError("cannot infer leg number from filename %r" % filename)
    try:
        ctd_cast.save()
    except IntegrityError:
        print("Integrity error:", ctd_cast)
    return ctd_cast
def import_ctd_sheet(filename):
    """Import one CTD sample sheet (CSV): variables, the cast, and all
    bottle triggers / sample volumes.

    :param filename: path of the CSV sheet.
    """
    print("Filename:", filename)
    # BUG FIX: the file handle was previously opened and never closed;
    # the context manager guarantees it is released.
    with open(filename, "r") as fp:
        all_file = read_all(csv.reader(fp))
    import_ctd_variables(all_file)
    event_number = find_string(all_file, "Event #")
    try:
        ctd_cast = CtdCast.objects.get(event_number=event_number)
    except ObjectDoesNotExist:
        ctd_cast = create_ctd_cast(all_file, filename)
    import_ctd_sample_variables(all_file, ctd_cast)
| {
"content_hash": "368fc9eea3113bc5fda7cf283f47db3f",
"timestamp": "",
"source": "github",
"line_count": 329,
"max_line_length": 143,
"avg_line_length": 29.544072948328267,
"alnum_prop": 0.608127572016461,
"repo_name": "cpina/science-cruise-data-management",
"id": "634bd924ab6c010b425264d9ab2ddc2015435a77",
"size": "9720",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ScienceCruiseDataManagement/ctd/management/commands/importctdsheets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "59966"
},
{
"name": "HTML",
"bytes": "50774"
},
{
"name": "JavaScript",
"bytes": "106205"
},
{
"name": "Python",
"bytes": "548151"
},
{
"name": "Shell",
"bytes": "106"
}
],
"symlink_target": ""
} |
"""Network Hosts are responsible for allocating ips and setting up network.
There are multiple backend drivers that handle specific types of networking
topologies. All of the network commands are issued to a subclass of
:class:`NetworkManager`.
"""
import datetime
import functools
import itertools
import math
import re
import uuid
import eventlet
import netaddr
from oslo.config import cfg
from oslo import messaging
from oslo.utils import excutils
from oslo.utils import importutils
from oslo.utils import netutils
from oslo.utils import strutils
from oslo.utils import timeutils
from nova import conductor
from nova import context
from nova import exception
from nova.i18n import _, _LE, _LW
from nova import ipv6
from nova import manager
from nova.network import api as network_api
from nova.network import driver
from nova.network import floating_ips
from nova.network import model as network_model
from nova.network import rpcapi as network_rpcapi
from nova import objects
from nova.objects import base as obj_base
from nova.objects import quotas as quotas_obj
from nova.openstack.common import log as logging
from nova.openstack.common import periodic_task
from nova.openstack.common import uuidutils
from nova import servicegroup
from nova import utils
LOG = logging.getLogger(__name__)
# Option definitions for nova-network: bridge/interface names, address
# pool sizing, VPN ports, DHCP behaviour and the L3 backend library.
network_opts = [
    cfg.StrOpt('flat_network_bridge',
               help='Bridge for simple network instances'),
    cfg.StrOpt('flat_network_dns',
               default='8.8.4.4',
               help='DNS server for simple network'),
    cfg.BoolOpt('flat_injected',
                default=False,
                help='Whether to attempt to inject network setup into guest'),
    cfg.StrOpt('flat_interface',
               help='FlatDhcp will bridge into this interface if set'),
    cfg.IntOpt('vlan_start',
               default=100,
               help='First VLAN for private networks'),
    cfg.StrOpt('vlan_interface',
               help='VLANs will bridge into this interface if set'),
    cfg.IntOpt('num_networks',
               default=1,
               help='Number of networks to support'),
    cfg.StrOpt('vpn_ip',
               default='$my_ip',
               help='Public IP for the cloudpipe VPN servers'),
    cfg.IntOpt('vpn_start',
               default=1000,
               help='First Vpn port for private networks'),
    cfg.IntOpt('network_size',
               default=256,
               help='Number of addresses in each private subnet'),
    cfg.StrOpt('fixed_range_v6',
               default='fd00::/48',
               help='Fixed IPv6 address block'),
    cfg.StrOpt('gateway',
               help='Default IPv4 gateway'),
    cfg.StrOpt('gateway_v6',
               help='Default IPv6 gateway'),
    cfg.IntOpt('cnt_vpn_clients',
               default=0,
               help='Number of addresses reserved for vpn clients'),
    cfg.IntOpt('fixed_ip_disassociate_timeout',
               default=600,
               help='Seconds after which a deallocated IP is disassociated'),
    cfg.IntOpt('create_unique_mac_address_attempts',
               default=5,
               help='Number of attempts to create unique mac address'),
    cfg.BoolOpt('fake_call',
                default=False,
                help='If True, skip using the queue and make local calls'),
    cfg.BoolOpt('teardown_unused_network_gateway',
                default=False,
                help='If True, unused gateway devices (VLAN and bridge) are '
                     'deleted in VLAN network mode with multi hosted '
                     'networks'),
    cfg.BoolOpt('force_dhcp_release',
                default=True,
                help='If True, send a dhcp release on instance termination'),
    cfg.BoolOpt('update_dns_entries',
                default=False,
                help='If True, when a DNS entry must be updated, it sends a '
                     'fanout cast to all network hosts to update their DNS '
                     'entries in multi host mode'),
    cfg.IntOpt("dns_update_periodic_interval",
               default=-1,
               help='Number of seconds to wait between runs of updates to DNS '
                    'entries.'),
    cfg.StrOpt('dhcp_domain',
               default='novalocal',
               help='Domain to use for building the hostnames'),
    cfg.StrOpt('l3_lib',
               default='nova.network.l3.LinuxNetL3',
               help="Indicates underlying L3 management library"),
]

CONF = cfg.CONF
CONF.register_opts(network_opts)
# Options defined in other nova modules that this manager also reads.
CONF.import_opt('use_ipv6', 'nova.netconf')
CONF.import_opt('my_ip', 'nova.netconf')
CONF.import_opt('network_topic', 'nova.network.rpcapi')
CONF.import_opt('fake_network', 'nova.network.linux_net')
CONF.import_opt('share_dhcp_address', 'nova.objects.network')
CONF.import_opt('network_device_mtu', 'nova.objects.network')
class RPCAllocateFixedIP(object):
    """Mixin class originally for FlatDHCP and VLAN network managers.

    Used since they share code to RPC.call allocate_fixed_ip on the
    correct network host to configure dnsmasq.
    """
    # Replaced with a servicegroup.API() instance by the concrete
    # manager's __init__ (see NetworkManager.__init__ below).
    servicegroup_api = None
    def _allocate_fixed_ips(self, context, instance_id, host, networks,
                            **kwargs):
        """Calls allocate_fixed_ip once for each network.

        Allocations for networks hosted on another network host are
        spawned as green threads against that host's RPC API; local ones
        run inline.  Returns only after every spawned allocation finished.
        """
        green_threads = []
        vpn = kwargs.get('vpn')
        requested_networks = kwargs.get('requested_networks')
        # Map of network uuid -> explicitly requested fixed address (if any).
        addresses_by_network = {}
        if requested_networks is not None:
            for request in requested_networks:
                addresses_by_network[request.network_id] = request.address
        for network in networks:
            if 'uuid' in network and network['uuid'] in addresses_by_network:
                address = addresses_by_network[network['uuid']]
            else:
                address = None
            # NOTE(vish): if we are not multi_host pass to the network host
            # NOTE(tr3buchet): but if we are, host came from instance['host']
            if not network['multi_host']:
                host = network['host']
            # NOTE(vish): if there is no network host, set one
            if host is None:
                network_p = obj_base.obj_to_primitive(network)
                host = self.network_rpcapi.set_network_host(context,
                                                            network_p)
            if host != self.host:
                # need to call allocate_fixed_ip to correct network host
                green_threads.append(eventlet.spawn(
                    self.network_rpcapi._rpc_allocate_fixed_ip,
                    context, instance_id, network['id'], address, vpn,
                    host))
            else:
                # i am the correct host, run here
                self.allocate_fixed_ip(context, instance_id, network,
                                       vpn=vpn, address=address)
        # wait for all of the allocates (if any) to finish
        for gt in green_threads:
            gt.wait()
    def _rpc_allocate_fixed_ip(self, context, instance_id, network_id,
                               **kwargs):
        """Sits in between _allocate_fixed_ips and allocate_fixed_ip to
        perform network lookup on the far side of rpc.
        """
        network = self._get_network_by_id(context, network_id)
        return self.allocate_fixed_ip(context, instance_id, network, **kwargs)
    def deallocate_fixed_ip(self, context, address, host=None, teardown=True,
                            instance=None):
        """Call the superclass deallocate_fixed_ip if i'm the correct host
        otherwise call to the correct host
        """
        fixed_ip = objects.FixedIP.get_by_address(
            context, address, expected_attrs=['network'])
        network = fixed_ip.network
        # NOTE(vish): if we are not multi_host pass to the network host
        # NOTE(tr3buchet): but if we are, host came from instance['host']
        if not network.multi_host:
            host = network.host
        if host == self.host:
            # NOTE(vish): deallocate the fixed ip locally
            return super(RPCAllocateFixedIP, self).deallocate_fixed_ip(context,
                    address, instance=instance)
        if network.multi_host:
            service = objects.Service.get_by_host_and_topic(
                context, host, CONF.network_topic)
            if not service or not self.servicegroup_api.service_is_up(service):
                # NOTE(vish): deallocate the fixed ip locally but don't
                #             teardown network devices
                return super(RPCAllocateFixedIP, self).deallocate_fixed_ip(
                    context, address, teardown=False, instance=instance)
        # The owning network host is alive; let it release the address and
        # tear down its local dhcp state via RPC.
        self.network_rpcapi.deallocate_fixed_ip(context, address, host,
                                                instance)
class NetworkManager(manager.Manager):
"""Implements common network manager functionality.
This class must be subclassed to support specific topologies.
host management:
hosts configure themselves for networks they are assigned to in the
table upon startup. If there are networks in the table which do not
have hosts, those will be filled in and have hosts configured
as the hosts pick them up one at time during their periodic task.
The one at a time part is to flatten the layout to help scale
"""
target = messaging.Target(version='1.13')
# If True, this manager requires VIF to create a bridge.
SHOULD_CREATE_BRIDGE = False
# If True, this manager requires VIF to create VLAN tag.
SHOULD_CREATE_VLAN = False
# if True, this manager leverages DHCP
DHCP = False
timeout_fixed_ips = True
required_create_args = []
    def __init__(self, network_driver=None, *args, **kwargs):
        """Load the network driver and the DNS, RPC, quota and L3 helpers.

        :param network_driver: optional dotted path of the network driver;
            the configured default is used when None.

        An ``l3_lib`` kwarg may override CONF.l3_lib to choose the L3
        management library.
        """
        self.driver = driver.load_network_driver(network_driver)
        self.instance_dns_manager = importutils.import_object(
            CONF.instance_dns_manager)
        self.instance_dns_domain = CONF.instance_dns_domain
        self.floating_dns_manager = importutils.import_object(
            CONF.floating_ip_dns_manager)
        self.network_api = network_api.API()
        self.network_rpcapi = network_rpcapi.NetworkAPI()
        self.conductor_api = conductor.API()
        self.servicegroup_api = servicegroup.API()
        l3_lib = kwargs.get("l3_lib", CONF.l3_lib)
        self.l3driver = importutils.import_object(l3_lib)
        # Class used to reserve/commit/rollback fixed-ip quota (see
        # allocate_fixed_ip / deallocate_fixed_ip).
        self.quotas_cls = objects.Quotas
        super(NetworkManager, self).__init__(service_name='network',
                                             *args, **kwargs)
@staticmethod
def _uses_shared_ip(network):
shared = network.get('share_address') or CONF.share_dhcp_address
return not network.get('multi_host') or shared
    @utils.synchronized('get_dhcp')
    def _get_dhcp_ip(self, context, network_ref, host=None):
        """Get the proper dhcp address to listen on."""
        # NOTE(vish): If we are sharing the dhcp_address then we can just
        #             return the dhcp_server from the database.
        if self._uses_shared_ip(network_ref):
            return network_ref.get('dhcp_server') or network_ref['gateway']
        if not host:
            host = self.host
        network_id = network_ref['id']
        try:
            # Reuse this host's existing fixed IP on the network, if any...
            fip = objects.FixedIP.get_by_network_and_host(context,
                                                          network_id,
                                                          host)
            return fip.address
        except exception.FixedIpNotFoundForNetworkHost:
            # ...otherwise claim a fresh one from the pool for this host.
            elevated = context.elevated()
            fip = objects.FixedIP.associate_pool(elevated,
                                                 network_id,
                                                 host=host)
            return fip.address
def get_dhcp_leases(self, ctxt, network_ref):
"""Broker the request to the driver to fetch the dhcp leases."""
LOG.debug('Get DHCP leases for network %s', network_ref['uuid'])
return self.driver.get_dhcp_leases(ctxt, network_ref)
def init_host(self):
"""Do any initialization that needs to be run if this is a
standalone service.
"""
# NOTE(vish): Set up networks for which this host already has
# an ip address.
ctxt = context.get_admin_context()
for network in objects.NetworkList.get_by_host(ctxt, self.host):
LOG.debug('Setup network %s on host %s', network['uuid'],
self.host)
self._setup_network_on_host(ctxt, network)
if CONF.update_dns_entries:
LOG.debug('Update DNS on network %s for host %s',
network['uuid'], self.host)
dev = self.driver.get_dev(network)
self.driver.update_dns(ctxt, dev, network)
@periodic_task.periodic_task
def _disassociate_stale_fixed_ips(self, context):
if self.timeout_fixed_ips:
now = timeutils.utcnow()
timeout = CONF.fixed_ip_disassociate_timeout
time = now - datetime.timedelta(seconds=timeout)
num = objects.FixedIP.disassociate_all_by_timeout(context,
self.host,
time)
if num:
LOG.debug('Disassociated %s stale fixed ip(s)', num)
def set_network_host(self, context, network_ref):
"""Safely sets the host of the network."""
if not isinstance(network_ref, obj_base.NovaObject):
network_ref = objects.Network._from_db_object(
context, objects.Network(), network_ref)
LOG.debug('Setting host %s for network %s', self.host,
network_ref['uuid'], context=context)
network_ref.host = self.host
network_ref.save()
return self.host
    def _do_trigger_security_group_members_refresh_for_instance(self,
                                                                instance_id):
        """Refresh security group member rules for *instance_id*.

        Rebuilds the instance's network info cache first so the refreshed
        rules see the instance's current IP addresses.
        """
        # NOTE(francois.charlier): the instance may have been deleted already
        # thus enabling `read_deleted`
        admin_context = context.get_admin_context(read_deleted='yes')
        instance = objects.Instance.get_by_uuid(admin_context, instance_id)
        try:
            # NOTE(vish): We need to make sure the instance info cache has been
            #             updated with new ip info before we trigger the
            #             security group refresh. This is somewhat inefficient
            #             but avoids doing some dangerous refactoring for a
            #             bug fix.
            nw_info = self.get_instance_nw_info(admin_context, instance_id,
                                                None, None)
            ic = objects.InstanceInfoCache.new(admin_context, instance_id)
            ic.network_info = nw_info
            ic.save(update_cells=False)
        except exception.InstanceInfoCacheNotFound:
            # Cache row is gone (instance deleted); still refresh the groups.
            pass
        groups = instance.security_groups
        group_ids = [group.id for group in groups]
        self.conductor_api.security_groups_trigger_members_refresh(
            admin_context, group_ids)
    # NOTE(hanlind): This method can be removed in version 2.0 of the RPC API
    def get_instance_uuids_by_ip_filter(self, context, filters):
        """Return [{'instance_uuid': ..., 'ip': ...}] entries matching filters.

        ``filters`` may contain:
          * 'fixed_ip' -- exact fixed IP address string;
          * 'ip'       -- regex matched against fixed and floating addresses;
          * 'ip6'      -- regex matched against derived IPv6 addresses.
        """
        fixed_ip_filter = filters.get('fixed_ip')
        ip_filter = re.compile(str(filters.get('ip')))
        ipv6_filter = re.compile(str(filters.get('ip6')))
        LOG.debug('Get instance uuids by IP filters. Fixed IP filter: %s. '
                  'IP filter: %s. IPv6 filter: %s', fixed_ip_filter,
                  str(filters.get('ip')), str(filters.get('ip6')))
        # NOTE(jkoelker) Should probably figure out a better way to do
        #                this. But for now it "works", this could suck on
        #                large installs.
        vifs = objects.VirtualInterfaceList.get_all(context)
        results = []
        for vif in vifs:
            if vif.instance_uuid is None:
                continue
            network = self._get_network_by_id(context, vif.network_id)
            fixed_ipv6 = None
            if network['cidr_v6'] is not None:
                # Derive the instance's v6 address from its MAC + v6 prefix.
                fixed_ipv6 = ipv6.to_global(network['cidr_v6'],
                                            vif.address,
                                            context.project_id)
            if fixed_ipv6 and ipv6_filter.match(fixed_ipv6):
                results.append({'instance_uuid': vif.instance_uuid,
                                'ip': fixed_ipv6})
            fixed_ips = objects.FixedIPList.get_by_virtual_interface_id(
                context, vif.id)
            for fixed_ip in fixed_ips:
                if not fixed_ip or not fixed_ip.address:
                    continue
                # Exact fixed-ip match short-circuits the regex checks.
                if str(fixed_ip.address) == fixed_ip_filter:
                    results.append({'instance_uuid': vif.instance_uuid,
                                    'ip': fixed_ip.address})
                    continue
                if ip_filter.match(str(fixed_ip.address)):
                    results.append({'instance_uuid': vif.instance_uuid,
                                    'ip': fixed_ip.address})
                    continue
                for floating_ip in fixed_ip.floating_ips:
                    if not floating_ip or not floating_ip.address:
                        continue
                    if ip_filter.match(str(floating_ip.address)):
                        results.append({'instance_uuid': vif.instance_uuid,
                                        'ip': floating_ip.address})
                        # This only advances the floating-ip loop, so one
                        # fixed ip can contribute several floating matches.
                        continue
        return results
def _get_networks_for_instance(self, context, instance_id, project_id,
requested_networks=None):
"""Determine & return which networks an instance should connect to."""
# TODO(tr3buchet) maybe this needs to be updated in the future if
# there is a better way to determine which networks
# a non-vlan instance should connect to
if requested_networks is not None and len(requested_networks) != 0:
network_uuids = [request.network_id
for request in requested_networks]
networks = self._get_networks_by_uuids(context, network_uuids)
else:
try:
networks = objects.NetworkList.get_all(context)
except exception.NoNetworksFound:
return []
# return only networks which are not vlan networks
return [network for network in networks if not network.vlan]
    def allocate_for_instance(self, context, **kwargs):
        """Handles allocating the various network resources for an instance.

        rpc.called by network_api

        Allocates vifs (MAC addresses) and fixed IPs on every network the
        instance should attach to, optionally pushes DNS updates, and
        returns the resulting network info list.
        """
        instance_uuid = kwargs['instance_id']
        if not uuidutils.is_uuid_like(instance_uuid):
            # Legacy callers pass a DB id in 'instance_id'; the real uuid
            # then arrives in 'instance_uuid'.
            instance_uuid = kwargs.get('instance_uuid')
        host = kwargs['host']
        project_id = kwargs['project_id']
        rxtx_factor = kwargs['rxtx_factor']
        requested_networks = kwargs.get('requested_networks')
        if (requested_networks and
                not isinstance(requested_networks,
                               objects.NetworkRequestList)):
            # Coerce legacy (net_id, address) tuples into request objects.
            requested_networks = objects.NetworkRequestList(
                objects=[objects.NetworkRequest.from_tuple(t)
                         for t in requested_networks])
        vpn = kwargs['vpn']
        macs = kwargs['macs']
        admin_context = context.elevated()
        LOG.debug("Allocate network for instance", instance_uuid=instance_uuid,
                  context=context)
        networks = self._get_networks_for_instance(context,
                                        instance_uuid, project_id,
                                        requested_networks=requested_networks)
        networks_list = [self._get_network_dict(network)
                         for network in networks]
        LOG.debug('Networks retrieved for instance: |%s|',
                  networks_list, context=context, instance_uuid=instance_uuid)
        try:
            self._allocate_mac_addresses(admin_context, instance_uuid,
                                         networks, macs)
        except Exception:
            with excutils.save_and_reraise_exception():
                # If we fail to allocate any one mac address, clean up all
                # allocated VIFs
                objects.VirtualInterface.delete_by_instance_uuid(
                    context, instance_uuid)
        self._allocate_fixed_ips(admin_context, instance_uuid,
                                 host, networks, vpn=vpn,
                                 requested_networks=requested_networks)
        if CONF.update_dns_entries:
            network_ids = [network['id'] for network in networks]
            self.network_rpcapi.update_dns(context, network_ids)
        return self.get_instance_nw_info(admin_context, instance_uuid,
                                         rxtx_factor, host)
def deallocate_for_instance(self, context, **kwargs):
"""Handles deallocating various network resources for an instance.
rpc.called by network_api
kwargs can contain fixed_ips to circumvent another db lookup
"""
# NOTE(francois.charlier): in some cases the instance might be
# deleted before the IPs are released, so we need to get deleted
# instances too
read_deleted_context = context.elevated(read_deleted='yes')
if 'instance' in kwargs:
instance = kwargs['instance']
instance_uuid = instance.uuid
host = instance.host
else:
instance_id = kwargs['instance_id']
if uuidutils.is_uuid_like(instance_id):
instance = objects.Instance.get_by_uuid(
read_deleted_context, instance_id)
else:
instance = objects.Instance.get_by_id(
read_deleted_context, instance_id)
# NOTE(russellb) in case instance_id was an ID and not UUID
instance_uuid = instance.uuid
host = kwargs.get('host')
try:
requested_networks = kwargs.get('requested_networks')
if requested_networks:
# NOTE(obondarev): Temporary and transitional
if isinstance(requested_networks, objects.NetworkRequestList):
requested_networks = requested_networks.as_tuples()
fixed_ips = [ip for (net_id, ip) in requested_networks]
else:
fixed_ip_list = objects.FixedIPList.get_by_instance_uuid(
read_deleted_context, instance_uuid)
fixed_ips = [str(ip.address) for ip in fixed_ip_list]
except exception.FixedIpNotFoundForInstance:
fixed_ips = []
LOG.debug("Network deallocation for instance",
context=context, instance_uuid=instance_uuid)
# deallocate fixed ips
for fixed_ip in fixed_ips:
self.deallocate_fixed_ip(context, fixed_ip, host=host,
instance=instance)
if CONF.update_dns_entries:
network_ids = [fixed_ip.network_id for fixed_ip in fixed_ips]
self.network_rpcapi.update_dns(context, network_ids)
# deallocate vifs (mac addresses)
objects.VirtualInterface.delete_by_instance_uuid(
read_deleted_context, instance_uuid)
    @messaging.expected_exceptions(exception.InstanceNotFound)
    def get_instance_nw_info(self, context, instance_id, rxtx_factor,
                             host, instance_uuid=None, **kwargs):
        """Creates network info list for instance.

        called by allocate_for_instance and network_api
        context needs to be elevated
        :returns: network info list [(network,info),(network,info)...]
        where network = dict containing pertinent data from a network db object
        and info = dict containing pertinent networking data
        """
        # Normalize: from here on instance_id/instance_uuid both hold the uuid.
        if not uuidutils.is_uuid_like(instance_id):
            instance_id = instance_uuid
        instance_uuid = instance_id
        LOG.debug('Get instance network info', instance_uuid=instance_uuid)
        try:
            fixed_ips = objects.FixedIPList.get_by_instance_uuid(
                context, instance_uuid)
        except exception.FixedIpNotFoundForInstance:
            fixed_ips = []
        nw_info = network_model.NetworkInfo()
        # Group fixed ips by vif uuid: one VIF model entry per interface.
        vifs = {}
        for fixed_ip in fixed_ips:
            vif = fixed_ip.virtual_interface
            if not vif:
                continue
            if not fixed_ip.network:
                continue
            if vif.uuid in vifs:
                current = vifs[vif.uuid]
            else:
                current = {
                    'id': vif.uuid,
                    'type': network_model.VIF_TYPE_BRIDGE,
                    'address': vif.address,
                }
                vifs[vif.uuid] = current
            net_dict = self._get_network_dict(fixed_ip.network)
            network = network_model.Network(**net_dict)
            subnets = self._get_subnets_from_network(context,
                                                     fixed_ip.network,
                                                     host)
            network['subnets'] = subnets
            current['network'] = network
            try:
                current['rxtx_cap'] = (fixed_ip.network['rxtx_base'] *
                                       rxtx_factor)
            except (TypeError, KeyError):
                # rxtx_base or rxtx_factor missing/None: no cap recorded.
                pass
            if fixed_ip.network.cidr_v6 and vif.address:
                # NOTE(vish): I strongy suspect the v6 subnet is not used
                #             anywhere, but support it just in case
                # add the v6 address to the v6 subnet
                address = ipv6.to_global(fixed_ip.network.cidr_v6,
                                         vif.address,
                                         fixed_ip.network.project_id)
                model_ip = network_model.FixedIP(address=address)
                # subnets[1] is the v6 subnet (see _get_subnets_from_network).
                current['network']['subnets'][1]['ips'].append(model_ip)
            # add the v4 address to the v4 subnet
            model_ip = network_model.FixedIP(address=str(fixed_ip.address))
            for ip in fixed_ip.floating_ips:
                floating_ip = network_model.IP(address=str(ip['address']),
                                               type='floating')
                model_ip.add_floating_ip(floating_ip)
            current['network']['subnets'][0]['ips'].append(model_ip)
        for vif in vifs.values():
            nw_info.append(network_model.VIF(**vif))
        LOG.debug('Built network info: |%s|', nw_info)
        return nw_info
@staticmethod
def _get_network_dict(network):
"""Returns the dict representing necessary and meta network fields."""
# get generic network fields
network_dict = {'id': network['uuid'],
'bridge': network['bridge'],
'label': network['label'],
'tenant_id': network['project_id']}
# get extra information
if network.get('injected'):
network_dict['injected'] = network['injected']
return network_dict
@staticmethod
def _extract_subnets(network):
"""Returns information about the IPv4 and IPv6 subnets
associated with a Neutron Network UUID.
"""
subnet_v4 = {
'network_id': network.uuid,
'cidr': network.cidr,
'gateway': network.gateway,
'dhcp_server': getattr(network, 'dhcp_server'),
'broadcast': network.broadcast,
'netmask': network.netmask,
'version': 4,
'dns1': network.dns1,
'dns2': network.dns2}
# TODO(tr3buchet): I'm noticing we've assumed here that all dns is v4.
# this is probably bad as there is no way to add v6
# dns to nova
subnet_v6 = {
'network_id': network.uuid,
'cidr': network.cidr_v6,
'gateway': network.gateway_v6,
'dhcp_server': None,
'broadcast': None,
'netmask': network.netmask_v6,
'version': 6,
'dns1': None,
'dns2': None}
def ips_to_strs(net):
for key, value in net.items():
if isinstance(value, netaddr.ip.BaseIP):
net[key] = str(value)
return net
return [ips_to_strs(subnet_v4), ips_to_strs(subnet_v6)]
    def _get_subnets_from_network(self, context, network, instance_host=None):
        """Returns the 1 or 2 possible subnets for a nova network.

        Index 0 is always the v4 subnet and index 1 the v6 subnet (see
        _extract_subnets); get_instance_nw_info relies on this ordering.
        """
        extracted_subnets = self._extract_subnets(network)
        subnets = []
        for subnet in extracted_subnets:
            subnet_dict = {'cidr': subnet['cidr'],
                           'gateway': network_model.IP(
                               address=subnet['gateway'],
                               type='gateway')}
            # deal with dhcp
            if self.DHCP:
                if network.get('multi_host'):
                    dhcp_server = self._get_dhcp_ip(context, network,
                                                    instance_host)
                else:
                    # NOTE(review): passing the subnet dict here relies on
                    # _uses_shared_ip() treating it as non-multi-host and
                    # returning its 'dhcp_server' or 'gateway' entry --
                    # confirm this is intentional.
                    dhcp_server = self._get_dhcp_ip(context, subnet)
                subnet_dict['dhcp_server'] = dhcp_server
            subnet_object = network_model.Subnet(**subnet_dict)
            # add dns info
            for k in ['dns1', 'dns2']:
                if subnet.get(k):
                    subnet_object.add_dns(
                        network_model.IP(address=subnet[k], type='dns'))
            subnet_object['ips'] = []
            subnets.append(subnet_object)
        return subnets
def _allocate_mac_addresses(self, context, instance_uuid, networks, macs):
"""Generates mac addresses and creates vif rows in db for them."""
# make a copy we can mutate
if macs is not None:
available_macs = set(macs)
for network in networks:
if macs is None:
self._add_virtual_interface(context, instance_uuid,
network['id'])
else:
try:
mac = available_macs.pop()
except KeyError:
raise exception.VirtualInterfaceCreateException()
self._add_virtual_interface(context, instance_uuid,
network['id'], mac)
def _add_virtual_interface(self, context, instance_uuid, network_id,
mac=None):
attempts = 1 if mac else CONF.create_unique_mac_address_attempts
for i in range(attempts):
try:
vif = objects.VirtualInterface(context)
vif.address = mac or utils.generate_mac_address()
vif.instance_uuid = instance_uuid
vif.network_id = network_id
vif.uuid = str(uuid.uuid4())
vif.create()
return vif
except exception.VirtualInterfaceCreateException:
# Try again up to max number of attempts
pass
raise exception.VirtualInterfaceMacAddressException()
def add_fixed_ip_to_instance(self, context, instance_id, host, network_id,
rxtx_factor=None):
"""Adds a fixed ip to an instance from specified network."""
if uuidutils.is_uuid_like(network_id):
network = self.get_network(context, network_id)
else:
network = self._get_network_by_id(context, network_id)
LOG.debug('Add fixed ip on network %s', network['uuid'],
instance_uuid=instance_id)
self._allocate_fixed_ips(context, instance_id, host, [network])
return self.get_instance_nw_info(context, instance_id, rxtx_factor,
host)
# NOTE(russellb) This method can be removed in 2.0 of this API. It is
# deprecated in favor of the method in the base API.
def get_backdoor_port(self, context):
"""Return backdoor port for eventlet_backdoor."""
return self.backdoor_port
    def remove_fixed_ip_from_instance(self, context, instance_id, host,
                                      address, rxtx_factor=None):
        """Removes a fixed ip from an instance from specified network.

        :raises: FixedIpNotFoundForSpecificInstance when the instance does
            not actually own *address*.
        """
        LOG.debug('Remove fixed ip %s', address, instance_uuid=instance_id)
        fixed_ips = objects.FixedIPList.get_by_instance_uuid(context,
                                                             instance_id)
        for fixed_ip in fixed_ips:
            if str(fixed_ip.address) == address:
                self.deallocate_fixed_ip(context, address, host)
                # NOTE(vish): this probably isn't a dhcp ip so just
                #             deallocate it now. In the extremely rare
                #             case that this is a race condition, we
                #             will just get a warn in lease or release.
                if not fixed_ip.leased:
                    fixed_ip.disassociate()
                return self.get_instance_nw_info(context, instance_id,
                                                 rxtx_factor, host)
        raise exception.FixedIpNotFoundForSpecificInstance(
            instance_uuid=instance_id, ip=address)
def _validate_instance_zone_for_dns_domain(self, context, instance):
if not self.instance_dns_domain:
return True
instance_domain = self.instance_dns_domain
domainref = objects.DNSDomain.get_by_domain(context, instance_domain)
if domainref is None:
LOG.warning(_LW('instance-dns-zone not found |%s|.'),
instance_domain, instance=instance)
return True
dns_zone = domainref.availability_zone
instance_zone = instance.get('availability_zone')
if dns_zone and (dns_zone != instance_zone):
LOG.warning(_LW('instance-dns-zone is |%(domain)s|, '
'which is in availability zone |%(zone)s|. '
'Instance is in zone |%(zone2)s|. '
'No DNS record will be created.'),
{'domain': instance_domain,
'zone': dns_zone,
'zone2': instance_zone},
instance=instance)
return False
else:
return True
    def allocate_fixed_ip(self, context, instance_id, network, **kwargs):
        """Gets a fixed ip from the pool.

        Reserves quota, associates (or pool-allocates) the address, ensures
        a vif exists, refreshes security group members, creates DNS entries
        and plumbs the network on this host.  On any failure every step
        completed so far is undone via the *cleanup* stack.

        :returns: the allocated address as a string, or None when the
            network has no v4 cidr.
        """
        # TODO(vish): when this is called by compute, we can associate compute
        #             with a network, or a cluster of computes with a network
        #             and use that network here with a method like
        #             network_get_by_compute_host
        address = None
        # NOTE(vish) This db query could be removed if we pass az and name
        #            (or the whole instance object).
        instance = objects.Instance.get_by_uuid(context, instance_id)
        LOG.debug('Allocate fixed ip on network %s', network['uuid'],
                  instance=instance)
        # A list of cleanup functions to call on error
        cleanup = []
        # Check the quota; can't put this in the API because we get
        # called into from other places
        quotas = self.quotas_cls()
        quota_project, quota_user = quotas_obj.ids_from_instance(context,
                                                                 instance)
        try:
            quotas.reserve(context, fixed_ips=1, project_id=quota_project,
                           user_id=quota_user)
            cleanup.append(functools.partial(quotas.rollback, context))
        except exception.OverQuota as exc:
            quotas = exc.kwargs['quotas']
            headroom = exc.kwargs['headroom']
            allowed = quotas['fixed_ips']
            used = allowed - headroom['fixed_ips']
            LOG.warning(_LW("Quota exceeded for project %(pid)s, tried to "
                            "allocate fixed IP. %(used)s of %(allowed)s are "
                            "in use or are already reserved."),
                        {'pid': quota_project, 'used': used,
                         'allowed': allowed},
                        instance_uuid=instance_id)
            raise exception.FixedIpLimitExceeded()
        try:
            if network['cidr']:
                address = kwargs.get('address', None)
                if address:
                    LOG.debug('Associating instance with specified fixed IP '
                              '%(address)s in network %(network)s on subnet '
                              '%(cidr)s.' %
                              {'address': address, 'network': network['id'],
                               'cidr': network['cidr']},
                              instance=instance)
                    fip = objects.FixedIP.associate(context,
                                                    str(address),
                                                    instance_id,
                                                    network['id'])
                else:
                    LOG.debug('Associating instance with fixed IP from pool '
                              'in network %(network)s on subnet %(cidr)s.' %
                              {'network': network['id'],
                               'cidr': network['cidr']},
                              instance=instance)
                    fip = objects.FixedIP.associate_pool(
                        context.elevated(), network['id'], instance_id)
                    address = str(fip.address)
                # Reuse an existing vif on this network or create a new one.
                vif = objects.VirtualInterface.get_by_instance_and_network(
                    context, instance_id, network['id'])
                if vif is None:
                    LOG.debug('vif for network %(network)s is used up, '
                              'trying to create new vif',
                              {'network': network['id']}, instance=instance)
                    vif = self._add_virtual_interface(context,
                        instance_id, network['id'])
                fip.allocated = True
                fip.virtual_interface_id = vif.id
                fip.save()
                cleanup.append(functools.partial(fip.disassociate, context))
                LOG.debug('Refreshing security group members for instance.',
                          instance=instance)
                self._do_trigger_security_group_members_refresh_for_instance(
                    instance_id)
                cleanup.append(functools.partial(
                    self._do_trigger_security_group_members_refresh_for_instance,  # noqa
                    instance_id))
            # DNS entries are created under both display name and uuid.
            name = instance.display_name
            if self._validate_instance_zone_for_dns_domain(context, instance):
                self.instance_dns_manager.create_entry(
                    name, str(fip.address), "A", self.instance_dns_domain)
                cleanup.append(functools.partial(
                    self.instance_dns_manager.delete_entry,
                    name, self.instance_dns_domain))
                self.instance_dns_manager.create_entry(
                    instance_id, str(fip.address), "A",
                    self.instance_dns_domain)
                cleanup.append(functools.partial(
                    self.instance_dns_manager.delete_entry,
                    instance_id, self.instance_dns_domain))
            LOG.debug('Setting up network %(network)s on host %(host)s.' %
                      {'network': network['id'], 'host': self.host},
                      instance=instance)
            self._setup_network_on_host(context, network)
            cleanup.append(functools.partial(
                self._teardown_network_on_host,
                context, network))
            quotas.commit(context)
            if address is None:
                # TODO(mriedem): should _setup_network_on_host return the addr?
                LOG.debug('Fixed IP is setup on network %s but not returning '
                          'the specific IP from the base network manager.',
                          network['uuid'], instance=instance)
            else:
                LOG.debug('Allocated fixed ip %s on network %s', address,
                          network['uuid'], instance=instance)
            return address
        except Exception:
            with excutils.save_and_reraise_exception():
                # Unwind everything done so far; cleanup failures are logged
                # but must not mask the original exception.
                for f in cleanup:
                    try:
                        f()
                    except Exception:
                        LOG.warning(_LW('Error cleaning up fixed ip '
                                        'allocation. Manual cleanup may '
                                        'be required.'), exc_info=True)
    def deallocate_fixed_ip(self, context, address, host=None, teardown=True,
                            instance=None):
        """Returns a fixed ip to the pool.

        Adjusts quota usage, removes DNS entries, marks the address as
        unallocated and (optionally) tears down the network on this host,
        forcing a DHCP release when so configured.
        """
        fixed_ip_ref = objects.FixedIP.get_by_address(
            context, address, expected_attrs=['network'])
        instance_uuid = fixed_ip_ref.instance_uuid
        vif_id = fixed_ip_ref.virtual_interface_id
        LOG.debug('Deallocate fixed ip %s', address,
                  instance_uuid=instance_uuid)
        if not instance:
            # NOTE(vish) This db query could be removed if we pass az and name
            #            (or the whole instance object).
            # NOTE(danms) We can't use fixed_ip_ref.instance because
            #             instance may be deleted and the relationship
            #             doesn't extend to deleted instances
            instance = objects.Instance.get_by_uuid(
                context.elevated(read_deleted='yes'), instance_uuid)
        quotas = self.quotas_cls()
        quota_project, quota_user = quotas_obj.ids_from_instance(context,
                                                                 instance)
        try:
            quotas.reserve(context, fixed_ips=-1, project_id=quota_project,
                           user_id=quota_user)
        except Exception:
            # Best-effort: a quota bookkeeping failure must not block the
            # actual IP release.
            LOG.exception(_LE("Failed to update usages deallocating "
                              "fixed IP"))
        try:
            self._do_trigger_security_group_members_refresh_for_instance(
                instance_uuid)
            if self._validate_instance_zone_for_dns_domain(context, instance):
                for n in self.instance_dns_manager.get_entries_by_address(
                        address, self.instance_dns_domain):
                    self.instance_dns_manager.delete_entry(n,
                        self.instance_dns_domain)
            fixed_ip_ref.allocated = False
            fixed_ip_ref.save()
            if teardown:
                network = fixed_ip_ref.network
                if CONF.force_dhcp_release:
                    dev = self.driver.get_dev(network)
                    # NOTE(vish): The below errors should never happen, but
                    #             there may be a race condition that is causing
                    #             them per
                    #             https://code.launchpad.net/bugs/968457,
                    #             so we log an error to help track down
                    #             the possible race.
                    if not vif_id:
                        LOG.error(_LE("Unable to release %s because vif "
                                      "doesn't exist"), address)
                        return
                    vif = objects.VirtualInterface.get_by_id(context, vif_id)
                    if not vif:
                        LOG.error(_LE("Unable to release %s because vif "
                                      "object doesn't exist"), address)
                        return
                    # NOTE(cfb): Call teardown before release_dhcp to ensure
                    #            that the IP can't be re-leased after a release
                    #            packet is sent.
                    self._teardown_network_on_host(context, network)
                    # NOTE(vish): This forces a packet so that the
                    #             release_fixed_ip callback will
                    #             get called by nova-dhcpbridge.
                    try:
                        self.driver.release_dhcp(dev, address, vif.address)
                    except exception.NetworkDhcpReleaseFailed:
                        LOG.error(_LE("Error releasing DHCP for IP %(address)s"
                                      " with MAC %(mac_address)s"),
                                  {'address': address,
                                   'mac_address': vif.address},
                                  instance=instance)
                    # NOTE(yufang521247): This is probably a failed dhcp fixed
                    # ip. DHCPRELEASE packet sent to dnsmasq would not trigger
                    # dhcp-bridge to run. Thus it is better to disassociate
                    # such fixed ip here.
                    fixed_ip_ref = objects.FixedIP.get_by_address(
                        context, address)
                    if (instance_uuid == fixed_ip_ref.instance_uuid and
                            not fixed_ip_ref.leased):
                        fixed_ip_ref.disassociate()
                else:
                    # We can't try to free the IP address so just call teardown
                    self._teardown_network_on_host(context, network)
        except Exception:
            with excutils.save_and_reraise_exception():
                try:
                    quotas.rollback(context)
                except Exception:
                    LOG.warning(_LW("Failed to rollback quota for "
                                    "deallocate fixed ip: %s"), address,
                                instance=instance)
        # Commit the reservations
        # NOTE(review): the early 'return's above skip this commit, leaving
        # the reservation to expire instead -- confirm that is intended.
        quotas.commit(context)
def lease_fixed_ip(self, context, address):
"""Called by dhcp-bridge when ip is leased."""
LOG.debug('Leased IP |%s|', address, context=context)
fixed_ip = objects.FixedIP.get_by_address(context, address)
if fixed_ip.instance_uuid is None:
LOG.warning(_LW('IP %s leased that is not associated'), address,
context=context)
return
fixed_ip.leased = True
fixed_ip.save()
if not fixed_ip.allocated:
LOG.warning(_LW('IP |%s| leased that isn\'t allocated'), address,
context=context)
def release_fixed_ip(self, context, address):
"""Called by dhcp-bridge when ip is released."""
LOG.debug('Released IP |%s|', address, context=context)
fixed_ip = objects.FixedIP.get_by_address(context, address)
if fixed_ip.instance_uuid is None:
LOG.warning(_LW('IP %s released that is not associated'), address,
context=context)
return
if not fixed_ip.leased:
LOG.warning(_LW('IP %s released that was not leased'), address,
context=context)
fixed_ip.leased = False
fixed_ip.save()
if not fixed_ip.allocated:
fixed_ip.disassociate()
@staticmethod
def _convert_int_args(kwargs):
int_args = ("network_size", "num_networks",
"vlan_start", "vpn_start")
for key in int_args:
try:
value = kwargs.get(key)
if value is None:
continue
kwargs[key] = int(value)
except ValueError:
raise exception.InvalidIntValue(key=key)
    def create_networks(self, context,
                        label, cidr=None, multi_host=None, num_networks=None,
                        network_size=None, cidr_v6=None,
                        gateway=None, gateway_v6=None, bridge=None,
                        bridge_interface=None, dns1=None, dns2=None,
                        fixed_cidr=None, allowed_start=None,
                        allowed_end=None, **kwargs):
        """Validate input, apply config defaults and create network(s).

        All named parameters are folded into *kwargs* and handed on to
        _do_create_networks after validation.
        """
        arg_names = ("label", "cidr", "multi_host", "num_networks",
                     "network_size", "cidr_v6",
                     "gateway", "gateway_v6", "bridge",
                     "bridge_interface", "dns1", "dns2",
                     "fixed_cidr", "allowed_start", "allowed_end")
        if 'mtu' not in kwargs:
            kwargs['mtu'] = CONF.network_device_mtu
        if 'dhcp_server' not in kwargs:
            kwargs['dhcp_server'] = gateway
        if 'enable_dhcp' not in kwargs:
            kwargs['enable_dhcp'] = True
        if 'share_address' not in kwargs:
            kwargs['share_address'] = CONF.share_dhcp_address
        # Fold the explicit parameters into kwargs via locals(); every name
        # in arg_names must remain a parameter and must not be rebound
        # before this point, or the wrong value would be picked up.
        for name in arg_names:
            kwargs[name] = locals()[name]
        self._convert_int_args(kwargs)
        # check for certain required inputs
        label = kwargs["label"]
        if not label:
            raise exception.NetworkNotCreated(req="label")
        # Size of "label" column in nova.networks is 255, hence the restriction
        if len(label) > 255:
            raise exception.LabelTooLong()
        if not (kwargs["cidr"] or kwargs["cidr_v6"]):
            raise exception.NetworkNotCreated(req="cidr or cidr_v6")
        kwargs["bridge"] = kwargs["bridge"] or CONF.flat_network_bridge
        kwargs["bridge_interface"] = (kwargs["bridge_interface"] or
                                      CONF.flat_interface)
        for fld in self.required_create_args:
            if not kwargs[fld]:
                raise exception.NetworkNotCreated(req=fld)
        if kwargs["cidr_v6"]:
            # NOTE(vish): just for validation
            try:
                netaddr.IPNetwork(kwargs["cidr_v6"])
            except netaddr.AddrFormatError:
                raise exception.InvalidCidr(cidr=kwargs["cidr_v6"])
        if kwargs["cidr"]:
            try:
                fixnet = netaddr.IPNetwork(kwargs["cidr"])
            except netaddr.AddrFormatError:
                raise exception.InvalidCidr(cidr=kwargs["cidr"])
        kwargs["num_networks"] = kwargs["num_networks"] or CONF.num_networks
        if not kwargs["network_size"]:
            if kwargs["cidr"]:
                # Derive network_size from the cidr, clamped to the
                # configured maximum.
                each_subnet_size = fixnet.size / kwargs["num_networks"]
                if each_subnet_size > CONF.network_size:
                    subnet = 32 - int(math.log(CONF.network_size, 2))
                    oversize_msg = _LW(
                        'Subnet(s) too large, defaulting to /%s.'
                        ' To override, specify network_size flag.') % subnet
                    LOG.warn(oversize_msg)
                    kwargs["network_size"] = CONF.network_size
                else:
                    kwargs["network_size"] = fixnet.size
            else:
                kwargs["network_size"] = CONF.network_size
        kwargs["multi_host"] = (
            CONF.multi_host
            if kwargs["multi_host"] is None
            else strutils.bool_from_string(kwargs["multi_host"]))
        kwargs["vlan_start"] = kwargs.get("vlan_start") or CONF.vlan_start
        kwargs["vpn_start"] = kwargs.get("vpn_start") or CONF.vpn_start
        kwargs["dns1"] = kwargs["dns1"] or CONF.flat_network_dns
        if kwargs["fixed_cidr"]:
            try:
                kwargs["fixed_cidr"] = netaddr.IPNetwork(kwargs["fixed_cidr"])
            except netaddr.AddrFormatError:
                raise exception.InvalidCidr(cidr=kwargs["fixed_cidr"])
            # Subnet of fixed IPs must fall within fixed range
            if kwargs["fixed_cidr"] not in fixnet:
                raise exception.AddressOutOfRange(
                    address=kwargs["fixed_cidr"].network, cidr=fixnet)
        LOG.debug('Create network: |%s|', kwargs)
        return self._do_create_networks(context, **kwargs)
@staticmethod
def _index_of(subnet, ip):
try:
start = netaddr.IPAddress(ip)
except netaddr.AddrFormatError:
raise exception.InvalidAddress(address=ip)
index = start.value - subnet.value
if index < 0 or index >= subnet.size:
raise exception.AddressOutOfRange(address=ip, cidr=str(subnet))
return index
    def _do_create_networks(self, context,
                            label, cidr, multi_host, num_networks,
                            network_size, cidr_v6, gateway, gateway_v6, bridge,
                            bridge_interface, dns1=None, dns2=None,
                            fixed_cidr=None, mtu=None, dhcp_server=None,
                            enable_dhcp=None, share_address=None,
                            allowed_start=None, allowed_end=None, **kwargs):
        """Build and persist one Network object per subnet.

        Unless ``kwargs['ipam']`` is set, *cidr*/*cidr_v6* are carved into
        ``num_networks`` subnets of ``network_size`` addresses.  Each v4
        subnet is checked against already-existing networks and bumped to
        the next free subnet on overlap (or CidrConflict is raised).  For
        each resulting network, gateway/dhcp (and, under VlanManager, vpn)
        addresses are reserved and the fixed-ip rows are created.

        Also consumed from kwargs: 'project_id', 'vlan', 'vlan_start',
        'vpn_start'.

        :returns: the created networks as primitives
            (NOTE(danms): Remove this in RPC API v2.0).
        """
        # NOTE(jkoelker): these are dummy values to make sure iter works
        # TODO(tr3buchet): disallow carving up networks
        fixed_net_v4 = netaddr.IPNetwork('0/32')
        fixed_net_v6 = netaddr.IPNetwork('::0/128')
        subnets_v4 = []
        subnets_v6 = []
        if kwargs.get('ipam'):
            # external IPAM: take the cidrs verbatim, no carving
            if cidr_v6:
                subnets_v6 = [netaddr.IPNetwork(cidr_v6)]
            if cidr:
                subnets_v4 = [netaddr.IPNetwork(cidr)]
        else:
            subnet_bits = int(math.ceil(math.log(network_size, 2)))
            if cidr_v6:
                fixed_net_v6 = netaddr.IPNetwork(cidr_v6)
                prefixlen_v6 = 128 - subnet_bits
                # smallest subnet in IPv6 ethernet network is /64
                if prefixlen_v6 > 64:
                    prefixlen_v6 = 64
                subnets_v6 = fixed_net_v6.subnet(prefixlen_v6,
                                                 count=num_networks)
            if cidr:
                fixed_net_v4 = netaddr.IPNetwork(cidr)
                prefixlen_v4 = 32 - subnet_bits
                subnets_v4 = list(fixed_net_v4.subnet(prefixlen_v4,
                                                      count=num_networks))
        if cidr:
            # NOTE(jkoelker): This replaces the _validate_cidrs call and
            # prevents looping multiple times
            try:
                nets = objects.NetworkList.get_all(context)
            except exception.NoNetworksFound:
                nets = []
            num_used_nets = len(nets)
            used_subnets = [net.cidr for net in nets]
            # Returns the next subnet after *subnet* that is neither already
            # picked for this request nor outside fixed_net_v4; returns None
            # (implicitly) when the range is exhausted.
            def find_next(subnet):
                next_subnet = subnet.next()
                while next_subnet in subnets_v4:
                    next_subnet = next_subnet.next()
                if next_subnet in fixed_net_v4:
                    return next_subnet
            for subnet in list(subnets_v4):
                if subnet in used_subnets:
                    next_subnet = find_next(subnet)
                    if next_subnet:
                        subnets_v4.remove(subnet)
                        subnets_v4.append(next_subnet)
                        subnet = next_subnet
                    else:
                        raise exception.CidrConflict(cidr=subnet,
                                                     other=subnet)
                for used_subnet in used_subnets:
                    if subnet in used_subnet:
                        raise exception.CidrConflict(cidr=subnet,
                                                     other=used_subnet)
                    if used_subnet in subnet:
                        next_subnet = find_next(subnet)
                        if next_subnet:
                            subnets_v4.remove(subnet)
                            subnets_v4.append(next_subnet)
                            subnet = next_subnet
                        else:
                            raise exception.CidrConflict(cidr=subnet,
                                                         other=used_subnet)
        networks = objects.NetworkList(context=context, objects=[])
        # pair v4/v6 subnets positionally; one side may be empty
        subnets = itertools.izip_longest(subnets_v4, subnets_v6)
        for index, (subnet_v4, subnet_v6) in enumerate(subnets):
            net = objects.Network(context=context)
            net.bridge = bridge
            net.bridge_interface = bridge_interface
            net.multi_host = multi_host
            net.dns1 = dns1
            net.dns2 = dns2
            net.mtu = mtu
            net.enable_dhcp = enable_dhcp
            net.share_address = share_address
            net.project_id = kwargs.get('project_id')
            if num_networks > 1:
                net.label = '%s_%d' % (label, index)
            else:
                net.label = label
            bottom_reserved = self._bottom_reserved_ips
            top_reserved = self._top_reserved_ips
            extra_reserved = []
            if cidr and subnet_v4:
                # current walks forward from the first usable address as
                # gateway/dhcp addresses are consumed
                current = subnet_v4[1]
                if allowed_start:
                    val = self._index_of(subnet_v4, allowed_start)
                    current = netaddr.IPAddress(allowed_start)
                    bottom_reserved = val
                if allowed_end:
                    val = self._index_of(subnet_v4, allowed_end)
                    top_reserved = subnet_v4.size - 1 - val
                net.cidr = str(subnet_v4)
                net.netmask = str(subnet_v4.netmask)
                net.broadcast = str(subnet_v4.broadcast)
                if gateway:
                    net.gateway = gateway
                else:
                    net.gateway = current
                    current += 1
                if not dhcp_server:
                    dhcp_server = net.gateway
                net.dhcp_start = current
                current += 1
                # don't let dhcp_start collide with the dhcp server itself
                if str(net.dhcp_start) == dhcp_server:
                    net.dhcp_start = current
                net.dhcp_server = dhcp_server
                extra_reserved.append(str(net.dhcp_server))
                extra_reserved.append(str(net.gateway))
            if cidr_v6 and subnet_v6:
                net.cidr_v6 = str(subnet_v6)
                if gateway_v6:
                    # use a pre-defined gateway if one is provided
                    net.gateway_v6 = str(gateway_v6)
                else:
                    net.gateway_v6 = str(subnet_v6[1])
                net.netmask_v6 = str(subnet_v6.netmask)
            if CONF.network_manager == 'nova.network.manager.VlanManager':
                vlan = kwargs.get('vlan', None)
                if not vlan:
                    index_vlan = index + num_used_nets
                    vlan = kwargs['vlan_start'] + index_vlan
                    used_vlans = [x.vlan for x in nets]
                    if vlan in used_vlans:
                        # That vlan is used, try to get another one
                        used_vlans.sort()
                        vlan = used_vlans[-1] + 1
                net.vpn_private_address = net.dhcp_start
                extra_reserved.append(str(net.vpn_private_address))
                net.dhcp_start = net.dhcp_start + 1
                net.vlan = vlan
                net.bridge = 'br%s' % vlan
                # NOTE(vish): This makes ports unique across the cloud, a more
                # robust solution would be to make them uniq per ip
                index_vpn = index + num_used_nets
                net.vpn_public_port = kwargs['vpn_start'] + index_vpn
            net.create()
            networks.objects.append(net)
            if cidr and subnet_v4:
                self._create_fixed_ips(context, net.id, fixed_cidr,
                                       extra_reserved, bottom_reserved,
                                       top_reserved)
        # NOTE(danms): Remove this in RPC API v2.0
        return obj_base.obj_to_primitive(networks)
    def delete_network(self, context, fixed_range, uuid,
                       require_disassociated=True):
        """Delete a network, looked up by uuid or (legacy) by cidr.

        Exactly one of *uuid* / *fixed_range* is expected to be truthy;
        NOTE(review): if both are falsy, ``network`` is never bound and the
        LOG.debug call below raises UnboundLocalError — confirm callers
        always supply one.

        :raises NetworkHasProject: when *require_disassociated* is set and
            the network is still associated with a project.
        """
        # Prefer uuid but we'll also take cidr for backwards compatibility
        elevated = context.elevated()
        if uuid:
            network = objects.Network.get_by_uuid(elevated, uuid)
        elif fixed_range:
            network = objects.Network.get_by_cidr(elevated, fixed_range)
        LOG.debug('Delete network %s', network['uuid'])
        if require_disassociated and network.project_id is not None:
            raise exception.NetworkHasProject(project_id=network.project_id)
        network.destroy()
@property
def _bottom_reserved_ips(self):
"""Number of reserved ips at the bottom of the range."""
return 2 # network, gateway
@property
def _top_reserved_ips(self):
"""Number of reserved ips at the top of the range."""
return 1 # broadcast
def _create_fixed_ips(self, context, network_id, fixed_cidr=None,
extra_reserved=None, bottom_reserved=0,
top_reserved=0):
"""Create all fixed ips for network."""
network = self._get_network_by_id(context, network_id)
if extra_reserved is None:
extra_reserved = []
if not fixed_cidr:
fixed_cidr = netaddr.IPNetwork(network['cidr'])
num_ips = len(fixed_cidr)
ips = []
for index in range(num_ips):
address = str(fixed_cidr[index])
if (index < bottom_reserved or num_ips - index <= top_reserved or
address in extra_reserved):
reserved = True
else:
reserved = False
ips.append({'network_id': network_id,
'address': address,
'reserved': reserved})
objects.FixedIPList.bulk_create(context, ips)
    def _allocate_fixed_ips(self, context, instance_id, host, networks,
                            **kwargs):
        """Calls allocate_fixed_ip once for each network.

        Abstract here; concrete managers (e.g. FlatManager) implement it.

        :raises NotImplementedError: always, in this base class.
        """
        raise NotImplementedError()
    def setup_networks_on_host(self, context, instance_id, host,
                               teardown=False):
        """Call setup (or teardown) on the network host of each of the
        instance's vifs, either locally or via rpc on the owning host.

        Remote calls are spawned as eventlet green threads and joined
        before returning.
        """
        green_threads = []
        if teardown:
            call_func = self._teardown_network_on_host
        else:
            call_func = self._setup_network_on_host
        instance = objects.Instance.get_by_id(context, instance_id)
        vifs = objects.VirtualInterfaceList.get_by_instance_uuid(
            context, instance['uuid'])
        LOG.debug('Setup networks on host', instance=instance)
        for vif in vifs:
            network = objects.Network.get_by_id(context, vif.network_id)
            if not network.multi_host:
                # NOTE (tr3buchet): if using multi_host, host is instance[host]
                host = network['host']
            if self.host == host or host is None:
                # at this point i am the correct host, or host doesn't
                # matter -> FlatManager
                call_func(context, network)
            else:
                # i'm not the right host, run call on correct host
                green_threads.append(eventlet.spawn(
                    self.network_rpcapi.rpc_setup_network_on_host, context,
                    network.id, teardown, host))
        # wait for all of the setups (if any) to finish
        for gt in green_threads:
            gt.wait()
def rpc_setup_network_on_host(self, context, network_id, teardown):
if teardown:
call_func = self._teardown_network_on_host
else:
call_func = self._setup_network_on_host
# subcall from original setup_networks_on_host
network = objects.Network.get_by_id(context, network_id)
call_func(context, network)
def _initialize_network(self, network):
if network.enable_dhcp:
is_ext = (network.dhcp_server is not None and
network.dhcp_server != network.gateway)
self.l3driver.initialize_network(network.cidr, is_ext)
self.l3driver.initialize_gateway(network)
    def _setup_network_on_host(self, context, network):
        """Sets up network on this host.

        Abstract here; concrete managers implement it.

        :raises NotImplementedError: always, in this base class.
        """
        raise NotImplementedError()
    def _teardown_network_on_host(self, context, network):
        """Tears down network on this host.

        (The previous docstring said "Sets up" — a copy-paste slip.)
        Abstract here; concrete managers implement it.

        :raises NotImplementedError: always, in this base class.
        """
        raise NotImplementedError()
def validate_networks(self, context, networks):
"""check if the networks exists and host
is set to each network.
"""
LOG.debug('Validate networks')
if networks is None or len(networks) == 0:
return
network_uuids = [uuid for (uuid, fixed_ip) in networks]
self._get_networks_by_uuids(context, network_uuids)
for network_uuid, address in networks:
# check if the fixed IP address is valid and
# it actually belongs to the network
if address is not None:
if not netutils.is_valid_ip(address):
raise exception.FixedIpInvalid(address=address)
fixed_ip_ref = objects.FixedIP.get_by_address(
context, address, expected_attrs=['network'])
network = fixed_ip_ref.network
if network.uuid != network_uuid:
raise exception.FixedIpNotFoundForNetwork(
address=address, network_uuid=network_uuid)
if fixed_ip_ref.instance_uuid is not None:
raise exception.FixedIpAlreadyInUse(
address=address,
instance_uuid=fixed_ip_ref.instance_uuid)
    def _get_network_by_id(self, context, network_id):
        """Fetch one network by id; 'allow_none' permits networks that are
        not (yet) assigned to any project."""
        return objects.Network.get_by_id(context, network_id,
                                         project_only='allow_none')
def _get_networks_by_uuids(self, context, network_uuids):
networks = objects.NetworkList.get_by_uuids(
context, network_uuids, project_only="allow_none")
networks.sort(key=lambda x: network_uuids.index(x.uuid))
return networks
    def get_vifs_by_instance(self, context, instance_id):
        """Returns the vifs associated with an instance.

        Each vif is returned as a plain dict; vifs bound to a network also
        carry a 'net_uuid' key.
        """
        # NOTE(vish): This is no longer used but can't be removed until
        #             we major version the network_rpcapi to 2.0.
        instance = objects.Instance.get_by_id(context, instance_id)
        LOG.debug('Get VIFs for instance', instance=instance)
        # NOTE(russellb) No need to object-ify this since
        # get_vifs_by_instance() is unused and set to be removed.
        vifs = objects.VirtualInterfaceList.get_by_instance_uuid(context,
                                                                 instance.uuid)
        for vif in vifs:
            if vif.network_id is not None:
                network = self._get_network_by_id(context, vif.network_id)
                vif.net_uuid = network.uuid
        # Python 2 iteritems(): flatten each object to a dict for the caller
        return [dict(vif.iteritems()) for vif in vifs]
def get_instance_id_by_floating_address(self, context, address):
"""Returns the instance id a floating ip's fixed ip is allocated to."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
LOG.debug('Get instance for floating address %s', address)
fixed_ip = objects.FixedIP.get_by_floating_address(context, address)
if fixed_ip is None:
return None
else:
return fixed_ip.instance_uuid
def get_network(self, context, network_uuid):
# NOTE(vish): used locally
return objects.Network.get_by_uuid(context.elevated(), network_uuid)
def get_all_networks(self, context):
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
try:
return obj_base.obj_to_primitive(
objects.NetworkList.get_all(context))
except exception.NoNetworksFound:
return []
    def disassociate_network(self, context, network_uuid):
        """Detach the network identified by *network_uuid* from its
        project (legacy RPC entry point)."""
        # NOTE(vish): This is no longer used but can't be removed until
        #             we major version the network_rpcapi to 2.0.
        network = self.get_network(context, network_uuid)
        network.disassociate(context, network.id)
    def get_fixed_ip(self, context, id):
        """Return the fixed ip with the given database *id*."""
        # NOTE(vish): This is no longer used but can't be removed until
        #             we major version the network_rpcapi to 2.0.
        return objects.FixedIP.get_by_id(context, id)
    def get_fixed_ip_by_address(self, context, address):
        """Return the fixed ip matching *address*."""
        # NOTE(vish): This is no longer used but can't be removed until
        #             we major version the network_rpcapi to 2.0.
        return objects.FixedIP.get_by_address(context, address)
def get_vif_by_mac_address(self, context, mac_address):
"""Returns the vifs record for the mac_address."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi to 2.0.
# NOTE(russellb) No need to object-ify this since
# get_vifs_by_instance() is unused and set to be removed.
vif = objects.VirtualInterface.get_by_address(context, mac_address)
if vif.network_id is not None:
network = self._get_network_by_id(context, vif.network_id)
vif.net_uuid = network.uuid
return vif
    @periodic_task.periodic_task(
        spacing=CONF.dns_update_periodic_interval)
    def _periodic_update_dns(self, context):
        """Update local DNS entries of all networks on this host.

        Runs every CONF.dns_update_periodic_interval seconds (the spacing
        is read from CONF when the class is defined).
        """
        networks = objects.NetworkList.get_by_host(context, self.host)
        for network in networks:
            dev = self.driver.get_dev(network)
            self.driver.update_dns(context, dev, network)
def update_dns(self, context, network_ids):
"""Called when fixed IP is allocated or deallocated."""
if CONF.fake_network:
return
LOG.debug('Update DNS for network ids: %s', network_ids)
networks = [network for network in
objects.NetworkList.get_by_host(context, self.host)
if network.multi_host and network.id in network_ids]
for network in networks:
dev = self.driver.get_dev(network)
self.driver.update_dns(context, dev, network)
    def add_network_to_project(self, ctxt, project_id, network_uuid):
        """Associate a network with a project.

        Abstract here; implemented by managers that support it
        (e.g. VlanManager).

        :raises NotImplementedError: always, in this base class.
        """
        raise NotImplementedError()
class FlatManager(NetworkManager):
    """Basic network manager that uses no vlans.

    No bridge or vlan creation is performed here: whatever bridge is
    named when networks are created through nova-manage must already
    exist on every compute host.  The expected usage is a single network
    per host, e.g.::

        nova-manage network create 192.168.0.0/24 1 256

    Creating multiple networks for one manager is currently not
    supported, though allocate_fixed_ip and get_network could be
    extended to allow it.  Arbitrary address lists inside one network
    can be achieved with manual db editing.

    If flat_injected is True, the compute host attempts to inject
    network config into the guest by modifying /etc/network/interfaces,
    which currently only works on debian based systems.  Supporting a
    wider range of OSes would require another channel (an attached disk
    or serial device, say) to tell the guest which ip to use.

    Metadata forwarding must be handled by the gateway; nova performs no
    setup in this mode, so requests to 169.254.169.254 port 80 have to
    be forwarded to the api server manually.
    """

    timeout_fixed_ips = False

    required_create_args = ['bridge']

    def _allocate_fixed_ips(self, context, instance_id, host, networks,
                            **kwargs):
        """Allocate one fixed ip on every network in *networks*."""
        requested = kwargs.get('requested_networks')
        explicit_addresses = {}
        if requested is not None:
            explicit_addresses = dict((req.network_id, req.address)
                                      for req in requested)
        for network in networks:
            self.allocate_fixed_ip(
                context, instance_id, network,
                address=explicit_addresses.get(network['uuid']))

    def deallocate_fixed_ip(self, context, address, host=None, teardown=True,
                            instance=None):
        """Return a fixed ip to the pool and clear its association."""
        super(FlatManager, self).deallocate_fixed_ip(context, address, host,
                                                     teardown,
                                                     instance=instance)
        objects.FixedIP.disassociate_by_address(context, address)

    def _setup_network_on_host(self, context, network):
        """Record the injection flag on the network."""
        # NOTE(tr3buchet): this does not need to happen on every ip
        # allocation, this functionality makes more sense in create_network
        # but we'd have to move the flat_injected flag to compute
        network.injected = CONF.flat_injected
        network.save()

    def _teardown_network_on_host(self, context, network):
        """Nothing to tear down for flat networking."""
        pass

    # NOTE(justinsb): The floating ip functions below are stub
    # implementations.  They used to raise, but that was messing up
    # horizon, and timing made a real implementation difficult in Essex.

    def get_floating_ip(self, context, id):
        """Stub: always returns None."""
        # NOTE(vish): This is no longer used but can't be removed until
        # we major version the network_rpcapi to 2.0.
        return None

    def get_floating_pools(self, context):
        """Stub: always returns an empty dict."""
        # NOTE(maurosr) This method should be removed in future, replaced by
        # get_floating_ip_pools. See bug #1091668
        return {}

    def get_floating_ip_pools(self, context):
        """Stub: always returns an empty dict."""
        # NOTE(vish): This is no longer used but can't be removed until
        # we major version the network_rpcapi to 2.0.
        return {}

    def get_floating_ip_by_address(self, context, address):
        """Stub: always returns None."""
        # NOTE(vish): This is no longer used but can't be removed until
        # we major version the network_rpcapi to 2.0.
        return None

    def get_floating_ips_by_project(self, context):
        """Stub: always returns an empty list."""
        # NOTE(vish): This is no longer used but can't be removed until
        # we major version the network_rpcapi to 2.0.
        return []

    def get_floating_ips_by_fixed_address(self, context, fixed_address):
        """Stub: always returns an empty list."""
        # NOTE(vish): This is no longer used but can't be removed until
        # we major version the network_rpcapi to 2.0.
        return []

    def allocate_floating_ip(self, context, project_id, pool):
        """Stub: always returns None."""
        return None

    def deallocate_floating_ip(self, context, address,
                               affect_auto_assigned):
        """Stub: always returns None."""
        return None

    def associate_floating_ip(self, context, floating_address, fixed_address,
                              affect_auto_assigned=False):
        """Stub: always returns None instead of associating."""
        return None

    def disassociate_floating_ip(self, context, address,
                                 affect_auto_assigned=False):
        """Stub: always returns None instead of disassociating."""
        return None

    def migrate_instance_start(self, context, instance_uuid,
                               floating_addresses,
                               rxtx_factor=None, project_id=None,
                               source=None, dest=None):
        """No-op: nothing to migrate in flat mode."""
        pass

    def migrate_instance_finish(self, context, instance_uuid,
                                floating_addresses, host=None,
                                rxtx_factor=None, project_id=None,
                                source=None, dest=None):
        """No-op: nothing to migrate in flat mode."""
        pass

    def update_dns(self, context, network_ids):
        """No-op override of the fixed-ip DNS refresh hook."""
        pass
class FlatDHCPManager(RPCAllocateFixedIP, floating_ips.FloatingIP,
                      NetworkManager):
    """Flat networking with dhcp.

    FlatDHCPManager will start up one dhcp server to give out addresses.
    It never injects network settings into the guest. It also manages
    bridges.  Otherwise it behaves like FlatManager.
    """

    SHOULD_CREATE_BRIDGE = True
    DHCP = True
    required_create_args = ['bridge']

    def init_host(self):
        """Do any initialization that needs to be run if this is a
        standalone service.
        """
        ctxt = context.get_admin_context()
        networks = objects.NetworkList.get_by_host(ctxt, self.host)
        # batch all iptables changes made during init into one apply
        self.driver.iptables_manager.defer_apply_on()
        self.l3driver.initialize(fixed_range=False, networks=networks)
        super(FlatDHCPManager, self).init_host()
        self.init_host_floating_ips()
        self.driver.iptables_manager.defer_apply_off()

    def _setup_network_on_host(self, context, network):
        """Sets up network on this host: resolve the dhcp ip, prime the L3
        driver and (re)start dhcp/ra services for the network's device."""
        network.dhcp_server = self._get_dhcp_ip(context, network)
        self._initialize_network(network)
        # NOTE(vish): if dhcp server is not set then don't dhcp
        if not CONF.fake_network and network.enable_dhcp:
            dev = self.driver.get_dev(network)
            # NOTE(dprince): dhcp DB queries require elevated context
            elevated = context.elevated()
            self.driver.update_dhcp(elevated, dev, network)
            if CONF.use_ipv6:
                self.driver.update_ra(context, dev, network)
                gateway = utils.get_my_linklocal(dev)
                network.gateway_v6 = gateway
                network.save()

    def _teardown_network_on_host(self, context, network):
        """Refresh the dhcp config when the network is torn down here."""
        # NOTE(vish): if dhcp server is not set then don't dhcp
        if not CONF.fake_network and network.enable_dhcp:
            network['dhcp_server'] = self._get_dhcp_ip(context, network)
            dev = self.driver.get_dev(network)
            # NOTE(dprince): dhcp DB queries require elevated context
            elevated = context.elevated()
            self.driver.update_dhcp(elevated, dev, network)

    def _get_network_dict(self, network):
        """Returns the dict representing necessary and meta network fields."""
        # get generic network fields
        network_dict = super(FlatDHCPManager, self)._get_network_dict(network)
        # get flat dhcp specific fields
        if self.SHOULD_CREATE_BRIDGE:
            network_dict['should_create_bridge'] = self.SHOULD_CREATE_BRIDGE
        if network.get('bridge_interface'):
            network_dict['bridge_interface'] = network['bridge_interface']
        if network.get('multi_host'):
            network_dict['multi_host'] = network['multi_host']
        return network_dict
class VlanManager(RPCAllocateFixedIP, floating_ips.FloatingIP, NetworkManager):
    """Vlan network with dhcp.

    VlanManager is the most complicated.  It will create a host-managed
    vlan for each project.  Each project gets its own subnet.  The
    networks and associated subnets are created with nova-manage using a
    command like: nova-manage network create 10.0.0.0/8 3 16.  This will
    create 3 networks of 16 addresses from the beginning of the
    10.0.0.0 range.

    A dhcp server is run for each subnet, so each project will have its
    own.  For this mode to be useful, each project will need a vpn to
    access the instances in its subnet.
    """

    SHOULD_CREATE_BRIDGE = True
    SHOULD_CREATE_VLAN = True
    DHCP = True
    required_create_args = ['bridge_interface']

    def __init__(self, network_driver=None, *args, **kwargs):
        super(VlanManager, self).__init__(network_driver=network_driver,
                                          *args, **kwargs)
        # NOTE(cfb) VlanManager doesn't enforce quotas on fixed IP addresses
        # because a project is assigned an entire network.
        self.quotas_cls = objects.QuotasNoOp

    def init_host(self):
        """Do any initialization that needs to be run if this is a
        standalone service.
        """
        LOG.debug('Setup network on host %s', self.host)
        ctxt = context.get_admin_context()
        networks = objects.NetworkList.get_by_host(ctxt, self.host)
        # batch all iptables changes made during init into one apply
        self.driver.iptables_manager.defer_apply_on()
        self.l3driver.initialize(fixed_range=False, networks=networks)
        NetworkManager.init_host(self)
        self.init_host_floating_ips()
        self.driver.iptables_manager.defer_apply_off()

    def allocate_fixed_ip(self, context, instance_id, network, **kwargs):
        """Gets a fixed ip from the pool.

        With kwargs['vpn'] set, the network's reserved vpn address is
        used; with kwargs['address'] set, that specific address is
        associated; otherwise an address is pulled from the pool.  Also
        creates a vif if needed, registers DNS entries and makes sure the
        network is set up on this host.
        """
        LOG.debug('Allocate fixed ip on network %s', network['uuid'],
                  instance_uuid=instance_id)
        if kwargs.get('vpn', None):
            address = network['vpn_private_address']
            fip = objects.FixedIP.associate(context, str(address),
                                            instance_id, network['id'],
                                            reserved=True)
        else:
            address = kwargs.get('address', None)
            if address:
                fip = objects.FixedIP.associate(context, str(address),
                                                instance_id,
                                                network['id'])
            else:
                fip = objects.FixedIP.associate_pool(context,
                                                     network['id'],
                                                     instance_id)
            address = fip.address
        vif = objects.VirtualInterface.get_by_instance_and_network(
            context, instance_id, network['id'])
        if vif is None:
            LOG.debug('vif for network %(network)s and instance '
                      '%(instance_id)s is used up, '
                      'trying to create new vif',
                      {'network': network['id'],
                       'instance_id': instance_id})
            vif = self._add_virtual_interface(context,
                instance_id, network['id'])
        fip.allocated = True
        fip.virtual_interface_id = vif.id
        fip.save()
        if not kwargs.get('vpn', None):
            self._do_trigger_security_group_members_refresh_for_instance(
                instance_id)
        # NOTE(vish) This db query could be removed if we pass az and name
        #            (or the whole instance object).
        instance = objects.Instance.get_by_uuid(context, instance_id)
        name = instance.display_name
        if self._validate_instance_zone_for_dns_domain(context, instance):
            # register the ip under both the display name and the uuid
            self.instance_dns_manager.create_entry(name, address,
                                                   "A",
                                                   self.instance_dns_domain)
            self.instance_dns_manager.create_entry(instance_id, address,
                                                   "A",
                                                   self.instance_dns_domain)
        self._setup_network_on_host(context, network)
        LOG.debug('Allocated fixed ip %s on network %s', address,
                  network['uuid'], instance=instance)
        return address

    def add_network_to_project(self, context, project_id, network_uuid=None):
        """Force adds another network to a project."""
        LOG.debug('Add network %s to project %s', network_uuid, project_id)
        if network_uuid is not None:
            network_id = self.get_network(context, network_uuid).id
        else:
            network_id = None
        objects.Network.associate(context, project_id, network_id, force=True)

    def associate(self, context, network_uuid, associations):
        """Associate or disassociate host or project to network."""
        # NOTE(vish): This is no longer used but can't be removed until
        #             we major version the network_rpcapi to 2.0.
        LOG.debug('Associate network %s: |%s|', network_uuid, associations)
        network = self.get_network(context, network_uuid)
        network_id = network.id
        if 'host' in associations:
            host = associations['host']
            if host is None:
                network.disassociate(context, network_id,
                                     host=True, project=False)
            else:
                network.host = self.host
                network.save()
        if 'project' in associations:
            project = associations['project']
            if project is None:
                network.disassociate(context, network_id,
                                     host=False, project=True)
            else:
                network.associate(context, project, network_id, force=True)

    def _get_network_by_id(self, context, network_id):
        # NOTE(vish): Don't allow access to networks with project_id=None as
        #             these are networks that haven't been allocated to a
        #             project yet.
        return objects.Network.get_by_id(context, network_id,
                                         project_only=True)

    def _get_networks_by_uuids(self, context, network_uuids):
        # NOTE(vish): Don't allow access to networks with project_id=None as
        #             these are networks that haven't been allocated to a
        #             project yet.
        networks = objects.NetworkList.get_by_uuids(
            context, network_uuids, project_only=True)
        networks.sort(key=lambda x: network_uuids.index(x.uuid))
        return networks

    def _get_networks_for_instance(self, context, instance_id, project_id,
                                   requested_networks=None):
        """Determine which networks an instance should connect to."""
        # get networks associated with project
        if requested_networks is not None and len(requested_networks) != 0:
            network_uuids = [request.network_id
                             for request in requested_networks]
            networks = self._get_networks_by_uuids(context, network_uuids)
        else:
            # NOTE(vish): Allocates network on demand so requires admin.
            networks = objects.NetworkList.get_by_project(
                context.elevated(), project_id)
        return networks

    def create_networks(self, context, **kwargs):
        """Create networks based on parameters.

        Validates vlan ranges and cidr capacity, fills in defaults from
        CONF, then defers to NetworkManager.create_networks with vpn=True.
        """
        self._convert_int_args(kwargs)
        kwargs["vlan_start"] = kwargs.get("vlan_start") or CONF.vlan_start
        kwargs["num_networks"] = (kwargs.get("num_networks") or
                                  CONF.num_networks)
        kwargs["network_size"] = (kwargs.get("network_size") or
                                  CONF.network_size)
        # Check that num_networks + vlan_start is not > 4094, fixes lp708025
        if kwargs["num_networks"] + kwargs["vlan_start"] > 4094:
            raise ValueError(_('The sum between the number of networks and'
                               ' the vlan start cannot be greater'
                               ' than 4094'))
        # Check that vlan is not greater than 4094 or less then 1
        vlan_num = kwargs.get("vlan", None)
        if vlan_num is not None:
            try:
                vlan_num = int(vlan_num)
            except ValueError:
                raise ValueError(_("vlan must be an integer"))
            if vlan_num > 4094:
                raise ValueError(_('The vlan number cannot be greater than'
                                   ' 4094'))
            if vlan_num < 1:
                raise ValueError(_('The vlan number cannot be less than 1'))
        # check that num networks and network size fits in fixed_net
        fixed_net = netaddr.IPNetwork(kwargs['cidr'])
        if fixed_net.size < kwargs['num_networks'] * kwargs['network_size']:
            raise ValueError(_('The network range is not '
                  'big enough to fit %(num_networks)s networks. Network '
                  'size is %(network_size)s') % kwargs)
        kwargs['bridge_interface'] = (kwargs.get('bridge_interface') or
                                      CONF.vlan_interface)
        LOG.debug('Create network: |%s|', kwargs)
        return NetworkManager.create_networks(
            self, context, vpn=True, **kwargs)

    @utils.synchronized('setup_network', external=True)
    def _setup_network_on_host(self, context, network):
        """Sets up network on this host: vpn forward, gateway, dhcp/ra."""
        if not network.vpn_public_address:
            address = CONF.vpn_ip
            network.vpn_public_address = address
            network.save()
        else:
            address = network.vpn_public_address
        network.dhcp_server = self._get_dhcp_ip(context, network)
        self._initialize_network(network)
        # NOTE(vish): only ensure this forward if the address hasn't been set
        #             manually.
        if address == CONF.vpn_ip and hasattr(self.driver,
                                              "ensure_vpn_forward"):
            self.l3driver.add_vpn(CONF.vpn_ip,
                    network.vpn_public_port,
                    network.vpn_private_address)
        if not CONF.fake_network:
            dev = self.driver.get_dev(network)
            # NOTE(dprince): dhcp DB queries require elevated context
            if network.enable_dhcp:
                elevated = context.elevated()
                self.driver.update_dhcp(elevated, dev, network)
            if CONF.use_ipv6:
                self.driver.update_ra(context, dev, network)
                gateway = utils.get_my_linklocal(dev)
                network.gateway_v6 = gateway
                network.save()

    @utils.synchronized('setup_network', external=True)
    def _teardown_network_on_host(self, context, network):
        """Tear down network state on this host, removing the gateway of a
        multi-host network entirely when it is no longer used here."""
        if not CONF.fake_network:
            network['dhcp_server'] = self._get_dhcp_ip(context, network)
            dev = self.driver.get_dev(network)
            # NOTE(ethuleau): For multi hosted networks, if the network is no
            # more used on this host and if VPN forwarding rule aren't handed
            # by the host, we delete the network gateway.
            vpn_address = network['vpn_public_address']
            if (CONF.teardown_unused_network_gateway and
                network['multi_host'] and vpn_address != CONF.vpn_ip and
                not objects.Network.in_use_on_host(context, network['id'],
                                                   self.host)):
                LOG.debug("Remove unused gateway %s", network['bridge'])
                if network.enable_dhcp:
                    self.driver.kill_dhcp(dev)
                self.l3driver.remove_gateway(network)
                # release this host's dhcp address unless it is shared
                if not self._uses_shared_ip(network):
                    fip = objects.FixedIP.get_by_address(context,
                                                         network.dhcp_server)
                    fip.allocated = False
                    fip.host = None
                    fip.save()
            # NOTE(vish): if dhcp server is not set then don't dhcp
            elif network.enable_dhcp:
                # NOTE(dprince): dhcp DB queries require elevated context
                elevated = context.elevated()
                self.driver.update_dhcp(elevated, dev, network)

    def _get_network_dict(self, network):
        """Returns the dict representing necessary and meta network fields."""
        # get generic network fields
        network_dict = super(VlanManager, self)._get_network_dict(network)
        # get vlan specific network fields
        if self.SHOULD_CREATE_BRIDGE:
            network_dict['should_create_bridge'] = self.SHOULD_CREATE_BRIDGE
        if self.SHOULD_CREATE_VLAN:
            network_dict['should_create_vlan'] = self.SHOULD_CREATE_VLAN
        for k in ['vlan', 'bridge_interface', 'multi_host']:
            if network.get(k):
                network_dict[k] = network[k]
        return network_dict

    @property
    def _bottom_reserved_ips(self):
        """Number of reserved ips at the bottom of the range."""
        return super(VlanManager, self)._bottom_reserved_ips + 1  # vpn server

    @property
    def _top_reserved_ips(self):
        """Number of reserved ips at the top of the range."""
        parent_reserved = super(VlanManager, self)._top_reserved_ips
        return parent_reserved + CONF.cnt_vpn_clients
| {
"content_hash": "4a4b04c4e419671dd51f68d81b0a8dd2",
"timestamp": "",
"source": "github",
"line_count": 2136,
"max_line_length": 89,
"avg_line_length": 44.18445692883895,
"alnum_prop": 0.5511665854330459,
"repo_name": "sajeeshcs/nested_quota_latest",
"id": "1ac503714f25867caa755457d41337a2949305ae",
"size": "95199",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/network/manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "15283878"
},
{
"name": "Shell",
"bytes": "18626"
}
],
"symlink_target": ""
} |
from net_system.models import NetworkDevice
import django
def main():
    """Print the name of every NetworkDevice stored in the database."""
    # django.setup() must run before the ORM can be used from a
    # standalone script.
    django.setup()
    devices = NetworkDevice.objects.all()
    for device in devices:
        # Parenthesised print keeps the script runnable on both
        # Python 2 and 3; the original bare ``print`` statement was
        # Python-2 only.
        print(device.device_name)


if __name__ == "__main__":
    main()
| {
"content_hash": "dd71573fc022413d67ddbcf3ed8ca976",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 43,
"avg_line_length": 20.818181818181817,
"alnum_prop": 0.6593886462882096,
"repo_name": "patrebert/pynet_cert",
"id": "cc3b578fc2bea8a6756801107a766cb9fb02b1d8",
"size": "230",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "class8/ex4/list.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "OpenEdge ABL",
"bytes": "144"
},
{
"name": "Python",
"bytes": "69997"
},
{
"name": "Shell",
"bytes": "597"
}
],
"symlink_target": ""
} |
from .base import BaseReporter
class ReasonsReporter(BaseReporter):
    """Reporter that surfaces the optional ``reasons`` attribute of an error."""

    title = 'Reasons'

    def run(self, error):
        # Errors without a ``reasons`` attribute yield None.
        try:
            return error.reasons
        except AttributeError:
            return None
| {
"content_hash": "7d16d4d06e35c54945e8167ee3f8ea29",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 46,
"avg_line_length": 18.555555555555557,
"alnum_prop": 0.6826347305389222,
"repo_name": "grappa-py/grappa",
"id": "c1c72ebbe75c2a0629d403d96227abe39223df8e",
"size": "191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "grappa/reporters/reasons.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1520"
},
{
"name": "Python",
"bytes": "144569"
}
],
"symlink_target": ""
} |
import subprocess
import os.path
import socket
import time
import sys
import os
from shutil import copyfile
#
# Configuration variables
#
package_name = os.environ["PACKAGE_NAME"]
main_activity = os.environ["ACTIVITY_NAME"]
shared_library_dir = os.environ["SHARED_LIB_PATH"]
android_ndk_gdb = os.environ["NDK_PATH"] + "/prebuilt/windows-x86_64/bin/gdb.exe"
adb_tool = os.environ["ADB_PATH"] + "/adb.exe"
os.environ["NDT_FOLDER"] = os.getcwd()
#
# do not touch these variables
#
start_app = False
ndt_path = ""
#
# it defines utils functions
#
def ensureAdbIsReady():
    """Start the adb server (a no-op when it is already running)."""
    proc = subprocess.Popen(adb_tool + " start-server ",
                            stdout=subprocess.PIPE)
    proc.wait()
def adbRunApp(packagename, activity):
    """Launch *activity* of *packagename* on the device via ``am start``."""
    launch_cmd = "{0} shell am start {1}/{2}".format(
        adb_tool, packagename, activity)
    subprocess.Popen(launch_cmd, stdout=subprocess.PIPE).wait()
    return
def adbFileExists(filename):
    """Return True when *filename* exists on the device.

    Runs ``adb shell ls <filename>`` and inspects the first line of
    output: an ``ls`` error message or empty output means the file is
    absent.
    """
    command = adb_tool + " shell ls " + filename
    proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    proc.wait()
    # Renamed from ``str`` -- the original shadowed the builtin.
    first_line = proc.stdout.readline()
    if "No such file" in first_line:
        return False
    # The original used ``len(line) is 0`` -- an identity comparison on
    # an int that only works via CPython's small-int caching; test for
    # emptiness with equality instead.
    return len(first_line) != 0
def adbCreateFile(filename, content):
    """Create *filename* on the device containing *content*.

    The content is written to a local scratch file, pushed to the
    device with ``adb push`` and the scratch file is removed again.
    """
    # ``with`` guarantees the scratch file is closed even if the write
    # fails (the original leaked the handle on error).
    with open("commands_tmp.txt", "w") as tmp:
        tmp.write(content)
    try:
        proc = subprocess.Popen(adb_tool + " push commands_tmp.txt " + filename)
        proc.wait()
    finally:
        # Always clean up the local scratch file, even when push raises.
        os.remove("commands_tmp.txt")
def adbDeleteFile(filename):
    """Delete *filename* on the device via ``adb shell rm``."""
    remove_cmd = adb_tool + " shell rm " + filename
    subprocess.Popen(remove_cmd, stdout=subprocess.PIPE).wait()
def adbPullFile(src, dst):
    """Copy *src* from the device to local path *dst* via ``adb pull``."""
    pull_cmd = " ".join([adb_tool, "pull", src, dst])
    subprocess.Popen(pull_cmd, stdout=subprocess.PIPE).wait()
def adbIsDebuggable(packagename):
    """Return True when *packagename* was built android:debuggable.

    ``run-as`` refuses with an explanatory message for non-debuggable
    packages, so the first output line is checked for that message.
    """
    command = adb_tool + " shell run-as " + packagename + " echo yes"
    proc = subprocess.Popen(command, stdout=subprocess.PIPE)
    proc.wait()
    # Renamed from ``str`` -- the original shadowed the builtin.
    reply = proc.stdout.readline()
    return "is not debuggable" not in reply
def system(command):
    """Run *command*, swallow its stdout, and block until it exits."""
    subprocess.Popen(command, stdout=subprocess.PIPE).wait()
def isDeviceConnected():
    """Return True when ``adb devices`` lists at least one device.

    The first output line is the "List of devices attached" header
    (which itself contains the word "device"), so only the second line
    is inspected for a device entry.
    """
    proc = subprocess.Popen(adb_tool + " devices", stdout=subprocess.PIPE)
    proc.wait()
    proc.stdout.readline()  # skip the header line
    # Renamed from ``str`` -- the original shadowed the builtin.
    second_line = proc.stdout.readline()
    return "device" in second_line
def isDeviceX86():
    """Return True when the device advertises an x86 ABI."""
    abi_cmd = adb_tool + " shell getprop ro.product.cpu.abilist"
    proc = subprocess.Popen(abi_cmd, stdout=subprocess.PIPE)
    proc.wait()
    abi_list = proc.stdout.readline()
    return "x86" in abi_list
def adbPidOf(packagename):
    """Return the pid (as a string) of *packagename*, or None when it
    is not running.

    Parses the matching ``ps`` output line; the pid is the second
    whitespace-separated column.
    """
    command = adb_tool + " shell ps | grep " + packagename
    proc = subprocess.Popen(command, stdout=subprocess.PIPE)
    proc.wait()
    ps_line = proc.stdout.readline()
    if not ps_line:
        # The original used ``len(x) is 0`` -- an identity test on an
        # int that relies on CPython's small-int caching.
        return None
    # A list comprehension keeps this working on Python 3 as well,
    # where ``filter`` returns a non-subscriptable iterator.
    columns = [col for col in ps_line.split(" ") if col]
    return columns[1]
def findNdtPath():
    """Return the install directory of the native debugging tools on
    the device, or "" when they are not installed.

    Android may place the NDT package under any of a few numbered
    ``/data/app`` slots, so each candidate directory is probed in
    order (same order as the original if/elif chain).
    """
    candidates = (
        "/data/data/com.android.ndt/lib/",
        "/data/app/com.android.ndt-1/lib/",
        "/data/app/com.android.ndt-2/lib/",
        "/data/app/com.android.ndt-3/lib/",
    )
    for candidate in candidates:
        if adbFileExists(candidate + "gdbserver.so"):
            return candidate
    return ""
# ---------------------------------------------------------------------------
# Main script flow (Python 2): sanity-check the environment, attach
# gdbserver on the device, generate a gdb command file, and launch the
# local gdb client against it.
# ---------------------------------------------------------------------------
print "\r\n[+] Current config:"
print " Package name : " + package_name
print " Main Activity : " + main_activity
print " Shared library folder : " + shared_library_dir
print " Gdb executable : " + android_ndk_gdb
print "\r\n"
ensureAdbIsReady();
print " [+] Checking whether device is connected"
if not isDeviceConnected() :
    print "Error: device disconnected!"
    sys.exit(0);
# Forward the gdbserver port to the host.
commandSys = adb_tool + ' forward tcp:12345 tcp:12345'
system(commandSys);
print " [+] Checking whether application is debuggable"
if not adbIsDebuggable(package_name):
    print "\r\n Error: application is not debuggable"
    sys.exit(0);
print " [+] Checking whether native debugging tools are installed"
ndt_path = findNdtPath()
# NOTE(review): ``len(x) is 0`` is an identity comparison on an int; it
# works only through CPython's small-int caching and should be ``== 0``.
if len(ndt_path) is 0:
    print " Installing Native Debugging tools..."
    commandSys = adb_tool + ' install -r -d ./../../device/native-debugging-tools.apk'
    system(commandSys)
    ndt_path = findNdtPath()
    if len(ndt_path) is 0:
        print "Installation failed"
        sys.exit(0);
    print " Installation found : " + ndt_path
print " [+] Checking whether application is running "
pid = adbPidOf(package_name)
if pid == None:
    print " Application is not running (debug from start)"
    start_app = True
else:
    print " Pid: " + pid
# Either start the app with a commands file in place (debug-from-start)
# or attach to the already-running process via the in-app remote shell.
if start_app :
    print " Creating commands.txt into the device"
    print "\r\n IMPORTANT NOTE: It only works if the applications implements AndroidRemoteExec"
    adbCreateFile("/data/local/tmp/commands.txt", ndt_path + "gdbserver.so :12345 --attach {PID}")
    adbRunApp(package_name, main_activity);
    time.sleep(2);
    adbDeleteFile("/data/local/tmp/commands.txt");
else :
    print " [+] Connecting to remote process"
    commandSys = adb_tool + ' forward tcp:3435 tcp:3435'
    system(commandSys);
    s = None
    try :
        # The app's AndroidRemoteExec shell listens on 3435 and executes
        # the command string it receives (here: attach gdbserver).
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect(("127.0.0.1", 3435))
        data = s.recv(1024)
        if "in-app-remote-shell" in data:
            print " Connection succeed"
            print " Attaching debugger"
            s.send( ndt_path + "gdbserver.so :12345 --attach {PID}")
            time.sleep(1);
            s.close();
        else:
            # print "\r\n Error: Application doesn't implement AndroidRemoteExec"
            # print " Read ./AndroidRemoteExec/README.md for more info"
            exit(1)
    # NOTE(review): bare ``except`` also swallows SystemExit/KeyboardInterrupt;
    # catching ``socket.error`` would be safer.
    except:
        print "\r\n Error: connection failed (127.0.0.1:3435)"
        print " Does your application implement AndroidRemoteExec ?"
        print " if it does, try again or try restarting the app/device"
        exit(1)
# creating commands.txt file for gdb client
print " [+] Creating configuration for Gdb client"
# Work from the ABI-matching symbol directory so gdb's relative
# solib-search-path/sysroot entries resolve.
if isDeviceX86() :
    copyfile(".gdbinit", shared_library_dir + "/x86/.gdbinit")
    os.chdir(shared_library_dir + "/x86/")
else:
    copyfile(".gdbinit", shared_library_dir + "/armeabi-v7a/.gdbinit")
    os.chdir(shared_library_dir + "/armeabi-v7a/")
if os.path.exists("commands.txt") :
    os.remove("commands.txt")
commands_file = open("commands.txt", "w")
commands_file.write("set osabi GNU/Linux \r\n");
if isDeviceX86() :
    commands_file.write("set architecture i386 \r\n");
else:
    commands_file.write("set architecture arm \r\n");
commands_file.write("set solib-search-path ./ \r\n");
commands_file.write("set sysroot ./ \r\n");
commands_file.write("file app_process32 \r\n");
commands_file.write("target remote:12345 \r\n");
# SIG33 is raised routinely on Android; keep gdb from stopping on it.
commands_file.write("handle SIG33 nostop \r\n");
commands_file.write("handle SIG33 noprint \r\n");
commands_file.write("shell cls\r\n");
commands_file.write("shell echo.\r\n");
commands_file.write("shell echo Welcome to the GNU Project Debugger\r\n");
commands_file.write("shell echo you are debugging: " + package_name + "\r\n");
commands_file.write("shell echo type 'help' for a list of gdb commands\r\n");
commands_file.write("shell echo.\r\n");
commands_file.close();
print " [+] Pulling required files from device"
if isDeviceConnected() :
    if adbFileExists("/system/bin/app_process32"):
        adbPullFile("/system/bin/app_process32", "app_process32")
    elif adbFileExists("/system/bin/app_process"):
        adbPullFile("/system/bin/app_process", "app_process32")
else :
    print " [+] Error: device disconnected!, please reconnect it and try it again"
    sys.exit(0)
# Finally hand control to the interactive gdb client.
try:
    subprocess.call(android_ndk_gdb + " -q --command=commands.txt -iex \\\"add-auto-load-safe-path /\\\" ");
# NOTE(review): bare ``except`` hides the reason gdb failed to start.
except:
    sys.exit(0);
"content_hash": "03aa7bccfd79dcb3acf3bb8266577bf5",
"timestamp": "",
"source": "github",
"line_count": 238,
"max_line_length": 108,
"avg_line_length": 34.168067226890756,
"alnum_prop": 0.6519921298573537,
"repo_name": "DavidLanderosAlcala/Native-Debugging-Tools",
"id": "7c7233366069299e01f0f9b5dabef1e5a2f6ad44",
"size": "9276",
"binary": false,
"copies": "1",
"ref": "refs/heads/NDT-Lite",
"path": "NativeDebuggingTools/desktop/gdb/run_gdb_debugger.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "420"
},
{
"name": "C",
"bytes": "6609"
},
{
"name": "C++",
"bytes": "39779"
},
{
"name": "GDB",
"bytes": "1672"
},
{
"name": "GLSL",
"bytes": "222"
},
{
"name": "HTML",
"bytes": "1155"
},
{
"name": "Java",
"bytes": "2278"
},
{
"name": "JavaScript",
"bytes": "2590"
},
{
"name": "Makefile",
"bytes": "7776"
},
{
"name": "Python",
"bytes": "87974"
}
],
"symlink_target": ""
} |
class _Processor():
    def _init_helper(self, vars_):
        """Overwrite defaults (if they exist) with arguments passed to constructor"""
        # ``vars_`` is the caller's ``vars()``: skip ``self`` itself and
        # flatten the ``kwargs`` dict into individual attributes.
        for name in vars_:
            value = vars_[name]
            if name == 'kwargs':
                for kw_name in value:
                    setattr(self, kw_name, value[kw_name])
            elif name != 'self':
                setattr(self, name, value)

    def __init__(self, **kwargs):
        """
        In a subclass, arguments may be formally defined to avoid the use of keywords
        (and to throw errors when bogus keyword arguments are passed)::

            def __init__(self, arg1='foo', arg2='bar')
        """
        self._init_helper(vars())
| {
"content_hash": "e8cf2b9ec35877f6882233786dd9bc87",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 85,
"avg_line_length": 35.421052631578945,
"alnum_prop": 0.5260029717682021,
"repo_name": "azavea/python-omgeo",
"id": "1dd2a62dbba001617ad8971770515b5707f33964",
"size": "673",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "omgeo/processor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5108"
},
{
"name": "Makefile",
"bytes": "5588"
},
{
"name": "Python",
"bytes": "137188"
},
{
"name": "Shell",
"bytes": "318"
}
],
"symlink_target": ""
} |
"""
*~ Pokus expansion 1.2 ~*
av
Gaute og Nikolas
"""
#Start med å importere alle de nødvendige delfilene.
from klasser import *
from grafikk import *
from quests import *
from prosedyrer import *
from troll import generer_troll
from gnom import generer_gnom
#Mainloop:
def cerberus_loop(spiller, inv, klasser, spellbook):
    """Main hub loop for the Obsidian/volcano zone.

    Shows the zone map, reads a one-letter destination choice and runs
    the chosen sub-loop (quests, shop, volcano fights, crystal caves,
    the Cerberus encounter, the Klartenker guild, saving) until the
    player leaves the zone ('f'), then returns via the world map.
    """
    qlog = klasser.questlog(3)
    ferdig = False
    # First visit: force the introductory hellhound fight.
    if not qlog.hent_quest(0).startet():
        fiende = generer_hellhound(spiller, True)
        ferdig = not angrip(spiller, fiende, inv, klasser, spellbook, intro=True)
    while not ferdig:
        cerberus_kart(qlog)
        # One flag per destination; exactly one is set by the menu below.
        valg = False
        quest = False
        gaaTilButikk = False
        vulkan = False
        lagre = False
        hule = False
        klartenker = False
        south = False
        cerberus = False
        while not valg:
            inn = input("Hvor vil du gå?\n> ").lower()
            if inn == "f":
                valg = True
                ferdig = True
            if inn == "q":
                quest = True
                valg = True
            if inn == "k":
                gaaTilButikk = True
                valg = True
            if inn == "v":
                vulkan = True
                valg = True
            if inn == "l":
                lagre = True
                valg = True
            # Destinations below unlock only once their quest gate is met.
            if inn == "n" and qlog.hent_quest(5).ferdig():
                klartenker = True
                valg = True
            if inn == "h" and qlog.hent_quest(2).startet():
                hule = True
                valg = True
            if inn == "s" and qlog.hent_quest(3).startet():
                hule = True
                valg = True
                south = True
            if inn == "c" and qlog.hent_quest(4).startet():
                cerberus = True
                valg = True
        while quest:
            # Note that oppdrag_tilgjengelige() is a function with a
            # return value (the player's menu choice).
            inn = qlog.oppdrag_tilgjengelige(spiller.lvl(), "utenfor stallen").lower()
            if inn != "f" and inn != "ferdig":
                try:
                    qlog.snakk(int(inn) - 1, spiller, inv)
                except ValueError:
                    print("\nDu må skrive et tall!\n")
            else:
                quest = False
            # Progresses the Vassle quest.
            if qlog.hent_quest(4).ferdig():
                klasser.questlog(5).hent_quest(1).progresser()
        while gaaTilButikk:
            klasser.butikk(2).interaksjon(inv)
            gaaTilButikk = False
        while lagre:
            minnestein(spiller, inv, klasser)
            lagre = False
        # Volcano grinding: keep fighting until the player flees or dies.
        while vulkan:
            fiende = generer_vulkan_fiende(spiller)
            vulkan = angrip(spiller, fiende, inv, klasser, spellbook)
        # Crystal-cave expedition: travel fights, then cave fights, then
        # the quest payoff (crystal pickup or gadget placement).
        while hule:
            clear_screen()
            if south:
                print("\n {} drar mot den sørlige krystallhulen.\n".format(spiller.navn()))
            else:
                print("\n {} drar mot {}krystallhulen.\n".format(\
                    spiller.navn(), "den nordlige " * int(qlog.hent_quest(3).startet())))
            pause()
            for x in range(randint(3, 6)):
                if not angrip(spiller, generer_vulkan_fiende(spiller), inv, klasser, spellbook):
                    hule = False
                    south = False
                    break
            if not hule: break
            clear_screen()
            print("\n " + spiller.navn(), "har nådd frem til hulen!\n")
            if not qlog.hent_quest(3).startet():
                print(" Du ser deg rundt. Det er krystaller overalt i hulen, og det virker som om de ")
                print(" gjør helveteshundene sterkere! Du ser en gedigen krystall i enden av hulen, ")
                print(" kanskje Forsker Frederikk er interessert i den? Du begir deg mot den.\n")
            else:
                print(" Du begir deg innover mot senteret av hulen.\n")
            pause()
            # Inside the cave the hounds are strengthened (sterk=True) and
            # the crystals drain kp each round (hule=True).
            for x in range(randint(4, 5)):
                if not angrip(spiller, generer_hellhound(spiller, sterk=True), inv, klasser, spellbook, hule=True):
                    hule = False
                    south = False
                    break
            if not hule: break
            clear_screen()
            if not qlog.hent_quest(2).sjekk_ferdig():
                print("\n " + spiller.navn(), "har fått tak i den gedigne krystallen!")
                print(" Du drar tilbake til forsknigslaben.\n")
                qlog.hent_quest(2).progresser()
                pause()
                hule = False
                south = False
                break
            # Quest 3: place a gadget in whichever cave (north/south) has
            # not been handled yet; guarded by a boss ("beta") fight.
            elif not qlog.hent_quest(3).progresjon() and qlog.hent_quest(3).startet() and not south or\
                not qlog.hent_quest(3).progresjon_liste()[0] and qlog.hent_quest(3).startet() and south:
                print(" Du er i posisjon til å utplassere duppedingsen!")
                if angrip(spiller, generer_beta(spiller, 1 + int(south)), inv, klasser, spellbook, hule=True) \
                and (qlog.hent_quest(3).startet() and not qlog.hent_quest(3).progresjon() and not south or \
                not qlog.hent_quest(3).progresjon_liste()[0] and qlog.hent_quest(3).startet() and south):
                    if south:
                        qlog.hent_quest(3).progresser_liste(0)
                        south = False
                    else:
                        qlog.hent_quest(3).progresser()
                    print(" minoritetsladningsbærer-hvadetnåvarigjen er suksessfullt plassert!\n")
                    print(" Du drar tilbake til forskningslaben.\n")
                    pause()
                    hule = False
                    south = False
        # Cerberus encounter: two hellhound fights, then the boss.
        while cerberus:
            print(spiller.navn(), "drar innover mot kjernen til vulkanen.\n")
            pause()
            for x in range(2):
                fiende = generer_hellhound(spiller)
                if not angrip(spiller, fiende, inv, klasser, spellbook):
                    cerberus = False
                    break
                print(spiller.navn(), "går dypere inn mot vulkanens kjerne.\n")
                pause()
            if not cerberus: break
            cerberusDialog(spiller)
            fiende = generer_cerberus(spiller)
            if angrip(spiller, fiende, inv, klasser, spellbook):
                qlog.hent_quest(4).progresser()
            cerberus = False
        # Klartenker guild: member menu (buy powder / leave) or the
        # recruitment pitch for non-members.
        while klartenker:
            medlem = spiller.spesialisering() == "Klartenker"
            print(" "*4 + "Velkommen {}til de Klartenkendes Forening!".format("tilbake " * int(medlem)).center(65 + 20*int(not medlem), "-"))
            if medlem:
                print("\n Som medlem tilbyr vi deg internpriser på konsentrasjonspulver!")
                print("\n Konsentrasjonspulver +700kp 2500g (k)")
                print(" Meld deg ut Fjerner spesialisering 500g (u)")
                print("\nDu har", inv.penger(), "gullstykker. Skriv 'f' eller 'ferdig' for å dra tilbake.")
                inn = input("Hva vil du gjøre?\n> ").lower().strip()
                if inn in {"f", "ferdig"}:
                    klartenker = False
                elif inn == "k":
                    if inv.penger() >= 2500:
                        inv.legg_til_item(Item("Konsentrasjonspulver", "restoring", kp=700))
                        inv.penger(-2500)
                        print("Du kjøpte en stripe konsentrasjonspulver for 2500 gullstykker.")
                    else:
                        print("Du har ikke råd!")
                    pause()
                elif inn == "u":
                    print("De Klartenkendes Forening krever 500 i gebyr for papirarbeid.")
                    inn = input("Er du sikker på at du vil melde deg ut? \nDu må betale ny medlemsavgift om du vil melde deg inn igjen. (ja/nei)\n> ").lower().strip()
                    if inn in {"j", "ja", "sure"}:
                        if inv.penger() >= 500:
                            inv.penger(-500)
                            spiller.spesialisering(False)
                            spiller.hev_kp(-250)
                            print("Du har meldt deg ut av Foreningen for Klartenkere.")
                            klartenker = False
                            inv.fjern_spesialiserte_items("Klartenker")
                            pause()
                        else:
                            print("Du har ikke råd!")
                            pause()
            else:
                print("\n Dersom du er fremtidsrettet nok til å bli medlem av foreningen vår, vil du ha tilgang ")
                print(" til en eksklusiv trylleformel som regenererer ekstra konsentrasjonspoeng over tid! Og ")
                print(" om dette i seg selv ikke har overbevist deg om at klartenkere helt klart triumferer")
                print(" alle andre spesialiseringer, vil du og få en permanent bonus på 250 konsentrasjonspoeng,")
                print(" samt tilgang til prima-kvalitets pulver som får konsentrasjonen din rett opp igjen,")
                print(" selvfølgelig til en ekstra god pris! I tillegg til dette vil du og ha mulighet til å")
                print(" bruke ting du finner på din ferd som kun en klartenker vil vite hvordan skal brukes!")
                print(" Det eneste du trenger å gjøre, er å betale en minimal medlemsavgift på 8000 gullstykker, ")
                print(" et skikkelig kupp!")
                print("\nDu har", inv.penger(), "gullstykker.")
                inn = input("Vil du bli en klartenker?\n> ").lower().strip()
                if inn in {"ja", "j", "yes", "ja!", "hell yes!"}:
                    if inv.penger() >= 8000 and not spiller.spesialisering():
                        inv.penger(-8000)
                        spiller.spesialisering("Klartenker")
                        spiller.hev_kp(250)
                        print("\nGratulerer! Du er nå offisielt en Klartenker!\n")
                        pause()
                    elif inv.penger() >= 8000:
                        print("\n Du har allerede en spesialisering! Men frykt ikke, ønsker du likevel å bli ")
                        print(" en klartenker, kan du melde deg ut av den foreningen du for øyeblikket er ")
                        print(" medlem i og komme tilbake senere!\n")
                        klartenker = False
                        pause()
                    else:
                        print("Du har ikke nok gullstykker til å betale medlemsavgiften!")
                        klartenker = False
                        pause()
                else:
                    klartenker = False
    if ferdig:
        return verdenskart(spiller)
def angrip(spiller, fiende, inv, klasser, spellbook, intro=False, hule=False):
    """Run one battle between the player and *fiende*.

    Returns True when the enemy is defeated, False when the player
    flees, dies, or the fight ends a quest step. ``intro`` biases the
    enemy's fire ability; ``hule`` applies the crystal-cave kp drain.
    Cerberus starts with three heads and acts once per remaining head.
    """
    qlog = klasser.questlog(3)
    skriv_ut(spiller, [fiende], spellbook)
    hoder = 1 + 2 * int(fiende.navn() == "Cerberus")
    while True:
        inn = input("\nHva vil du gjøre?\n> ").lower()
        # ``tur`` indicates that it is the player's turn to act.
        tur = kommandoer(inn, spiller, fiende, inv, klasser, spellbook)[0]
        if inn == "f" or inn == "flykt":
            print(spiller.navn(), "drar tilbake til forskningslaben.")
            return False
        # Check whether the enemy is dead; if so the character gets
        # loot and xp.
        if fiende.dead():
            print("--------------------------------------------------------------------"+\
            "\nDu vant!", fiende.navn() + fiende.ending(), "er overvunnet!",spiller.navn(),"får",fiende.xp(),"erfaringspoeng.")
            spiller.kons()
            spiller.gi_xp(fiende.xp())
            fiende.loot(spiller, inv)
            spellbook.reset()
            # Progresses quests (some drops are random chances).
            if qlog.hent_quest(1).startet() and not qlog.hent_quest(1).sjekk_ferdig() \
                and not randint(0, 3) and fiende.navn() == "Helveteshund":
                print("Denne hunden er merket med Obsidian-logo! Du drar den med deg tilbake til forskningslaben.")
                pause()
                qlog.hent_quest(1).progresser()
                return False
            if not qlog.hent_quest(6).sjekk_ferdig() and not randint(0, 7) and fiende.race() == "troll":
                print("Du fant en seksjon fra noen forskningsresultater på trollet! Hvem kan det være sitt?")
                qlog.hent_quest(6).progresser()
            if not randint(0, 59) and not klasser.questlog(2).hent_quest(5).sjekk_ferdig() and fiende.race() == "troll":
                print(spiller.navn(), 'fant et "Trolling Stones"-album! Kanskje noen i hytten hører på slikt?')
                klasser.questlog(2).hent_quest(5).progresser()
            input("Trykk enter for å fortsette\n> ")
            return True
        elif not tur:
            # Enemy turn. Remember hp before it so damage taken this
            # round can be measured for the Smertedreper quest below.
            skadeTatt = spiller.hp()
            if hule:
                print(spiller.navn(), "mistet", spiller.mist_kp(randint(15, 30)), "kp fra krystallene.")
            # Cerberus loses a head at 2/3 and 1/3 hp (70% chance each check).
            if hoder == 3 and fiende.hp() / fiende.xHp() <= 2/3 and randint(1, 10) >= 4 \
                or hoder == 2 and fiende.hp() / fiende.xHp() <= 1/3 and randint(1, 10) >= 4:
                print(fiende.navn(), "mistet et hode!")
                hoder -= 1
            # One action per remaining head.
            for x in range(hoder):
                if fiende.oppholdt():
                    print(fiende.navn() + fiende.ending(), "er oppholdt.")
                elif fiende.kp() >= 90 and (not randint(0, 5) or intro) and not fiende.burning() and fiende.race() == "cerberus":
                    if randint(0, 1):
                        print(fiende.navn() + fiende.ending(), "satte fyr på seg selv!")
                        fiende.a(50)
                        fiende.d(70)
                        fiende.sett_burning()
                    else:
                        print(fiende.navn() + fiende.ending(), "satte fyr på", spiller.navn() + "!")
                        print(spiller.navn(), "mistet", spiller.mist_liv(round(spiller.xHp() * 0.05)), "liv fra flammene.")
                        spiller.sett_burning(CD=3, dmg=round(spiller.xHp() * 0.05))
                    fiende.kp(-90)
                elif fiende.kp() >= 50 and randint(0, 100) == 1:
                    print(fiende.navn() + fiende.ending(), "kastet Restituer!")
                    print(fiende.navn() + fiende.ending(), "restorerte", fiende.restorer(randint(90, 110)), "hp!")
                    fiende.kp(-50)
                else:
                    spiller.angrepet(fiende)
            # Progresses the Smertedreper quest.
            if klasser.questlog(4).hent_quest(6).startet():
                klasser.questlog(4).hent_quest(6).progresser(skadeTatt - spiller.hp())
            # Notify if the character died.
            if spiller.dead():
                input("\nDu døde! Trykk enter for å fortsette\n> ")
                spellbook.reset()
                write_player_died(spiller, "forskningslaben")
                player_died(spiller, inv, klasser)
                return False
        # Prints the character's hp and kp and the enemy's hp for the
        # next round.
        spiller.kons()
        fiende.gen_kons()
        skriv_ut(spiller, [fiende], spellbook)
        if fiende.burning():
            print(fiende.navn() + fiende.ending(), "brenner!")
def generer_vulkan_fiende(spiller):
    """Roll a random volcano encounter.

    Distribution (d10): 1 -> gnome, 2-5 -> troll, 6-10 -> hellhound.
    """
    terning = randint(1, 10)
    if terning == 1:
        return generer_gnom(spiller, 0, False)
    if terning <= 5:
        return generer_troll(spiller)
    return generer_hellhound(spiller)
def generer_hellhound(spiller, sterk=False):
    """Create a hellhound enemy scaled to the player's level.

    With ``sterk=True`` the hound gets flat bonuses to hp, defence and
    kp (used for the crystal-cave variant).
    """
    bonus = int(sterk)
    bytte = Loot()
    # Stats are rolled in the same order as before: hp, a, d, kp --
    # keeping the RNG stream identical.
    liv = 120 + 40 * randint(1, spiller.lvl()) + 400 * bonus
    angrep = 20 + randint(0, 10 * spiller.lvl())
    forsvar = 30 + randint(0, 10 * spiller.lvl()) + 120 * bonus
    kons = 90 + randint(0, 2 * spiller.lvl()) + 50 * bonus
    hund = Fiende(navn="Helveteshund", race="cerberus", loot=bytte,
                  hp=liv, a=angrep, d=forsvar, kp=kons,
                  bonusKp=2, ending="en")
    dynamiskLoot(bytte, hund, spiller)
    skrivHellhound()
    print("\n" + spiller.navn(), "har møtt på en helveteshund!")
    return hund
def generer_beta(spiller, nr):
    """Create one of the two cave mini-bosses ("Churchill" for nr=1,
    "Roosevelt" for nr=2) with a level-scaled loot table.
    """
    lvl = spiller.lvl()
    loot = Loot()
    navn = ["Churchill", "Roosevelt"]
    fiende = Fiende(navn[nr - 1], "cerberus", loot, \
        hp=2600 + randint(0, 2) * 100, \
        d=170 + randint(0, 3) * 10, \
        a=180 + randint(0, 3) * 10, \
        kp=190 + randint(0, 3) * 10, bonusKp=5 + randint(0, 1))
    # Loot table -- second argument to legg_til_item is the entry's
    # weight (presumably a drop chance; confirm against Loot).
    loot.legg_til_item(randint(500, 700), 25)
    item = Item("Helvetesbriller", "beard", d=randint(0, 3)*10, \
        xKp=randint(20, 40 + lvl), ekstraKp=randint(2, 1 + int(lvl / 5)))
    item.sett_loot_tekst("et par briller fra helvete")
    loot.legg_til_item(item, 25)
    item = Item("Ledende hatt", "hat", d=randint(6, 9 + int(lvl / 10)) * 10, \
        xHp=randint(8, 12 + int(lvl/10)) * 10)
    item.sett_loot_tekst("en hatt for semi-ledere")
    loot.legg_til_item(item, 25)
    item = Item("Kjærlighetslapp", "trinket", d=randint(0, 1) * 10, \
        xKp=randint(2, 4) * 10, ekstraKp=randint(2, 2 + int(lvl / 10)))
    loot.legg_til_item(item, 25)
    skrivHellhound(nr)
    print(spiller.navn(), "har møtt", navn[nr - 1] + "!")
    return fiende
def generer_cerberus(spiller):
    """Create the Cerberus boss together with its loot table.

    Boss stats are fixed; only the three-edged sword's attack roll
    depends on the player's level.
    """
    nivaa = spiller.lvl()
    bytte = Loot()
    sjef = Fiende("Cerberus", "cerberus", bytte,
                  hp=7500, a=150, d=150, kp=350, bonusKp=7)
    # Loot table -- second argument to legg_til_item is the entry's
    # weight (presumably a drop chance; confirm against Loot).
    bytte.legg_til_item(3000, 25)
    sverd = Item("Treegget sverd", "weapon",
                 a=randint(15, 17 + int(nivaa / 10)),
                 xHp=-10, xKp=-10, blade=True)
    sverd.sett_loot_tekst("et treegget sverd")
    bytte.legg_til_item(sverd, 25)
    hatt = Item("Hundeskinnshatt", "hat", a=20, xHp=150, d=70)
    bytte.legg_til_item(hatt, 25)
    sko = Item("Ctøvler", "shoes", d=50, xHp=50, a=30)
    sko.sett_loot_tekst("et par ctøvler")
    bytte.legg_til_item(sko, 25)
    skrivCerberus()
    print("\n" + spiller.navn(), "har møtt på Cerberus!")
    return sjef
def cerberusDialog(spiller):
    """Print Cerberus' two-part monologue, pausing after each part."""
    print(""" Endelig har du kommet! Jeg har ventet deg lenge! Jeg har sett deg
gjennom øynene til Roosevelt og Churchill; gjennom alle i skaret. Du har kommet
mot meg som en ustoppelig kraft; en skjebne jeg ikke kan unslippe. Jeg lurer
på hva du tror du er: Hvordan du ser deg selv. Jeg undrer meg om at alt du gjør
i ditt hode er heroisk. Det er få ting som separerer en helt og en skurk.
Begge tror de gjør det rette.
""")
    pause()
    print(""" Du skal få betale for alt du har gjort mot familien min. Alt du
har gjort mot meg! Jeg følte det når du kuttet strupen på flere titall av
skaret mitt. Du kan ikke tenke deg smerten av å leve etter å føle død.
Den som kom før deg var noen av en annen natur. Han så livet med et perspektiv
som gjør ham til min velgjører. Han har gitt meg sjansen til å hevne brødrene mine.
Du slipper ikke unna.
""")
    pause()
def dynamiskLoot(loot, fiende, spiller):
    """Populate *loot* with level-scaled drops for a generated enemy.

    The second argument to ``legg_til_item`` is the entry's weight
    (presumably a drop chance; confirm against Loot). All random rolls
    happen in the same order as before, keeping the RNG stream intact.
    """
    gull = round(10 + fiende.xp() / 10)
    loot.legg_til_item(gull, 60)
    pulver = Item("Tryllepulver", "damaging",
                  dmg=150 + randint(0, int(spiller.lvl() / 2.5)) * 25)
    pulver.sett_loot_tekst("en håndfull tryllepulver")
    loot.legg_til_item(pulver, 10)
    konspulver = Item("Konsentrasjonspulver", "restoring",
                      kp=int(randint(1, spiller.lvl()) / 10) * 25 + 100)
    konspulver.sett_loot_tekst("en stripe konsentrasjonspulver")
    loot.legg_til_item(konspulver, 10)
    drikk = Item("Trolldrikk", "restoring",
                 hp=randint(1, spiller.lvl()) * 5 + 175)
    loot.legg_til_item(drikk, 15)
    stav = Item("Tryllestav", "weapon",
                a=randint(0, 4 * spiller.lvl()),
                xKp=randint(0, 3 * spiller.lvl()))
    loot.legg_til_item(stav, 5)
    sverd = Item("Sverd", "weapon",
                 a=randint(40, 40 + 5 * spiller.lvl()), blade=True)
    sverd.sett_loot_tekst("et sverd")
    loot.legg_til_item(sverd, 5)
    hansker = Item("Flammende hansker", "gloves",
                   xKp=randint(0, 2 * spiller.lvl()),
                   d=randint(0, 3 * spiller.lvl()))
    hansker.sett_loot_tekst("et par flammende hansker")
    loot.legg_til_item(hansker, 5)
    oyne = Item("Brennende øyne", "trinket",
                xHp=randint(0, 4) * 5,
                xKp=randint(1, 5) * 5,
                ekstraKp=int(randint(0, 10 + spiller.lvl()) / 10))
    oyne.sett_loot_tekst("et par brennende øyne")
    loot.legg_til_item(oyne, 4)
def bossLoot(loot):
    # Flat boss drop: adds the entry 500 at weight 50 (same call shape
    # as the gold drop in dynamiskLoot -- presumably gold; confirm
    # against Loot and this helper's callers).
    loot.legg_til_item(500, 50)
def cerberus_kart(qlog):
    """Print the Obsidian zone menu; extra destinations appear as their
    quests are started/finished in *qlog*.
    """
    skrivVulkan()
    print("""
 Velkommen til forskningslaben Obsidian! Her er stedene du kan dra:
 Vulkanen (v) Dra til vulkanen og sloss mot helvetes søte biskevisker
 Butikken (k) Kjøp det du trenger hos "Smolderbrødrenes Smie"
 Forskningslaben (q) Se om ved forskningslaben utenfor trenger din hjelp""")
    if qlog.hent_qLog()[2].startet() and not qlog.hent_qLog()[2].ferdig():
        print(" Krystallhulen (h) Dra til krystallhulen og undersøk")
    if qlog.hent_qLog()[3].startet():
        # NOTE(review): this entry shares the "(h)" key with the plain
        # crystal cave above -- verify the intended key (likely a typo).
        print(" Nord-krystallhulen (h) Dra til den nordlige krystallhulen")
    if qlog.hent_qLog()[3].startet():
        print(" Sør-krystallhulen (s) Dra til den sørlige krystallhulen")
    if qlog.hent_qLog()[4].startet():
        print(" Kjernen til vulkanen (c) Ferd inn mot kjernen av vulkanen og konfronter Cerberus")
    if qlog.hent_qLog()[5].ferdig():
        print(" Ned kjelleren (n) Besøk hovedkontoret til de Klartenkendes Forening")
    print(" Minnesteinen (l) Lagre sjelen din i Obsidians lokale minnestein")
    print(" Ut i verden (f) Viser deg kart over alle stedene du kan dra\n")
def cerberusButikk(butikk):
    """Stock the Obsidian shop: goodbye text plus six wares.

    Each ``Vare`` couples an item with a price and a one-letter buy key.
    """
    butikk.legg_til_hadeTekst("\nIkke brenn deg der ute!\n")
    butikk.legg_til_vare(Vare(Item("Tryllepulver", "damaging", dmg=250), 300, "t"))
    butikk.legg_til_vare(Vare(Item("Trolldrikk", "restoring", hp=300), 400, "d"))
    butikk.legg_til_vare(Vare(Item("Konsentrasjonspulver", "restoring", kp=150), 500, "k"))
    butikk.legg_til_vare(Vare(Item("Tryllestav", "weapon", a=60, xKp=45), 1000, "w"))
    butikk.legg_til_vare(Vare(Item("Vulkansverd", "weapon", a=140, xHp=30, blade=True), 6500, "v"))
    butikk.legg_til_vare(Vare(Item("Frysehansker", "gloves", xKp=10, xHp=30, d=20), 3500, "h"))
def cerberusQuest(qlog, spiller):
    """Register the Obsidian zone's quest chain (five main quests, the
    Klartenker specialisation quest and one bonus quest) in *qlog*.
    """
    navn = spiller.navn()
    # Quest 1: test the new spell on enemies.
    desk = cerberus_q1(navn)
    ferdigDesk = cerberus_q1_ferdig(navn)
    q = Quest(desk, ferdigDesk, 5, 15, "Forsker Frederikk")
    q.legg_til_reward(xp=3500, gull=200, settTilgjengelig=True, settTilgjengeligIndeks=1)
    q.legg_til_progresjonTekst("Fiender nedkjølt: ")
    q.legg_til_svarTekst("\nEr du klar for å prøve ut formelen? (ja/nei)\n> ")
    q.legg_til_ekstra_tekst(navn + " lærte seg Nedkjøl (n) med 100% treffsikkerhet!")
    qlog.legg_til_quest(q)
    # Quest 2: drag marked hellhounds back to the lab.
    desk = cerberus_q2(navn)
    ferdigDesk = cerberus_q2_ferdig(navn)
    q = Quest(desk, ferdigDesk, 3, 16, "Forsker Frederikk", tilgjengelig=False)
    q.legg_til_reward(xp=3000, gull=400, settTilgjengelig=True, settTilgjengeligIndeks=2)
    q.legg_til_progresjonTekst("Hunder slept tilbake: ")
    q.legg_til_svarTekst("\nVil du hjelpe oss hente tilbake hundene? (ja/nei)\n> ")
    qlog.legg_til_quest(q)
    # Quest 3: explore the crystal cave.
    desk = cerberus_q3(navn)
    ferdigDesk = cerberus_q3_ferdig(navn)
    q = Quest(desk, ferdigDesk, 1, 17, "Forsker Frederikk", tilgjengelig=False)
    q.legg_til_reward(xp=4000, settTilgjengelig=True, settTilgjengeligIndeks=3)
    q.legg_til_progresjonTekst("Hule utforsket: ")
    q.legg_til_svarTekst("\nVil du dra til krystallhulen og utforske? (ja/nei)\n> ")
    qlog.legg_til_quest(q)
    # Quest 4: place gadgets in the north and south caves (two-part
    # progression via legg_til_progresjon/progresjonTekstListe).
    desk = cerberus_q4(navn)
    ferdigDesk = cerberus_q4_ferdig(navn)
    q = Quest(desk, ferdigDesk, 1, 17, "Forsker Frederikk", tilgjengelig=False)
    q.legg_til_reward(xp=7000, gull=800, settTilgjengelig=True, settTilgjengeligIndeks=4)
    q.legg_til_progresjonTekst("Duppeditt i den nordlige krystallhulen plassert: ")
    q.legg_til_svarTekst("\nKan du plassere duppedingsene? (ja/nei)\n> ")
    q.legg_til_progresjon(1)
    q.legg_til_progresjonTekstListe("Duppeditt i den sørlige krystallhulen plassert: ", 0)
    qlog.legg_til_quest(q)
    # Quest 5: capture Cerberus.
    desk = cerberus_q5(navn)
    ferdigDesk = cerberus_q5_ferdig(navn)
    q = Quest(desk, ferdigDesk, 1, 18, "Forsker Frederikk", tilgjengelig=False)
    q.legg_til_reward(xp=10000, gull=1000)
    q.legg_til_progresjonTekst("Cerberus fanget: ")
    q.legg_til_svarTekst("\nEr du magikeren til å bringe inn den mektige Cerberus? (ja/nei)\n> ")
    qlog.legg_til_quest(q)
    # Klartenker specialisation quest (spend 12500 kp).
    desk = klartenker_intro(navn)
    ferdigDesk = klartenker_intro_ferdig(navn)
    q = Quest(desk, ferdigDesk, 12500, 20, "Klara Klartenker")
    q.legg_til_reward(xp=10000, gull=5000, kp=50)
    q.legg_til_progresjonTekst("Konsentrajsonspoeng brukt: ")
    q.legg_til_svarTekst("\nØnsker du å søke om å spesialisere deg som Klartenker? (ja/nei)\n> ")
    qlog.legg_til_quest(q)
    # Bonus quest 1: recover research sections from trolls; has an
    # alternative "betray Marie" completion with its own reward.
    desk = cerberus_bq1(navn)
    ferdigDesk = cerberus_bq1_ferdig(navn)
    q = Quest(desk, ferdigDesk, 5, 1, "Marinbiolog Marie", bonus=True, resetIfDead=True)
    item = Item("Tang", "beard", xKp=65, ekstraKp=4)
    q.legg_til_reward(xp=5000, item=item, gp=2)
    q.legg_til_progresjonTekst("Seksjoner funnet: ")
    q.legg_til_svarTekst("\nVil du gi forskningsresultatene til Marie? (ja/nei)\n> ")
    q.legg_til_ekstra_tekst("Tusen takk " + navn + "! Nå kan jeg endelig dra tilbake til hytten min!\n" + \
    "Her har du skjegget mitt som takk!")
    q.legg_til_alt_desk("Vil du gi forskningsresultatene til Forsker Frederikk og la ham ta æren istedenfor?\n> ")
    q.legg_til_alt_ekstra_tekst("Du gir forskningsresultatene til Forsker Frederikk. \n " + \
    "Marinbiolog Marie ser sitt livs verk bli stjelt foran øynene hennes. \nForsker Frederikk gir deg en god slump gullstykker.")
    q.legg_til_alt_reward(xp=5000, gull=2500, ep=3)
    qlog.legg_til_quest(q)
| {
"content_hash": "094828e6022954507f399def92e50b3c",
"timestamp": "",
"source": "github",
"line_count": 596,
"max_line_length": 168,
"avg_line_length": 44.104026845637584,
"alnum_prop": 0.5633417028075782,
"repo_name": "nikolhm/Pokus",
"id": "abc71e6da585ee615c3baf6022e3a96eb4cbb9a4",
"size": "26390",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cerberus.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "396739"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, unicode_literals
from django import forms
from django.conf import settings
from django.test.client import RequestFactory
from django.template import loader, Template
from drf_toolbox.renderers import APIRenderer
from rest_framework import generics, serializers, viewsets
from rest_framework.renderers import BrowsableAPIRenderer
from rest_framework.request import Request
from tests.compat import mock
import unittest
class RendererTests(unittest.TestCase):
    """Exercise the ``APIRenderer`` shipped with DRF Toolbox, which
    decorates the standard browsable API renderer.
    """

    def setUp(self):
        factory = RequestFactory()
        renderer = APIRenderer()
        renderer.accepted_media_type = 'text/html'
        self.request = Request(factory.get('/something/'))
        renderer.renderer_context = {'request': self.request}
        self.renderer = renderer

    def test_render_method(self):
        """The renderer injects the project version into the context and
        then delegates to the superclass ``render``.
        """
        version_patch = mock.patch.object(settings, 'VERSION',
                                          create=True, new='24.60.1')
        render_patch = mock.patch.object(BrowsableAPIRenderer, 'render')
        with version_patch, render_patch as super_render:
            self.renderer.render('foo', 'application/json', {})
            super_render.assert_called_with(
                'foo', 'application/json', {'version': '24.60.1'})
| {
"content_hash": "1c6960cb4e66c55bbc08aa957e5462ab",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 73,
"avg_line_length": 40.30555555555556,
"alnum_prop": 0.6650585802894555,
"repo_name": "pombredanne/drf-toolbox",
"id": "de9eec474bb9424b263e636a166ae0a31a3381ec",
"size": "1451",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_renderer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "110854"
}
],
"symlink_target": ""
} |
import tensorflow as tf
import os
import fnmatch
import numpy as np
def get_image_paths(im_dir, fname_pattern):
    '''
    Recursively collect paths of files under `im_dir` whose basename
    matches `fname_pattern`.

    im_dir: root directory to search
    fname_pattern: shell-style wildcard pattern (e.g. "*.png"), matched
        against each file name with `fnmatch`
    return: list of matching file paths, in `os.walk` traversal order
    '''
    paths = []
    for root, _dirnames, filenames in os.walk(im_dir):
        for filename in fnmatch.filter(filenames, fname_pattern):
            paths.append(os.path.join(root, filename))
    return paths
def get_input_batch_tensor(input_reader_fn,
                           inputpaths,
                           batch_size,
                           shuffle=True,
                           num_epochs=None,
                           path_queue_capacity=200,
                           data_queue_capacity=None,
                           min_after_dequeue=30,
                           num_threads=1):
    """Build a TF input pipeline: queue `inputpaths`, read each path with
    `input_reader_fn`, and batch the resulting tensors.

    input_reader_fn: callable taking the filename queue and returning
        tensors suitable for `tf.train.batch` (enqueue_many semantics)
    data_queue_capacity: defaults to min_after_dequeue + 3 * batch_size
        when falsy
    """
    with tf.variable_scope('batch_input_producer'):
        filename_queue = tf.train.string_input_producer(
            inputpaths,
            capacity=path_queue_capacity,
            shuffle=shuffle,
            num_epochs=num_epochs)
        if not data_queue_capacity:
            data_queue_capacity = min_after_dequeue + 3 * batch_size
        samples = input_reader_fn(filename_queue)
        return tf.train.batch(samples,
                              batch_size,
                              num_threads=num_threads,
                              capacity=data_queue_capacity,
                              enqueue_many=True)
"content_hash": "c741245c6abf14947be22e9c4e6fe270",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 81,
"avg_line_length": 39.575,
"alnum_prop": 0.4883133291219204,
"repo_name": "roytseng-tw/DCGAN-tf",
"id": "be2a9e358db36f04a56f9b39586c52608ea7f906",
"size": "1583",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "input_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35520"
}
],
"symlink_target": ""
} |
from Tkinter import Tk
from RC_UI import RC_UI
from CommandSender import CommandSender
def main():
    """Create the Tk root window, attach the remote-control UI, and run
    the event loop until the window is closed."""
    root = Tk()
    # root.geometry("250x150+300+300")
    command_sender = CommandSender()
    ui = RC_UI(root, command_sender)
    root.mainloop()


if __name__ == '__main__':
    main()
"content_hash": "163be0d205cebd20d334d5deba259138",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 39,
"avg_line_length": 19.571428571428573,
"alnum_prop": 0.6313868613138686,
"repo_name": "xfleckx/BeMoBI_Tools",
"id": "7ddbff38642bf7ade59ca750f6a6c240a34f710a",
"size": "274",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/RemoteControl/RCApp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "88542"
},
{
"name": "Matlab",
"bytes": "7541"
},
{
"name": "Processing",
"bytes": "5163"
},
{
"name": "Python",
"bytes": "129950"
}
],
"symlink_target": ""
} |
"""Config sub-commands"""
import getpass
import locale
import logging
import os
import platform
import subprocess
import sys
from typing import Optional
from urllib.parse import urlsplit, urlunsplit
import requests
import tenacity
from rich.console import Console
from airflow import configuration
from airflow.cli.simple_table import SimpleTable
from airflow.providers_manager import ProvidersManager
from airflow.typing_compat import Protocol
from airflow.utils.cli import suppress_logs_and_warning
from airflow.version import version as airflow_version
log = logging.getLogger(__name__)
class Anonymizer(Protocol):
    """Structural interface for scrubbing PII out of reported values.

    Each method takes a raw value and returns the scrubbed form; the
    implementations in this module return falsy inputs unchanged.
    """
    def process_path(self, value) -> str:
        """Return *value* with PII removed from filesystem paths"""
    def process_username(self, value) -> str:
        """Return *value* with the username obscured"""
    def process_url(self, value) -> str:
        """Return *value* with credentials embedded in the URL obscured"""
class NullAnonymizer(Anonymizer):
    """Pass-through anonymizer: every value is returned unchanged."""

    def process_path(self, value):
        return value

    def process_username(self, value):
        return value

    def process_url(self, value):
        return value
class PiiAnonymizer(Anonymizer):
    """Anonymizer that strips personally identifiable data from values."""

    def __init__(self):
        # Home directory first so a username embedded in the home path is
        # collapsed together with it.
        self._path_replacements = {
            os.path.expanduser("~"): "${HOME}",
            getpass.getuser(): "${USER}",
        }

    def process_path(self, value):
        if not value:
            return value
        for needle, replacement in self._path_replacements.items():
            value = value.replace(needle, replacement)
        return value

    def process_username(self, value):
        # Keep only the first and last character of the user name.
        return value[0] + "..." + value[-1] if value else value

    def process_url(self, value):
        """Mask the username and password embedded in a URL's authority."""
        if not value:
            return value
        parts = urlsplit(value)
        netloc = None
        if parts.netloc:
            # Split the authority into credentials and host.
            if "@" in parts.netloc:
                userinfo, _, host = parts.netloc.partition("@")
            else:
                userinfo, host = None, parts.netloc
            username = password = None
            if userinfo:
                username, sep, password = userinfo.partition(":")
                if not sep:
                    password = None
            # Scrub whatever credentials were present.
            username = self.process_username(username) if username else None
            password = "PASSWORD" if password else None
            # Reassemble the authority from the scrubbed pieces.
            if username and password and host:
                netloc = username + ":" + password + "@" + host
            elif username and host:
                netloc = username + "@" + host
            elif password and host:
                netloc = ":" + password + "@" + host
            elif host:
                netloc = host
            else:
                netloc = ""
        return urlunsplit(
            (parts.scheme, netloc, parts.path, parts.query, parts.fragment))
class OperatingSystem:
    """Symbolic names for the host operating system."""

    WINDOWS = "Windows"
    LINUX = "Linux"
    MACOSX = "Mac OS"
    CYGWIN = "Cygwin"

    @staticmethod
    def get_current() -> Optional[str]:
        """Detect the operating system this process runs on, or None."""
        if os.name == "nt":
            return OperatingSystem.WINDOWS
        # Probe sys.platform substrings in the same order as before.
        for marker, system in (("linux", OperatingSystem.LINUX),
                               ("darwin", OperatingSystem.MACOSX),
                               ("cygwin", OperatingSystem.CYGWIN)):
            if marker in sys.platform:
                return system
        return None
class Architecture:
    """Symbolic names for the host CPU architecture."""

    X86_64 = "x86_64"
    X86 = "x86"
    PPC = "ppc"
    ARM = "arm"

    @staticmethod
    def get_current():
        """Map ``platform.machine()`` onto one of the constants, or None."""
        return _MACHINE_TO_ARCHITECTURE.get(platform.machine().lower())


# Machine-name groups per architecture; flattened below into the lookup
# table used by Architecture.get_current().
_ARCHITECTURE_GROUPS = (
    (Architecture.X86_64, ("amd64", "x86_64", "i686-64")),
    # "ia64" (Itanium) is a distinct 64-bit ISA, treated here as common x86.
    (Architecture.X86, ("i386", "i686", "x86", "ia64")),
    (Architecture.PPC, ("powerpc", "power macintosh", "ppc64")),
    (Architecture.ARM, ("armv6", "armv6l", "arm64", "armv7", "armv7l")),
)
_MACHINE_TO_ARCHITECTURE = {
    machine: arch
    for arch, machines in _ARCHITECTURE_GROUPS
    for machine in machines
}
class _BaseInfo:
    """Shared rendering helpers for the info sections below."""

    def info(self, console: Console) -> None:
        """
        Print required information to provided console.

        You should implement this function in custom classes.
        """
        raise NotImplementedError()

    def show(self) -> None:
        """Render this section to the terminal."""
        self.info(Console())

    def render_text(self) -> str:
        """Render this section and return the output as plain text."""
        recording_console = Console(record=True)
        with recording_console.capture():
            self.info(recording_console)
        return recording_console.export_text()
class AirflowInfo(_BaseInfo):
    """Aggregate report: Airflow version plus every info section."""

    def __init__(self, anonymizer: Anonymizer):
        self.airflow_version = airflow_version
        self.system = SystemInfo(anonymizer)
        self.tools = ToolsInfo(anonymizer)
        self.paths = PathsInfo(anonymizer)
        self.config = ConfigInfo(anonymizer)
        self.provider = ProvidersInfo()

    def info(self, console: Console):
        console.print(
            f"[bold][green]Apache Airflow[/bold][/green]: {self.airflow_version}\n", highlight=False
        )
        # Print each section in the same fixed order as before.
        for section in (self.system, self.tools, self.paths,
                        self.config, self.provider):
            section.info(console)
class SystemInfo(_BaseInfo):
    """Host OS, architecture, locale and Python interpreter details."""

    def __init__(self, anonymizer: Anonymizer):
        self.operating_system = OperatingSystem.get_current()
        self.arch = Architecture.get_current()
        self.uname = platform.uname()
        self.locale = locale.getdefaultlocale()
        self.python_location = anonymizer.process_path(sys.executable)
        self.python_version = sys.version.replace("\n", " ")

    def info(self, console: Console):
        table = SimpleTable(title="System info")
        table.add_column()
        table.add_column(width=100)
        rows = (
            ("OS", self.operating_system or "NOT AVAILABLE"),
            ("architecture", self.arch or "NOT AVAILABLE"),
            ("uname", str(self.uname)),
            ("locale", str(self.locale)),
            ("python_version", self.python_version),
            ("python_location", self.python_location),
        )
        for label, content in rows:
            table.add_row(label, content)
        console.print(table)
class PathsInfo(_BaseInfo):
    """Airflow home, search paths, and whether `airflow` is on $PATH."""

    def __init__(self, anonymizer: Anonymizer):
        system_path = os.environ.get("PATH", "").split(os.pathsep)
        self.airflow_home = anonymizer.process_path(configuration.get_airflow_home())
        self.system_path = [anonymizer.process_path(p) for p in system_path]
        self.python_path = [anonymizer.process_path(p) for p in sys.path]
        # True when any $PATH entry contains an "airflow" executable file.
        self.airflow_on_path = any(
            os.path.exists(os.path.join(path_elem, "airflow")) for path_elem in system_path
        )

    def info(self, console: Console):
        table = SimpleTable(title="Paths info")
        table.add_column()
        table.add_column(width=150)
        for label, content in (
            ("airflow_home", self.airflow_home),
            ("system_path", os.pathsep.join(self.system_path)),
            ("python_path", os.pathsep.join(self.python_path)),
            ("airflow_on_path", str(self.airflow_on_path)),
        ):
            table.add_row(label, content)
        console.print(table)
class ProvidersInfo(_BaseInfo):
    """Installed provider packages and their versions."""

    def info(self, console: Console):
        table = SimpleTable(title="Providers info")
        table.add_column()
        table.add_column(width=150)
        for _, provider in ProvidersManager().providers.values():
            package_name = provider['package-name']
            latest_version = provider['versions'][0]
            table.add_row(package_name, latest_version)
        console.print(table)
class ConfigInfo(_BaseInfo):
    """The configuration values most relevant for diagnosing an install."""

    def __init__(self, anonymizer: Anonymizer):
        def _path_option(section, key):
            # Fetch an option and scrub user-identifying path components.
            return anonymizer.process_path(
                configuration.conf.get(section, key, fallback="NOT AVAILABLE"))

        self.executor = configuration.conf.get("core", "executor")
        self.sql_alchemy_conn = anonymizer.process_url(
            configuration.conf.get("core", "SQL_ALCHEMY_CONN", fallback="NOT AVAILABLE"))
        self.dags_folder = _path_option("core", "dags_folder")
        self.plugins_folder = _path_option("core", "plugins_folder")
        self.base_log_folder = _path_option("logging", "base_log_folder")
        self.remote_base_log_folder = _path_option("logging", "remote_base_log_folder")

    @property
    def task_logging_handler(self):
        """Dotted names of the handlers attached to the ``airflow.task`` logger."""
        def _qualname(obj):
            module = obj.__class__.__module__
            if module is None or module == str.__class__.__module__:
                return obj.__class__.__name__  # builtin: no useful module prefix
            return module + '.' + obj.__class__.__name__

        try:
            handlers = logging.getLogger('airflow.task').handlers
            return ", ".join(_qualname(handler) for handler in handlers)
        except Exception:  # noqa pylint: disable=broad-except
            return "NOT AVAILABLE"

    def info(self, console: Console):
        table = SimpleTable(title="Config info")
        table.add_column()
        table.add_column(width=150)
        for label, content in (
            ("executor", self.executor),
            ("task_logging_handler", self.task_logging_handler),
            ("sql_alchemy_conn", self.sql_alchemy_conn),
            ("dags_folder", self.dags_folder),
            ("plugins_folder", self.plugins_folder),
            ("base_log_folder", self.base_log_folder),
        ):
            table.add_row(label, content)
        console.print(table)
class ToolsInfo(_BaseInfo):
    """Versions of external command-line tools Airflow commonly relies on."""

    def __init__(self, anonymize: Anonymizer):
        del anonymize  # nothing user-identifying in tool version strings
        self.git_version = self._get_version(["git", "--version"])
        self.ssh_version = self._get_version(["ssh", "-V"])
        self.kubectl_version = self._get_version(["kubectl", "version", "--short=True", "--client=True"])
        self.gcloud_version = self._get_version(["gcloud", "version"], grep=b"Google Cloud SDK")
        self.cloud_sql_proxy_version = self._get_version(["cloud_sql_proxy", "--version"])
        self.mysql_version = self._get_version(["mysql", "--version"])
        self.sqlite3_version = self._get_version(["sqlite3", "--version"])
        self.psql_version = self._get_version(["psql", "--version"])

    def _get_version(self, cmd, grep=None):
        """Run *cmd* and return its single (optionally grepped) output line."""
        try:
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        except OSError:
            return "NOT AVAILABLE"  # binary not installed / not on PATH
        stdoutdata, _ = proc.communicate()
        lines = [line for line in stdoutdata.split(b"\n") if line]
        if grep:
            lines = [line for line in lines if grep in line]
        # Anything other than exactly one matching line is ambiguous.
        if len(lines) != 1:
            return "NOT AVAILABLE"
        return lines[0].decode()

    def info(self, console: Console):
        table = SimpleTable(title="Tools info")
        table.add_column()
        table.add_column(width=150)
        for name, version in (
            ("git", self.git_version),
            ("ssh", self.ssh_version),
            ("kubectl", self.kubectl_version),
            ("gcloud", self.gcloud_version),
            ("cloud_sql_proxy", self.cloud_sql_proxy_version),
            ("mysql", self.mysql_version),
            ("sqlite3", self.sqlite3_version),
            ("psql", self.psql_version),
        ):
            table.add_row(name, version)
        console.print(table)
class FileIoException(Exception):
    """Raised when the file.io upload integration fails."""
@tenacity.retry(
    stop=tenacity.stop_after_attempt(5),
    wait=tenacity.wait_exponential(multiplier=1, max=10),
    retry=tenacity.retry_if_exception_type(FileIoException),
    before=tenacity.before_log(log, logging.DEBUG),
    after=tenacity.after_log(log, logging.DEBUG),
)
def _upload_text_to_fileio(content):
    """Upload *content* to the file.io service and return the download link.

    Retried with exponential backoff (up to 5 attempts) on FileIoException.

    :raises FileIoException: when the upload fails or the response cannot
        be interpreted.
    """
    resp = requests.post("https://file.io", data={"text": content})
    if not resp.ok:
        # Print the raw body: an error response is not guaranteed to be
        # JSON, and resp.json() would raise ValueError here, escaping the
        # FileIoException-based retry policy.
        print(resp.text)
        raise FileIoException("Failed to send report to file.io service.")
    try:
        return resp.json()["link"]
    except (ValueError, KeyError) as e:
        # ValueError: body is not JSON; KeyError: no "link" field.
        log.debug(e)
        raise FileIoException("Failed to send report to file.io service.") from e
def _send_report_to_fileio(info):
    """Upload the rendered *info* report to file.io and print the link."""
    print("Uploading report to file.io service.")
    try:
        link = _upload_text_to_fileio(str(info))
    except FileIoException as ex:
        print(str(ex))
    else:
        print("Report uploaded.")
        print(link)
        print()
@suppress_logs_and_warning
def show_info(args):
    """Entry point for ``airflow info``: print the report, or upload it."""
    # Anonymization is always enforced when the report leaves the machine.
    scrub_pii = args.anonymize or args.file_io
    anonymizer = PiiAnonymizer() if scrub_pii else NullAnonymizer()
    info = AirflowInfo(anonymizer)
    if args.file_io:
        _send_report_to_fileio(info.render_text())
    else:
        info.show()
| {
"content_hash": "29d722e5fc0e03623d41fbef8deff005",
"timestamp": "",
"source": "github",
"line_count": 409,
"max_line_length": 109,
"avg_line_length": 33.7359413202934,
"alnum_prop": 0.6146542977243079,
"repo_name": "DinoCow/airflow",
"id": "9a325202e435e203d2dcc52a24a43ad0a1515658",
"size": "14583",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airflow/cli/commands/info_command.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "56963"
},
{
"name": "HTML",
"bytes": "140781"
},
{
"name": "JavaScript",
"bytes": "1370838"
},
{
"name": "Mako",
"bytes": "1037"
},
{
"name": "Python",
"bytes": "1473771"
},
{
"name": "Shell",
"bytes": "18638"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from site_ctf import views
from django.views.defaults import page_not_found
# Populate the admin registry from installed apps (Django <= 1.6 style).
admin.autodiscover()
# NOTE(review): this dotted path points into site_ctf.templates, which
# looks like a template location rather than a view module -- confirm the
# custom 404 handler actually resolves at runtime.
handler404 = 'site_ctf.templates.404'
# Route everything to the site_ctf app's URLconf.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'k6.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    # url(r'^admin/', include(admin.site.urls)),
    url(r'^', include('site_ctf.urls')),
)
| {
"content_hash": "facd660bc36ef24bb19c70b3d5a325c6",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 51,
"avg_line_length": 26.529411764705884,
"alnum_prop": 0.6718403547671841,
"repo_name": "Crypt0-M3lon/k6",
"id": "341b12a34b4148e74a6f5c92a0ebac69a9fd2619",
"size": "451",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "k6/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5339"
},
{
"name": "JavaScript",
"bytes": "74445"
},
{
"name": "Python",
"bytes": "24862"
}
],
"symlink_target": ""
} |
"""
WSGI config for slideshare project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Default the settings module; a pre-set DJANGO_SETTINGS_MODULE wins.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "slideshare.settings")
# Module-level WSGI callable that servers (gunicorn, mod_wsgi) look up.
application = get_wsgi_application()
| {
"content_hash": "508e6db4199ee087090bf17f63c35b6f",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 24.8125,
"alnum_prop": 0.7732997481108312,
"repo_name": "bahattincinic/django-101",
"id": "a37253867c9cffaf5c5961dc8bb34fcc3cbcd86f",
"size": "397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "slideshare/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "11543"
},
{
"name": "Python",
"bytes": "14568"
}
],
"symlink_target": ""
} |
"""Very low-level nginx config parser based on pyparsing."""
import string
from pyparsing import (
Literal, White, Word, alphanums, CharsNotIn, Forward, Group,
Optional, OneOrMore, Regex, ZeroOrMore, pythonStyleComment)
class RawNginxParser(object):
    # pylint: disable=expression-not-assigned
    """A class that parses nginx configuration with pyparsing."""
    # constants: structural tokens are suppressed so they do not appear in
    # the parsed tree
    left_bracket = Literal("{").suppress()
    right_bracket = Literal("}").suppress()
    semicolon = Literal(";").suppress()
    space = White().suppress()
    key = Word(alphanums + "_/")
    # Matches anything that is not a special character AND any chars in single
    # or double quotes
    value = Regex(r"((\".*\")?(\'.*\')?[^\{\};,]?)+")
    location = CharsNotIn("{};," + string.whitespace)
    # modifier for location uri [ = | ~ | ~* | ^~ ]
    # NOTE: "~*" must be tried before "~" so the longer token wins.
    modifier = Literal("=") | Literal("~*") | Literal("~") | Literal("^~")
    # rules: a directive is "key [value];", a block is
    # "<header> { <assignments or nested blocks> }".  Forward() lets the
    # block grammar refer to itself for the nested case.
    assignment = (key + Optional(space + value) + semicolon)
    location_statement = Optional(space + modifier) + Optional(space + location)
    if_statement = Literal("if") + space + Regex(r"\(.+\)") + space
    block = Forward()
    block << Group(
        (Group(key + location_statement) ^ Group(if_statement))
        + left_bracket
        + Group(ZeroOrMore(Group(assignment) | block))
        + right_bracket)
    script = OneOrMore(Group(assignment) | block).ignore(pythonStyleComment)
    def __init__(self, source):
        # source: full nginx configuration text to be parsed
        self.source = source
    def parse(self):
        """Returns the parsed tree."""
        return self.script.parseString(self.source)
    def as_list(self):
        """Returns the parsed tree as a list."""
        return self.parse().asList()
class RawNginxDumper(object):
    # pylint: disable=too-few-public-methods
    """Serialize a parsed nginx tree back into configuration text."""

    def __init__(self, blocks, indentation=4):
        self.blocks = blocks
        self.indentation = indentation

    def __iter__(self, blocks=None, current_indent=0, spacer=' '):
        """Yield the dumped nginx configuration one line at a time."""
        if not blocks:
            blocks = self.blocks
        indentation = spacer * current_indent
        for key, values in blocks:
            if current_indent:
                # Separator line emitted before every nested entry.
                yield spacer
            if not isinstance(key, list):
                # Simple "key value;" directive.
                yield indentation + key + spacer + values + ';'
                continue
            # Block: "<header> { ... }" with the body one level deeper.
            yield indentation + spacer.join(key) + ' {'
            body_indent = spacer * (current_indent + self.indentation)
            for parameter in values:
                if isinstance(parameter[0], list):
                    # Nested block: recurse with a deeper indent.
                    for line in self.__iter__(
                            [parameter], current_indent + self.indentation):
                        yield line
                else:
                    yield body_indent + spacer.join(parameter) + ';'
            yield indentation + '}'

    def as_string(self):
        """Return the parsed block as a string."""
        return '\n'.join(self)
# Shortcut functions to respect Python's serialization interface
# (like pyyaml, picker or json)
def loads(source):
    """Parses from a string.
    :param str source: The string to parse
    :returns: The parsed tree
    :rtype: list
    """
    return RawNginxParser(source).as_list()
def load(_file):
    """Parse an nginx configuration from an open file object.
    :param file _file: The file to parse
    :returns: The parsed tree
    :rtype: list
    """
    contents = _file.read()
    return loads(contents)
def dumps(blocks, indentation=4):
    """Serialize a parsed tree to a string.
    :param list blocks: The parsed tree
    :param int indentation: The number of spaces per indent level
    :rtype: str
    """
    dumper = RawNginxDumper(blocks, indentation)
    return dumper.as_string()
def dump(blocks, _file, indentation=4):
    """Serialize a parsed tree into an open file.
    :param list blocks: The parsed tree
    :param file _file: The file to dump to
    :param int indentation: The number of spaces per indent level
    :rtype: NoneType
    """
    text = dumps(blocks, indentation)
    return _file.write(text)
| {
"content_hash": "39b32583a34283296948fd00b9a112aa",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 80,
"avg_line_length": 30.97761194029851,
"alnum_prop": 0.5832329559142375,
"repo_name": "digideskio/lets-encrypt-preview",
"id": "f24455d5999c19a67e45f66465c4ed876cc28ec9",
"size": "4151",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "letsencrypt_nginx/nginxparser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "13069"
},
{
"name": "DIGITAL Command Language",
"bytes": "133"
},
{
"name": "Groff",
"bytes": "222"
},
{
"name": "Nginx",
"bytes": "4274"
},
{
"name": "Python",
"bytes": "837044"
},
{
"name": "Shell",
"bytes": "2687"
}
],
"symlink_target": ""
} |
from django.db import models, migrations
class Migration(migrations.Migration):
    # Historical schema migration -- widens LogEntry.object_id so models
    # with 64-bit primary keys can be audited.  Do not edit retroactively.
    dependencies = [
        ('auditlog', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='logentry',
            name='object_id',
            field=models.BigIntegerField(db_index=True, null=True, verbose_name='object id', blank=True),
        ),
    ]
| {
"content_hash": "e7af8ddd422a59b827a980add05af50a",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 105,
"avg_line_length": 24.1875,
"alnum_prop": 0.5943152454780362,
"repo_name": "kbussell/django-auditlog",
"id": "42f8620a57b71d7b93a39effc85a380ccd18f10b",
"size": "411",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/auditlog/migrations/0002_auto_support_long_primary_keys.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "80631"
}
],
"symlink_target": ""
} |
import contextlib
import copy
import itertools
import mock
from oslo.config import cfg
import testtools
from webob import exc
import webtest
from quantum.api import extensions
from quantum.api.rpc.agentnotifiers import l3_rpc_agent_api
from quantum.api.v2 import attributes
from quantum.common import config
from quantum.common import constants as l3_constants
from quantum.common import exceptions as q_exc
from quantum.common.test_lib import test_config
from quantum import context
from quantum.db import db_base_plugin_v2
from quantum.db import l3_db
from quantum.db import models_v2
from quantum.extensions import l3
from quantum.manager import QuantumManager
from quantum.openstack.common import log as logging
from quantum.openstack.common.notifier import api as notifier_api
from quantum.openstack.common.notifier import test_notifier
from quantum.openstack.common import uuidutils
from quantum.tests.unit import test_api_v2
from quantum.tests.unit import test_db_plugin
from quantum.tests.unit import test_extensions
from quantum.tests.unit import testlib_api
LOG = logging.getLogger(__name__)
# Short module-level aliases for helpers used throughout these tests.
_uuid = uuidutils.generate_uuid
_get_path = test_api_v2._get_path
class L3TestExtensionManager(object):
    """Minimal extension manager exposing only the L3 router resources."""

    def get_resources(self):
        # Delegate straight to the L3 extension definition.
        return l3.L3.get_resources()

    def get_actions(self):
        return []

    def get_request_extensions(self):
        return []
class L3NatExtensionTestCase(testlib_api.WebTestCase):
    """API-level tests for the L3 router extension, run against a fully
    mocked plugin so only serialization/dispatch behavior is exercised.
    """
    # Serialization format under test; the XML subclass overrides this.
    fmt = 'json'
    def setUp(self):
        super(L3NatExtensionTestCase, self).setUp()
        plugin = 'quantum.extensions.l3.RouterPluginBase'
        # Ensure 'stale' patched copies of the plugin are never returned
        QuantumManager._instance = None
        # Ensure existing ExtensionManager is not used
        extensions.PluginAwareExtensionManager._instance = None
        # Save the global RESOURCE_ATTRIBUTE_MAP
        self.saved_attr_map = {}
        for resource, attrs in attributes.RESOURCE_ATTRIBUTE_MAP.iteritems():
            self.saved_attr_map[resource] = attrs.copy()
        # Create the default configurations
        args = ['--config-file', test_api_v2.etcdir('quantum.conf.test')]
        config.parse(args=args)
        # Update the plugin and extensions path
        cfg.CONF.set_override('core_plugin', plugin)
        cfg.CONF.set_override('allow_pagination', True)
        cfg.CONF.set_override('allow_sorting', True)
        self._plugin_patcher = mock.patch(plugin, autospec=True)
        self.plugin = self._plugin_patcher.start()
        instances = self.plugin.return_value
        # Name-mangled private attributes of RouterPluginBase are set
        # directly so the API layer sees native pagination/sorting support.
        instances._RouterPluginBase__native_pagination_support = True
        instances._RouterPluginBase__native_sorting_support = True
        # Instantiate mock plugin and enable the 'router' extension
        QuantumManager.get_plugin().supported_extension_aliases = (
            ["router"])
        ext_mgr = L3TestExtensionManager()
        self.ext_mdw = test_extensions.setup_extensions_middleware(ext_mgr)
        self.api = webtest.TestApp(self.ext_mdw)
    def tearDown(self):
        self._plugin_patcher.stop()
        self.api = None
        self.plugin = None
        cfg.CONF.reset()
        # Restore the global RESOURCE_ATTRIBUTE_MAP
        attributes.RESOURCE_ATTRIBUTE_MAP = self.saved_attr_map
        super(L3NatExtensionTestCase, self).tearDown()
    def test_router_create(self):
        # POST /routers must call create_router and echo the plugin result.
        router_id = _uuid()
        data = {'router': {'name': 'router1', 'admin_state_up': True,
                           'tenant_id': _uuid(),
                           'external_gateway_info': None}}
        return_value = copy.deepcopy(data['router'])
        return_value.update({'status': "ACTIVE", 'id': router_id})
        instance = self.plugin.return_value
        instance.create_router.return_value = return_value
        instance.get_routers_count.return_value = 0
        res = self.api.post(_get_path('routers', fmt=self.fmt),
                            self.serialize(data),
                            content_type='application/%s' % self.fmt)
        instance.create_router.assert_called_with(mock.ANY,
                                                  router=data)
        self.assertEqual(res.status_int, exc.HTTPCreated.code)
        res = self.deserialize(res)
        self.assertTrue('router' in res)
        router = res['router']
        self.assertEqual(router['id'], router_id)
        self.assertEqual(router['status'], "ACTIVE")
        self.assertEqual(router['admin_state_up'], True)
    def test_router_list(self):
        # GET /routers must call get_routers with paging/sorting kwargs.
        router_id = _uuid()
        return_value = [{'name': 'router1', 'admin_state_up': True,
                         'tenant_id': _uuid(), 'id': router_id}]
        instance = self.plugin.return_value
        instance.get_routers.return_value = return_value
        res = self.api.get(_get_path('routers', fmt=self.fmt))
        instance.get_routers.assert_called_with(mock.ANY, fields=mock.ANY,
                                                filters=mock.ANY,
                                                sorts=mock.ANY,
                                                limit=mock.ANY,
                                                marker=mock.ANY,
                                                page_reverse=mock.ANY)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
        res = self.deserialize(res)
        self.assertTrue('routers' in res)
        self.assertEqual(1, len(res['routers']))
        self.assertEqual(router_id, res['routers'][0]['id'])
    def test_router_update(self):
        # PUT /routers/<id> must call update_router with the request body.
        router_id = _uuid()
        update_data = {'router': {'admin_state_up': False}}
        return_value = {'name': 'router1', 'admin_state_up': False,
                        'tenant_id': _uuid(),
                        'status': "ACTIVE", 'id': router_id}
        instance = self.plugin.return_value
        instance.update_router.return_value = return_value
        res = self.api.put(_get_path('routers', id=router_id,
                                     fmt=self.fmt),
                           self.serialize(update_data))
        instance.update_router.assert_called_with(mock.ANY, router_id,
                                                  router=update_data)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
        res = self.deserialize(res)
        self.assertTrue('router' in res)
        router = res['router']
        self.assertEqual(router['id'], router_id)
        self.assertEqual(router['status'], "ACTIVE")
        self.assertEqual(router['admin_state_up'], False)
    def test_router_get(self):
        # GET /routers/<id> must call get_router and echo the plugin result.
        router_id = _uuid()
        return_value = {'name': 'router1', 'admin_state_up': False,
                        'tenant_id': _uuid(),
                        'status': "ACTIVE", 'id': router_id}
        instance = self.plugin.return_value
        instance.get_router.return_value = return_value
        res = self.api.get(_get_path('routers', id=router_id,
                                     fmt=self.fmt))
        instance.get_router.assert_called_with(mock.ANY, router_id,
                                               fields=mock.ANY)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
        res = self.deserialize(res)
        self.assertTrue('router' in res)
        router = res['router']
        self.assertEqual(router['id'], router_id)
        self.assertEqual(router['status'], "ACTIVE")
        self.assertEqual(router['admin_state_up'], False)
    def test_router_delete(self):
        # DELETE /routers/<id> must call delete_router and return 204.
        router_id = _uuid()
        res = self.api.delete(_get_path('routers', id=router_id))
        instance = self.plugin.return_value
        instance.delete_router.assert_called_with(mock.ANY, router_id)
        self.assertEqual(res.status_int, exc.HTTPNoContent.code)
    def test_router_add_interface(self):
        # PUT /routers/<id>/add_router_interface must dispatch the custom
        # member action to the plugin and echo its result.
        router_id = _uuid()
        subnet_id = _uuid()
        port_id = _uuid()
        interface_data = {'subnet_id': subnet_id}
        return_value = copy.deepcopy(interface_data)
        return_value['port_id'] = port_id
        instance = self.plugin.return_value
        instance.add_router_interface.return_value = return_value
        path = _get_path('routers', id=router_id,
                         action="add_router_interface",
                         fmt=self.fmt)
        res = self.api.put(path, self.serialize(interface_data))
        instance.add_router_interface.assert_called_with(mock.ANY, router_id,
                                                         interface_data)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
        res = self.deserialize(res)
        self.assertTrue('port_id' in res)
        self.assertEqual(res['port_id'], port_id)
        self.assertEqual(res['subnet_id'], subnet_id)
class L3NatExtensionTestCaseXML(L3NatExtensionTestCase):
    # Re-run the entire suite above with the XML serialization format.
    fmt = 'xml'
# This plugin class is just for testing
class TestL3NatPlugin(db_base_plugin_v2.QuantumDbPluginV2,
                      l3_db.L3_NAT_db_mixin):
    # Test-only plugin combining the base DB plugin with the L3 NAT mixin;
    # each mutation wraps the base call plus the l3 bookkeeping in one
    # subtransaction.
    __native_pagination_support = True
    __native_sorting_support = True
    supported_extension_aliases = ["router"]
    def create_network(self, context, network):
        session = context.session
        with session.begin(subtransactions=True):
            net = super(TestL3NatPlugin, self).create_network(context,
                                                              network)
            self._process_l3_create(context, network['network'], net['id'])
            self._extend_network_dict_l3(context, net)
            return net
    def update_network(self, context, id, network):
        session = context.session
        with session.begin(subtransactions=True):
            net = super(TestL3NatPlugin, self).update_network(context, id,
                                                              network)
            self._process_l3_update(context, network['network'], id)
            self._extend_network_dict_l3(context, net)
            return net
    def delete_network(self, context, id):
        session = context.session
        with session.begin(subtransactions=True):
            super(TestL3NatPlugin, self).delete_network(context, id)
    def get_network(self, context, id, fields=None):
        # Fetch unfiltered, add the l3 attributes, then apply `fields`.
        net = super(TestL3NatPlugin, self).get_network(context, id, None)
        self._extend_network_dict_l3(context, net)
        return self._fields(net, fields)
    # NOTE(review): mutable default `sorts=[]` -- harmless here since it is
    # only passed through unmodified, but confirm super() treats [] and the
    # usual None default the same before changing it.
    def get_networks(self, context, filters=None, fields=None,
                     sorts=[], limit=None, marker=None,
                     page_reverse=False):
        nets = super(TestL3NatPlugin, self).get_networks(
            context, filters=filters, fields=fields, sorts=sorts, limit=limit,
            marker=marker, page_reverse=page_reverse)
        for net in nets:
            self._extend_network_dict_l3(context, net)
        return [self._fields(net, fields) for net in nets]
    def delete_port(self, context, id, l3_port_check=True):
        # Router interface/gateway ports must not be deleted directly, and
        # floating IPs bound to the port are detached first.
        if l3_port_check:
            self.prevent_l3_port_deletion(context, id)
        self.disassociate_floatingips(context, id)
        return super(TestL3NatPlugin, self).delete_port(context, id)
class L3NatTestCaseMixin(object):
    """Factories and context managers shared by the L3 API test cases."""

    def _create_network(self, fmt, name, admin_state_up, **kwargs):
        """Override the routine for allowing the router:external attribute."""
        # attributes containing a colon should be passed with
        # a double underscore, e.g. 'router__external' -> 'router:external'
        new_args = dict((k.replace('__', ':'), v)
                        for k, v in kwargs.items())
        arg_list = new_args.pop('arg_list', ()) + (l3.EXTERNAL,)
        return super(L3NatTestCaseMixin, self)._create_network(
            fmt, name, admin_state_up, arg_list=arg_list, **new_args)

    def _create_router(self, fmt, tenant_id, name=None,
                       admin_state_up=None, set_context=False,
                       arg_list=None, **kwargs):
        """POST a router create request and return the raw response."""
        data = {'router': {'tenant_id': tenant_id}}
        if name:
            data['router']['name'] = name
        if admin_state_up:
            data['router']['admin_state_up'] = admin_state_up
        for arg in (('admin_state_up', 'tenant_id') + (arg_list or ())):
            # Arg must be present and not empty
            if arg in kwargs and kwargs[arg]:
                data['router'][arg] = kwargs[arg]
        router_req = self.new_create_request('routers', data, fmt)
        if set_context and tenant_id:
            # create a specific auth context for this request
            router_req.environ['quantum.context'] = context.Context(
                '', tenant_id)
        return router_req.get_response(self.ext_api)

    def _make_router(self, fmt, tenant_id, name=None,
                     admin_state_up=None, set_context=False):
        """Create a router and return the deserialized response body."""
        res = self._create_router(fmt, tenant_id, name,
                                  admin_state_up, set_context)
        return self.deserialize(fmt, res)

    def _add_external_gateway_to_router(self, router_id, network_id,
                                        expected_code=exc.HTTPOk.code,
                                        quantum_context=None):
        """Set the router's external gateway to the given network."""
        return self._update('routers', router_id,
                            {'router': {'external_gateway_info':
                                        {'network_id': network_id}}},
                            expected_code=expected_code,
                            quantum_context=quantum_context)

    def _remove_external_gateway_from_router(self, router_id, network_id,
                                             expected_code=exc.HTTPOk.code):
        """Clear the router's external gateway info."""
        return self._update('routers', router_id,
                            {'router': {'external_gateway_info':
                                        {}}},
                            expected_code=expected_code)

    def _router_interface_action(self, action, router_id, subnet_id, port_id,
                                 expected_code=exc.HTTPOk.code,
                                 expected_body=None):
        """Invoke {add,remove}_router_interface and verify the response."""
        interface_data = {}
        if subnet_id:
            interface_data.update({'subnet_id': subnet_id})
        if port_id and (action != 'add' or not subnet_id):
            interface_data.update({'port_id': port_id})
        req = self.new_action_request('routers', interface_data, router_id,
                                      "%s_router_interface" % action)
        res = req.get_response(self.ext_api)
        self.assertEqual(res.status_int, expected_code)
        response = self.deserialize(self.fmt, res)
        if expected_body:
            self.assertEqual(response, expected_body)
        return response

    @contextlib.contextmanager
    def router(self, name='router1', admin_state_up=True,
               fmt=None, tenant_id=_uuid(), set_context=False):
        """Yield a router, deleting it on exit.

        NOTE(review): the ``tenant_id`` default is evaluated once at import
        time, so all routers created without an explicit tenant share one
        tenant id -- presumably intentional; confirm before changing.
        """
        router = self._make_router(fmt or self.fmt, tenant_id, name,
                                   admin_state_up, set_context)
        try:
            yield router
        finally:
            self._delete('routers', router['router']['id'])

    def _set_net_external(self, net_id):
        """Flag the network as router:external."""
        self._update('networks', net_id,
                     {'network': {l3.EXTERNAL: True}})

    def _create_floatingip(self, fmt, network_id, port_id=None,
                           fixed_ip=None, set_context=False):
        """POST a floating IP create request and return the raw response."""
        data = {'floatingip': {'floating_network_id': network_id,
                               'tenant_id': self._tenant_id}}
        if port_id:
            data['floatingip']['port_id'] = port_id
        if fixed_ip:
            data['floatingip']['fixed_ip_address'] = fixed_ip
        floatingip_req = self.new_create_request('floatingips', data, fmt)
        if set_context and self._tenant_id:
            # create a specific auth context for this request
            floatingip_req.environ['quantum.context'] = context.Context(
                '', self._tenant_id)
        return floatingip_req.get_response(self.ext_api)

    def _make_floatingip(self, fmt, network_id, port_id=None,
                         fixed_ip=None, set_context=False):
        """Create a floating IP (asserting 201) and deserialize it."""
        res = self._create_floatingip(fmt, network_id, port_id,
                                      fixed_ip, set_context)
        self.assertEqual(res.status_int, exc.HTTPCreated.code)
        return self.deserialize(fmt, res)

    def _validate_floating_ip(self, fip):
        """Check the floating IP is the only one listed and is showable."""
        body = self._list('floatingips')
        self.assertEqual(len(body['floatingips']), 1)
        self.assertEqual(body['floatingips'][0]['id'],
                         fip['floatingip']['id'])
        body = self._show('floatingips', fip['floatingip']['id'])
        self.assertEqual(body['floatingip']['id'],
                         fip['floatingip']['id'])

    @contextlib.contextmanager
    def floatingip_with_assoc(self, port_id=None, fmt=None,
                              set_context=False):
        """Yield a floating IP associated with a private port.

        Builds an external subnet, a private port and a router, wires the
        router between them, and tears everything down on exit.
        """
        with self.subnet(cidr='11.0.0.0/24') as public_sub:
            self._set_net_external(public_sub['subnet']['network_id'])
            with self.port() as private_port:
                with self.router() as r:
                    sid = private_port['port']['fixed_ips'][0]['subnet_id']
                    private_sub = {'subnet': {'id': sid}}
                    floatingip = None
                    try:
                        self._add_external_gateway_to_router(
                            r['router']['id'],
                            public_sub['subnet']['network_id'])
                        self._router_interface_action(
                            'add', r['router']['id'],
                            private_sub['subnet']['id'], None)
                        floatingip = self._make_floatingip(
                            fmt or self.fmt,
                            public_sub['subnet']['network_id'],
                            port_id=private_port['port']['id'],
                            # bug fix: honor the caller's set_context flag
                            # instead of always passing False
                            set_context=set_context)
                        yield floatingip
                    finally:
                        if floatingip:
                            self._delete('floatingips',
                                         floatingip['floatingip']['id'])
                        self._router_interface_action(
                            'remove', r['router']['id'],
                            private_sub['subnet']['id'], None)
                        self._remove_external_gateway_from_router(
                            r['router']['id'],
                            public_sub['subnet']['network_id'])

    @contextlib.contextmanager
    def floatingip_no_assoc(self, private_sub, fmt=None, set_context=False):
        """Yield an unassociated floating IP reachable from private_sub."""
        with self.subnet(cidr='12.0.0.0/24') as public_sub:
            self._set_net_external(public_sub['subnet']['network_id'])
            with self.router() as r:
                floatingip = None
                try:
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        public_sub['subnet']['network_id'])
                    self._router_interface_action('add', r['router']['id'],
                                                  private_sub['subnet']['id'],
                                                  None)
                    floatingip = self._make_floatingip(
                        fmt or self.fmt,
                        public_sub['subnet']['network_id'],
                        set_context=set_context)
                    yield floatingip
                finally:
                    if floatingip:
                        self._delete('floatingips',
                                     floatingip['floatingip']['id'])
                    self._router_interface_action('remove', r['router']['id'],
                                                  private_sub['subnet']['id'],
                                                  None)
                    self._remove_external_gateway_from_router(
                        r['router']['id'],
                        public_sub['subnet']['network_id'])
class L3NatTestCaseBase(L3NatTestCaseMixin,
                        test_db_plugin.QuantumDbPluginV2TestCase):
    """Base test case wiring the L3 test plugin and notification driver."""
    def setUp(self):
        # Use the L3-enabled plugin defined in this module as the core plugin.
        test_config['plugin_name_v2'] = (
            'quantum.tests.unit.test_l3_plugin.TestL3NatPlugin')
        # for these tests we need to enable overlapping ips
        cfg.CONF.set_default('allow_overlapping_ips', True)
        ext_mgr = L3TestExtensionManager()
        test_config['extension_manager'] = ext_mgr
        super(L3NatTestCaseBase, self).setUp()
        # Set to None to reload the drivers
        notifier_api._drivers = None
        cfg.CONF.set_override("notification_driver", [test_notifier.__name__])
    def tearDown(self):
        # Drop the notifications captured during the test run.
        test_notifier.NOTIFICATIONS = []
        super(L3NatTestCaseBase, self).tearDown()
class L3NatDBTestCase(L3NatTestCaseBase):
def test_router_create(self):
name = 'router1'
tenant_id = _uuid()
expected_value = [('name', name), ('tenant_id', tenant_id),
('admin_state_up', True), ('status', 'ACTIVE'),
('external_gateway_info', None)]
with self.router(name='router1', admin_state_up=True,
tenant_id=tenant_id) as router:
for k, v in expected_value:
self.assertEqual(router['router'][k], v)
    def test_router_create_with_gwinfo(self):
        """A router can be created with external_gateway_info inline."""
        with self.subnet() as s:
            self._set_net_external(s['subnet']['network_id'])
            data = {'router': {'tenant_id': _uuid()}}
            data['router']['name'] = 'router1'
            data['router']['external_gateway_info'] = {
                'network_id': s['subnet']['network_id']}
            router_req = self.new_create_request('routers', data, self.fmt)
            res = router_req.get_response(self.ext_api)
            router = self.deserialize(self.fmt, res)
            self.assertEqual(
                s['subnet']['network_id'],
                router['router']['external_gateway_info']['network_id'])
            self._delete('routers', router['router']['id'])
    def test_router_list(self):
        """Listing routers returns all created routers."""
        with contextlib.nested(self.router(),
                               self.router(),
                               self.router()
                               ) as routers:
            self._test_list_resources('router', routers)
def test_router_list_with_parameters(self):
with contextlib.nested(self.router(name='router1'),
self.router(name='router2'),
) as (router1, router2):
query_params = 'name=router1'
self._test_list_resources('router', [router1],
query_params=query_params)
query_params = 'name=router2'
self._test_list_resources('router', [router2],
query_params=query_params)
query_params = 'name=router3'
self._test_list_resources('router', [],
query_params=query_params)
    def test_router_list_with_sort(self):
        """Router list supports server-side sorting by name."""
        with contextlib.nested(self.router(name='router1'),
                               self.router(name='router2'),
                               self.router(name='router3')
                               ) as (router1, router2, router3):
            self._test_list_with_sort('router', (router3, router2, router1),
                                      [('name', 'desc')])
    def test_router_list_with_pagination(self):
        """Router list supports limit/marker pagination."""
        with contextlib.nested(self.router(name='router1'),
                               self.router(name='router2'),
                               self.router(name='router3')
                               ) as (router1, router2, router3):
            self._test_list_with_pagination('router',
                                            (router1, router2, router3),
                                            ('name', 'asc'), 2, 2)
    def test_router_list_with_pagination_reverse(self):
        """Router list supports reverse-direction pagination."""
        with contextlib.nested(self.router(name='router1'),
                               self.router(name='router2'),
                               self.router(name='router3')
                               ) as (router1, router2, router3):
            self._test_list_with_pagination_reverse('router',
                                                    (router1, router2,
                                                     router3),
                                                    ('name', 'asc'), 2, 2)
    def test_router_update(self):
        """PUT on a router updates its name."""
        rname1 = "yourrouter"
        rname2 = "nachorouter"
        with self.router(name=rname1) as r:
            body = self._show('routers', r['router']['id'])
            self.assertEqual(body['router']['name'], rname1)
            body = self._update('routers', r['router']['id'],
                                {'router': {'name': rname2}})
            body = self._show('routers', r['router']['id'])
            self.assertEqual(body['router']['name'], rname2)
    def test_router_update_gateway(self):
        """Updating the gateway replaces the external network reference."""
        with self.router() as r:
            with self.subnet() as s1:
                with self.subnet() as s2:
                    self._set_net_external(s1['subnet']['network_id'])
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        s1['subnet']['network_id'])
                    body = self._show('routers', r['router']['id'])
                    net_id = (body['router']
                              ['external_gateway_info']['network_id'])
                    self.assertEqual(net_id, s1['subnet']['network_id'])
                    self._set_net_external(s2['subnet']['network_id'])
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        s2['subnet']['network_id'])
                    body = self._show('routers', r['router']['id'])
                    net_id = (body['router']
                              ['external_gateway_info']['network_id'])
                    self.assertEqual(net_id, s2['subnet']['network_id'])
                    self._remove_external_gateway_from_router(
                        r['router']['id'],
                        s2['subnet']['network_id'])
    def test_router_update_gateway_with_existed_floatingip(self):
        """Changing the gateway while floating IPs exist returns 409."""
        with self.subnet() as subnet:
            self._set_net_external(subnet['subnet']['network_id'])
            with self.floatingip_with_assoc() as fip:
                self._add_external_gateway_to_router(
                    fip['floatingip']['router_id'],
                    subnet['subnet']['network_id'],
                    expected_code=exc.HTTPConflict.code)
    def test_router_update_gateway_to_empty_with_existed_floatingip(self):
        """Clearing the gateway while floating IPs exist returns 409."""
        with self.floatingip_with_assoc() as fip:
            self._remove_external_gateway_from_router(
                fip['floatingip']['router_id'], None,
                expected_code=exc.HTTPConflict.code)
def test_router_add_interface_subnet(self):
exp_notifications = ['router.create.start',
'router.create.end',
'network.create.start',
'network.create.end',
'subnet.create.start',
'subnet.create.end',
'router.interface.create',
'router.interface.delete']
test_notifier.NOTIFICATIONS = []
with self.router() as r:
with self.subnet() as s:
body = self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self.assertTrue('port_id' in body)
# fetch port and confirm device_id
r_port_id = body['port_id']
body = self._show('ports', r_port_id)
self.assertEqual(body['port']['device_id'], r['router']['id'])
body = self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
body = self._show('ports', r_port_id,
expected_code=exc.HTTPNotFound.code)
self.assertEqual(
set(exp_notifications),
set(n['event_type'] for n in test_notifier.NOTIFICATIONS))
for n in test_notifier.NOTIFICATIONS:
if n['event_type'].startswith('router.interface.'):
payload = n['payload']['router.interface']
self.assertIn('id', payload)
self.assertEquals(payload['id'], r['router']['id'])
self.assertIn('tenant_id', payload)
stid = s['subnet']['tenant_id']
# tolerate subnet tenant deliberately to '' in the
# nicira metadata access case
self.assertTrue(payload['tenant_id'] == stid or
payload['tenant_id'] == '')
    def test_router_add_interface_subnet_with_bad_tenant_returns_404(self):
        """A foreign tenant cannot add/remove another tenant's interface."""
        with mock.patch('quantum.context.Context.to_dict') as tdict:
            tenant_id = _uuid()
            admin_context = {'roles': ['admin']}
            tenant_context = {'tenant_id': 'bad_tenant',
                              'roles': []}
            tdict.return_value = admin_context
            with self.router(tenant_id=tenant_id) as r:
                with self.network(tenant_id=tenant_id) as n:
                    with self.subnet(network=n) as s:
                        tdict.return_value = tenant_context
                        err_code = exc.HTTPNotFound.code
                        self._router_interface_action('add',
                                                      r['router']['id'],
                                                      s['subnet']['id'],
                                                      None,
                                                      err_code)
                        tdict.return_value = admin_context
                        body = self._router_interface_action('add',
                                                             r['router']['id'],
                                                             s['subnet']['id'],
                                                             None)
                        self.assertTrue('port_id' in body)
                        tdict.return_value = tenant_context
                        self._router_interface_action('remove',
                                                      r['router']['id'],
                                                      s['subnet']['id'],
                                                      None,
                                                      err_code)
                        tdict.return_value = admin_context
                        body = self._router_interface_action('remove',
                                                             r['router']['id'],
                                                             s['subnet']['id'],
                                                             None)
def test_router_add_interface_subnet_with_port_from_other_tenant(self):
tenant_id = _uuid()
other_tenant_id = _uuid()
tenant_context = context.Context(user_id=None, tenant_id=tenant_id)
admin_context = context.get_admin_context()
with mock.patch('quantum.context.Context') as ctx:
ctx.return_value = admin_context
with contextlib.nested(
self.router(tenant_id=tenant_id),
self.network(tenant_id=tenant_id),
self.network(tenant_id=other_tenant_id)) as (r, n1, n2):
with contextlib.nested(
self.subnet(network=n1, cidr='10.0.0.0/24'),
self.subnet(network=n2, cidr='10.1.0.0/24')) as (s1, s2):
ctx.return_value = admin_context
body = self._router_interface_action(
'add',
r['router']['id'],
s2['subnet']['id'],
None)
self.assertTrue('port_id' in body)
ctx.return_value = tenant_context
self._router_interface_action(
'add',
r['router']['id'],
s1['subnet']['id'],
None)
self.assertTrue('port_id' in body)
self._router_interface_action(
'remove',
r['router']['id'],
s1['subnet']['id'],
None)
ctx.return_value = admin_context
body = self._router_interface_action(
'remove',
r['router']['id'],
s2['subnet']['id'],
None)
    def test_router_add_interface_port(self):
        """A router interface can be added by port id."""
        with self.router() as r:
            with self.port(no_delete=True) as p:
                body = self._router_interface_action('add',
                                                     r['router']['id'],
                                                     None,
                                                     p['port']['id'])
                self.assertTrue('port_id' in body)
                self.assertEqual(body['port_id'], p['port']['id'])
                # fetch port and confirm device_id
                body = self._show('ports', p['port']['id'])
                self.assertEqual(body['port']['device_id'], r['router']['id'])
                # clean-up
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              None,
                                              p['port']['id'])
    def test_router_add_interface_port_bad_tenant_returns_404(self):
        """A foreign tenant cannot add/remove a port-based interface."""
        with mock.patch('quantum.context.Context.to_dict') as tdict:
            admin_context = {'roles': ['admin']}
            tenant_context = {'tenant_id': 'bad_tenant',
                              'roles': []}
            tdict.return_value = admin_context
            with self.router() as r:
                with self.port(no_delete=True) as p:
                    tdict.return_value = tenant_context
                    err_code = exc.HTTPNotFound.code
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'],
                                                  err_code)
                    tdict.return_value = admin_context
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    tdict.return_value = tenant_context
                    # clean-up
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'],
                                                  err_code)
                    tdict.return_value = admin_context
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
    def test_router_add_interface_dup_subnet1_returns_400(self):
        """Adding the same subnet interface twice returns 400."""
        with self.router() as r:
            with self.subnet() as s:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None,
                                              expected_code=exc.
                                              HTTPBadRequest.code)
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
    def test_router_add_interface_dup_subnet2_returns_400(self):
        """Two ports on one subnet cannot both become router interfaces."""
        with self.router() as r:
            with self.subnet() as s:
                with self.port(subnet=s, no_delete=True) as p1:
                    with self.port(subnet=s) as p2:
                        self._router_interface_action('add',
                                                      r['router']['id'],
                                                      None,
                                                      p1['port']['id'])
                        self._router_interface_action('add',
                                                      r['router']['id'],
                                                      None,
                                                      p2['port']['id'],
                                                      expected_code=exc.
                                                      HTTPBadRequest.code)
                        # clean-up
                        self._router_interface_action('remove',
                                                      r['router']['id'],
                                                      None,
                                                      p1['port']['id'])
    def test_router_add_interface_overlapped_cidr_returns_400(self):
        """Subnets overlapping an attached subnet's CIDR are rejected."""
        with self.router() as r:
            with self.subnet(cidr='10.0.1.0/24') as s1:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s1['subnet']['id'],
                                              None)
                def try_overlapped_cidr(cidr):
                    # attach a second subnet whose CIDR overlaps s1
                    with self.subnet(cidr=cidr) as s2:
                        self._router_interface_action('add',
                                                      r['router']['id'],
                                                      s2['subnet']['id'],
                                                      None,
                                                      expected_code=exc.
                                                      HTTPBadRequest.code)
                # another subnet with same cidr
                try_overlapped_cidr('10.0.1.0/24')
                # another subnet with overlapped cidr including s1
                try_overlapped_cidr('10.0.0.0/16')
                # another subnet with overlapped cidr included by s1
                try_overlapped_cidr('10.0.1.1/32')
                # clean-up
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              s1['subnet']['id'],
                                              None)
    def test_router_add_interface_no_data_returns_400(self):
        """An interface add with neither subnet nor port returns 400."""
        with self.router() as r:
            self._router_interface_action('add',
                                          r['router']['id'],
                                          None,
                                          None,
                                          expected_code=exc.
                                          HTTPBadRequest.code)
    def test_router_add_gateway_dup_subnet1_returns_400(self):
        """A subnet already used as an interface cannot be the gateway."""
        with self.router() as r:
            with self.subnet() as s:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'],
                    expected_code=exc.HTTPBadRequest.code)
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
    def test_router_add_gateway_dup_subnet2_returns_400(self):
        """A gateway subnet cannot also be added as an interface."""
        with self.router() as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None,
                                              expected_code=exc.
                                              HTTPBadRequest.code)
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
    def test_router_add_gateway(self):
        """Setting and clearing the external gateway round-trips."""
        with self.router() as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                body = self._show('routers', r['router']['id'])
                net_id = body['router']['external_gateway_info']['network_id']
                self.assertEqual(net_id, s['subnet']['network_id'])
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                body = self._show('routers', r['router']['id'])
                gw_info = body['router']['external_gateway_info']
                self.assertEqual(gw_info, None)
    def test_router_add_gateway_tenant_ctx(self):
        """A non-admin tenant can set its own router's gateway."""
        with self.router(tenant_id='noadmin',
                         set_context=True) as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                ctx = context.Context('', 'noadmin')
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'],
                    quantum_context=ctx)
                body = self._show('routers', r['router']['id'])
                net_id = body['router']['external_gateway_info']['network_id']
                self.assertEqual(net_id, s['subnet']['network_id'])
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                body = self._show('routers', r['router']['id'])
                gw_info = body['router']['external_gateway_info']
                self.assertEqual(gw_info, None)
    def test_router_add_gateway_invalid_network_returns_404(self):
        """Pointing the gateway at a nonexistent network returns 404."""
        with self.router() as r:
            self._add_external_gateway_to_router(
                r['router']['id'],
                "foobar", expected_code=exc.HTTPNotFound.code)
    def test_router_add_gateway_net_not_external_returns_400(self):
        """A network not flagged router:external cannot be a gateway."""
        with self.router() as r:
            with self.subnet() as s:
                # intentionally do not set net as external
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'],
                    expected_code=exc.HTTPBadRequest.code)
    def test_router_add_gateway_no_subnet_returns_400(self):
        """An external network without subnets cannot be a gateway."""
        with self.router() as r:
            with self.network() as n:
                self._set_net_external(n['network']['id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    n['network']['id'], expected_code=exc.HTTPBadRequest.code)
    def test_router_remove_interface_inuse_returns_409(self):
        """Deleting a router that still has interfaces returns 409."""
        with self.router() as r:
            with self.subnet() as s:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
                self._delete('routers', r['router']['id'],
                             expected_code=exc.HTTPConflict.code)
                # remove interface so test can exit without errors
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
    def test_router_remove_interface_wrong_subnet_returns_409(self):
        """Removing with a mismatched subnet/port pair returns 409."""
        with self.router() as r:
            with self.subnet() as s:
                with self.port(no_delete=True) as p:
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  s['subnet']['id'],
                                                  p['port']['id'],
                                                  exc.HTTPConflict.code)
                    #remove properly to clean-up
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
    def test_router_remove_interface_returns_200(self):
        """Removing an interface echoes the body returned by the add."""
        with self.router() as r:
            with self.port(no_delete=True) as p:
                body = self._router_interface_action('add',
                                                     r['router']['id'],
                                                     None,
                                                     p['port']['id'])
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              None,
                                              p['port']['id'],
                                              expected_body=body)
    def test_router_remove_interface_wrong_port_returns_404(self):
        """Removing an interface with an unrelated port id returns 404."""
        with self.router() as r:
            with self.subnet():
                with self.port(no_delete=True) as p:
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    # create another port for testing failure case
                    res = self._create_port(self.fmt, p['port']['network_id'])
                    p2 = self.deserialize(self.fmt, res)
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p2['port']['id'],
                                                  exc.HTTPNotFound.code)
                    # remove correct interface to cleanup
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    # remove extra port created
                    self._delete('ports', p2['port']['id'])
def test_router_delete(self):
with self.router() as router:
router_id = router['router']['id']
req = self.new_show_request('router', router_id)
res = req.get_response(self._api_for_resource('router'))
self.assertEqual(res.status_int, 404)
    def test_router_delete_with_port_existed_returns_409(self):
        """A router with an attached interface cannot be deleted."""
        with self.subnet() as subnet:
            res = self._create_router(self.fmt, _uuid())
            router = self.deserialize(self.fmt, res)
            self._router_interface_action('add',
                                          router['router']['id'],
                                          subnet['subnet']['id'],
                                          None)
            self._delete('routers', router['router']['id'],
                         exc.HTTPConflict.code)
            self._router_interface_action('remove',
                                          router['router']['id'],
                                          subnet['subnet']['id'],
                                          None)
            self._delete('routers', router['router']['id'])
    def test_router_delete_with_floatingip_existed_returns_409(self):
        """A router routing an associated floating IP cannot be deleted."""
        with self.port() as p:
            private_sub = {'subnet': {'id':
                                      p['port']['fixed_ips'][0]['subnet_id']}}
            with self.subnet(cidr='12.0.0.0/24') as public_sub:
                self._set_net_external(public_sub['subnet']['network_id'])
                res = self._create_router(self.fmt, _uuid())
                r = self.deserialize(self.fmt, res)
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    public_sub['subnet']['network_id'])
                self._router_interface_action('add', r['router']['id'],
                                              private_sub['subnet']['id'],
                                              None)
                res = self._create_floatingip(
                    self.fmt, public_sub['subnet']['network_id'],
                    port_id=p['port']['id'])
                self.assertEqual(res.status_int, exc.HTTPCreated.code)
                floatingip = self.deserialize(self.fmt, res)
                self._delete('routers', r['router']['id'],
                             expected_code=exc.HTTPConflict.code)
                # Cleanup
                self._delete('floatingips', floatingip['floatingip']['id'])
                self._router_interface_action('remove', r['router']['id'],
                                              private_sub['subnet']['id'],
                                              None)
                self._delete('routers', r['router']['id'])
def test_router_show(self):
name = 'router1'
tenant_id = _uuid()
expected_value = [('name', name), ('tenant_id', tenant_id),
('admin_state_up', True), ('status', 'ACTIVE'),
('external_gateway_info', None)]
with self.router(name='router1', admin_state_up=True,
tenant_id=tenant_id) as router:
res = self._show('routers', router['router']['id'])
for k, v in expected_value:
self.assertEqual(res['router'][k], v)
    def test_network_update_external_failure(self):
        """router:external cannot be cleared while used as a gateway."""
        with self.router() as r:
            with self.subnet() as s1:
                self._set_net_external(s1['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s1['subnet']['network_id'])
                self._update('networks', s1['subnet']['network_id'],
                             {'network': {'router:external': False}},
                             expected_code=exc.HTTPConflict.code)
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s1['subnet']['network_id'])
    def test_network_update_external(self):
        """router:external may be cleared on an unused external network."""
        with self.router() as r:
            with self.network('test_net') as testnet:
                self._set_net_external(testnet['network']['id'])
                with self.subnet() as s1:
                    self._set_net_external(s1['subnet']['network_id'])
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        s1['subnet']['network_id'])
                    self._update('networks', testnet['network']['id'],
                                 {'network': {'router:external': False}})
                    self._remove_external_gateway_from_router(
                        r['router']['id'],
                        s1['subnet']['network_id'])
    def test_floatingip_crd_ops(self):
        """Create/read/delete lifecycle of an associated floating IP."""
        with self.floatingip_with_assoc() as fip:
            self._validate_floating_ip(fip)
        # post-delete, check that it is really gone
        body = self._list('floatingips')
        self.assertEqual(len(body['floatingips']), 0)
        self._show('floatingips', fip['floatingip']['id'],
                   expected_code=exc.HTTPNotFound.code)
    def _test_floatingip_with_assoc_fails(self, plugin_class):
        """Floating IP creation rolls back cleanly on association failure.

        Patches ``plugin_class._update_fip_assoc`` to raise BadRequest and
        verifies no orphaned floating-IP port is left behind.
        """
        with self.subnet(cidr='200.0.0.1/24') as public_sub:
            self._set_net_external(public_sub['subnet']['network_id'])
            with self.port() as private_port:
                with self.router() as r:
                    sid = private_port['port']['fixed_ips'][0]['subnet_id']
                    private_sub = {'subnet': {'id': sid}}
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        public_sub['subnet']['network_id'])
                    self._router_interface_action('add', r['router']['id'],
                                                  private_sub['subnet']['id'],
                                                  None)
                    method = plugin_class + '._update_fip_assoc'
                    with mock.patch(method) as pl:
                        pl.side_effect = q_exc.BadRequest(
                            resource='floatingip',
                            msg='fake_error')
                        res = self._create_floatingip(
                            self.fmt,
                            public_sub['subnet']['network_id'],
                            port_id=private_port['port']['id'])
                        self.assertEqual(res.status_int, 400)
                    for p in self._list('ports')['ports']:
                        if p['device_owner'] == 'network:floatingip':
                            self.fail('garbage port is not deleted')
                    self._remove_external_gateway_from_router(
                        r['router']['id'],
                        public_sub['subnet']['network_id'])
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  private_sub['subnet']['id'],
                                                  None)
    def test_floatingip_with_assoc_fails(self):
        """Association failure in the base L3 mixin is rolled back."""
        self._test_floatingip_with_assoc_fails(
            'quantum.db.l3_db.L3_NAT_db_mixin')
    def test_floatingip_update(self):
        """Associating a port via PUT fills port_id and fixed_ip_address."""
        with self.port() as p:
            private_sub = {'subnet': {'id':
                                      p['port']['fixed_ips'][0]['subnet_id']}}
            with self.floatingip_no_assoc(private_sub) as fip:
                body = self._show('floatingips', fip['floatingip']['id'])
                self.assertEqual(body['floatingip']['port_id'], None)
                self.assertEqual(body['floatingip']['fixed_ip_address'], None)
                port_id = p['port']['id']
                ip_address = p['port']['fixed_ips'][0]['ip_address']
                body = self._update('floatingips', fip['floatingip']['id'],
                                    {'floatingip': {'port_id': port_id}})
                self.assertEqual(body['floatingip']['port_id'], port_id)
                self.assertEqual(body['floatingip']['fixed_ip_address'],
                                 ip_address)
    def test_floatingip_with_assoc(self):
        """An associated floating IP exposes port, fixed IP and router."""
        with self.floatingip_with_assoc() as fip:
            body = self._show('floatingips', fip['floatingip']['id'])
            self.assertEqual(body['floatingip']['id'],
                             fip['floatingip']['id'])
            self.assertEqual(body['floatingip']['port_id'],
                             fip['floatingip']['port_id'])
            self.assertTrue(body['floatingip']['fixed_ip_address'] is not None)
            self.assertTrue(body['floatingip']['router_id'] is not None)
    def test_floatingip_port_delete(self):
        """Deleting the associated port clears the floating IP linkage."""
        with self.subnet() as private_sub:
            with self.floatingip_no_assoc(private_sub) as fip:
                with self.port(subnet=private_sub) as p:
                    body = self._update('floatingips', fip['floatingip']['id'],
                                        {'floatingip':
                                         {'port_id': p['port']['id']}})
                # note: once this port goes out of scope, the port will be
                # deleted, which is what we want to test. We want to confirm
                # that the fields are set back to None
                body = self._show('floatingips', fip['floatingip']['id'])
                self.assertEqual(body['floatingip']['id'],
                                 fip['floatingip']['id'])
                self.assertEqual(body['floatingip']['port_id'], None)
                self.assertEqual(body['floatingip']['fixed_ip_address'], None)
                self.assertEqual(body['floatingip']['router_id'], None)
    def test_two_fips_one_port_invalid_return_409(self):
        """A second floating IP on an already-associated port returns 409."""
        with self.floatingip_with_assoc() as fip1:
            res = self._create_floatingip(
                self.fmt,
                fip1['floatingip']['floating_network_id'],
                fip1['floatingip']['port_id'])
            self.assertEqual(res.status_int, exc.HTTPConflict.code)
    def test_floating_ip_direct_port_delete_returns_409(self):
        """The internal floating-IP port cannot be deleted directly."""
        found = False
        with self.floatingip_with_assoc():
            for p in self._list('ports')['ports']:
                if p['device_owner'] == 'network:floatingip':
                    self._delete('ports', p['id'],
                                 expected_code=exc.HTTPConflict.code)
                    found = True
        self.assertTrue(found)
    def test_create_floatingip_no_ext_gateway_return_404(self):
        """Associating without a router gateway to reach the port is 404."""
        with self.subnet() as public_sub:
            self._set_net_external(public_sub['subnet']['network_id'])
            with self.port() as private_port:
                with self.router():
                    res = self._create_floatingip(
                        self.fmt,
                        public_sub['subnet']['network_id'],
                        port_id=private_port['port']['id'])
                    # this should be some kind of error
                    self.assertEqual(res.status_int, exc.HTTPNotFound.code)
    def test_create_floating_non_ext_network_returns_400(self):
        """A floating IP on a non-external network returns 400."""
        with self.subnet() as public_sub:
            # normally we would set the network of public_sub to be
            # external, but the point of this test is to handle when
            # that is not the case
            with self.router():
                res = self._create_floatingip(
                    self.fmt,
                    public_sub['subnet']['network_id'])
                self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
    def test_create_floatingip_no_public_subnet_returns_400(self):
        """An external network with no subnet cannot host floating IPs."""
        with self.network() as public_network:
            with self.port() as private_port:
                with self.router() as r:
                    sid = private_port['port']['fixed_ips'][0]['subnet_id']
                    private_sub = {'subnet': {'id': sid}}
                    self._router_interface_action('add', r['router']['id'],
                                                  private_sub['subnet']['id'],
                                                  None)
                    res = self._create_floatingip(
                        self.fmt,
                        public_network['network']['id'],
                        port_id=private_port['port']['id'])
                    self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
                    # cleanup
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  private_sub['subnet']['id'],
                                                  None)
    def test_create_floatingip_invalid_floating_network_id_returns_400(self):
        """A malformed floating network id is rejected with 400."""
        # API-level test - no need to create all objects for l3 plugin
        res = self._create_floatingip(self.fmt, 'iamnotanuuid',
                                      uuidutils.generate_uuid(), '192.168.0.1')
        self.assertEqual(res.status_int, 400)
    def test_create_floatingip_invalid_floating_port_id_returns_400(self):
        """A malformed port id is rejected with 400."""
        # API-level test - no need to create all objects for l3 plugin
        res = self._create_floatingip(self.fmt, uuidutils.generate_uuid(),
                                      'iamnotanuuid', '192.168.0.1')
        self.assertEqual(res.status_int, 400)
    def test_create_floatingip_invalid_fixed_ip_address_returns_400(self):
        """A malformed fixed IP address is rejected with 400."""
        # API-level test - no need to create all objects for l3 plugin
        res = self._create_floatingip(self.fmt, uuidutils.generate_uuid(),
                                      uuidutils.generate_uuid(), 'iamnotnanip')
        self.assertEqual(res.status_int, 400)
    def test_floatingip_list_with_sort(self):
        """Floating IPs can be listed sorted by floating_ip_address."""
        with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
                               self.subnet(cidr="11.0.0.0/24"),
                               self.subnet(cidr="12.0.0.0/24")
                               ) as (s1, s2, s3):
            network_id1 = s1['subnet']['network_id']
            network_id2 = s2['subnet']['network_id']
            network_id3 = s3['subnet']['network_id']
            self._set_net_external(network_id1)
            self._set_net_external(network_id2)
            self._set_net_external(network_id3)
            fp1 = self._make_floatingip(self.fmt, network_id1)
            fp2 = self._make_floatingip(self.fmt, network_id2)
            fp3 = self._make_floatingip(self.fmt, network_id3)
            try:
                self._test_list_with_sort('floatingip', (fp3, fp2, fp1),
                                          [('floating_ip_address', 'desc')])
            finally:
                # _make_floatingip does not auto-delete; clean up explicitly.
                self._delete('floatingips', fp1['floatingip']['id'])
                self._delete('floatingips', fp2['floatingip']['id'])
                self._delete('floatingips', fp3['floatingip']['id'])
    def test_floatingip_list_with_port_id(self):
        """Listing floating IPs filtered by port_id returns only matches."""
        with self.floatingip_with_assoc() as fip:
            port_id = fip['floatingip']['port_id']
            res = self._list('floatingips',
                             query_params="port_id=%s" % port_id)
            self.assertEqual(len(res['floatingips']), 1)
            res = self._list('floatingips', query_params="port_id=aaa")
            self.assertEqual(len(res['floatingips']), 0)
    def test_floatingip_list_with_pagination(self):
        """Floating IP listing honors forward pagination parameters."""
        with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
                               self.subnet(cidr="11.0.0.0/24"),
                               self.subnet(cidr="12.0.0.0/24")
                               ) as (s1, s2, s3):
            network_id1 = s1['subnet']['network_id']
            network_id2 = s2['subnet']['network_id']
            network_id3 = s3['subnet']['network_id']
            self._set_net_external(network_id1)
            self._set_net_external(network_id2)
            self._set_net_external(network_id3)
            fp1 = self._make_floatingip(self.fmt, network_id1)
            fp2 = self._make_floatingip(self.fmt, network_id2)
            fp3 = self._make_floatingip(self.fmt, network_id3)
            try:
                self._test_list_with_pagination(
                    'floatingip', (fp1, fp2, fp3),
                    ('floating_ip_address', 'asc'), 2, 2)
            finally:
                self._delete('floatingips', fp1['floatingip']['id'])
                self._delete('floatingips', fp2['floatingip']['id'])
                self._delete('floatingips', fp3['floatingip']['id'])
    def test_floatingip_list_with_pagination_reverse(self):
        """Floating IP listing honors reverse pagination parameters."""
        with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
                               self.subnet(cidr="11.0.0.0/24"),
                               self.subnet(cidr="12.0.0.0/24")
                               ) as (s1, s2, s3):
            network_id1 = s1['subnet']['network_id']
            network_id2 = s2['subnet']['network_id']
            network_id3 = s3['subnet']['network_id']
            self._set_net_external(network_id1)
            self._set_net_external(network_id2)
            self._set_net_external(network_id3)
            fp1 = self._make_floatingip(self.fmt, network_id1)
            fp2 = self._make_floatingip(self.fmt, network_id2)
            fp3 = self._make_floatingip(self.fmt, network_id3)
            try:
                self._test_list_with_pagination_reverse(
                    'floatingip', (fp1, fp2, fp3),
                    ('floating_ip_address', 'asc'), 2, 2)
            finally:
                self._delete('floatingips', fp1['floatingip']['id'])
                self._delete('floatingips', fp2['floatingip']['id'])
                self._delete('floatingips', fp3['floatingip']['id'])
    def test_floatingip_delete_router_intf_with_subnet_id_returns_409(self):
        """Removing (by subnet) a router interface in use by a floating IP
        must be refused with 409 Conflict."""
        found = False
        with self.floatingip_with_assoc():
            for p in self._list('ports')['ports']:
                if p['device_owner'] == 'network:router_interface':
                    subnet_id = p['fixed_ips'][0]['subnet_id']
                    router_id = p['device_id']
                    self._router_interface_action(
                        'remove', router_id, subnet_id, None,
                        expected_code=exc.HTTPConflict.code)
                    found = True
                    break
        # Guard against the loop never finding a router interface port.
        self.assertTrue(found)
    def test_floatingip_delete_router_intf_with_port_id_returns_409(self):
        """Removing (by port) a router interface in use by a floating IP
        must be refused with 409 Conflict."""
        found = False
        with self.floatingip_with_assoc():
            for p in self._list('ports')['ports']:
                if p['device_owner'] == 'network:router_interface':
                    router_id = p['device_id']
                    self._router_interface_action(
                        'remove', router_id, None, p['id'],
                        expected_code=exc.HTTPConflict.code)
                    found = True
                    break
        self.assertTrue(found)
    def test_list_nets_external(self):
        """Networks can be filtered by the router:external flag."""
        with self.network() as n1:
            self._set_net_external(n1['network']['id'])
            with self.network():
                body = self._list('networks')
                self.assertEqual(len(body['networks']), 2)
                body = self._list('networks',
                                  query_params="%s=True" % l3.EXTERNAL)
                self.assertEqual(len(body['networks']), 1)
                body = self._list('networks',
                                  query_params="%s=False" % l3.EXTERNAL)
                self.assertEqual(len(body['networks']), 1)
    def test_list_nets_external_pagination(self):
        """Pagination works together with the router:external filter."""
        if self._skip_native_pagination:
            self.skipTest("Skip test for not implemented pagination feature")
        with contextlib.nested(self.network(name='net1'),
                               self.network(name='net3')) as (n1, n3):
            self._set_net_external(n1['network']['id'])
            self._set_net_external(n3['network']['id'])
            with self.network(name='net2') as n2:
                self._test_list_with_pagination(
                    'network', (n1, n3), ('name', 'asc'), 1, 3,
                    query_params='router:external=True')
                self._test_list_with_pagination(
                    'network', (n2, ), ('name', 'asc'), 1, 2,
                    query_params='router:external=False')
    def test_get_network_succeeds_without_filter(self):
        """get_networks with filters=None returns an empty list, not an error."""
        plugin = QuantumManager.get_plugin()
        ctx = context.Context(None, None, is_admin=True)
        result = plugin.get_networks(ctx, filters=None)
        self.assertEqual(result, [])
    def test_network_filter_hook_admin_context(self):
        """Admin contexts get no extra filter conditions from the hook."""
        plugin = QuantumManager.get_plugin()
        ctx = context.Context(None, None, is_admin=True)
        model = models_v2.Network
        conditions = plugin._network_filter_hook(ctx, model, [])
        self.assertEqual(conditions, [])
    def test_network_filter_hook_nonadmin_context(self):
        """Non-admin contexts get the external-network SQL condition added."""
        plugin = QuantumManager.get_plugin()
        ctx = context.Context('edinson', 'cavani')
        model = models_v2.Network
        txt = "externalnetworks.network_id IS NOT NULL"
        conditions = plugin._network_filter_hook(ctx, model, [])
        self.assertEqual(conditions.__str__(), txt)
        # Try to concatenate conditions
        conditions = plugin._network_filter_hook(ctx, model, conditions)
        self.assertEqual(conditions.__str__(), "%s OR %s" % (txt, txt))
    def test_create_port_external_network_non_admin_fails(self):
        """Non-admin tenants may not create ports on an external network."""
        with self.network(router__external=True) as ext_net:
            with self.subnet(network=ext_net) as ext_subnet:
                with testtools.ExpectedException(
                        exc.HTTPClientError) as ctx_manager:
                    with self.port(subnet=ext_subnet,
                                   set_context='True',
                                   tenant_id='noadmin'):
                        pass
                self.assertEqual(ctx_manager.exception.code, 403)
    def test_create_port_external_network_admin_suceeds(self):
        """Admins can create ports on an external network."""
        with self.network(router__external=True) as ext_net:
            with self.subnet(network=ext_net) as ext_subnet:
                with self.port(subnet=ext_subnet) as port:
                    self.assertEqual(port['port']['network_id'],
                                     ext_net['network']['id'])
    def test_create_external_network_non_admin_fails(self):
        """Non-admin tenants may not create external networks (403)."""
        with testtools.ExpectedException(exc.HTTPClientError) as ctx_manager:
            with self.network(router__external=True,
                              set_context='True',
                              tenant_id='noadmin'):
                pass
        self.assertEqual(ctx_manager.exception.code, 403)
    def test_create_external_network_admin_suceeds(self):
        """Admins can create external networks; the flag is persisted."""
        with self.network(router__external=True) as ext_net:
            self.assertEqual(ext_net['network'][l3.EXTERNAL],
                             True)
    def _test_notify_op_agent(self, target_func, *args):
        """Run `target_func` with the L3 agent notifier replaced by a mock.

        The mock is appended as the last argument of `target_func`, and the
        real notifier is restored whether or not the callback raises.
        """
        l3_rpc_agent_api_str = (
            'quantum.api.rpc.agentnotifiers.l3_rpc_agent_api.L3AgentNotifyAPI')
        oldNotify = l3_rpc_agent_api.L3AgentNotify
        try:
            with mock.patch(l3_rpc_agent_api_str) as notifyApi:
                l3_rpc_agent_api.L3AgentNotify = notifyApi
                kargs = [item for item in args]
                kargs.append(notifyApi)
                target_func(*kargs)
        except Exception:
            # Restore the real notifier before propagating the failure.
            l3_rpc_agent_api.L3AgentNotify = oldNotify
            raise
        else:
            l3_rpc_agent_api.L3AgentNotify = oldNotify
    def _test_router_gateway_op_agent(self, notifyApi):
        """Gateway set + clear must trigger two routers_updated notifications."""
        with self.router() as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                self.assertEqual(
                    2, notifyApi.routers_updated.call_count)
    def test_router_gateway_op_agent(self):
        self._test_notify_op_agent(self._test_router_gateway_op_agent)
    def _test_interfaces_op_agent(self, r, notifyApi):
        """Interface add + remove must trigger two notifications."""
        with self.port(no_delete=True) as p:
            self._router_interface_action('add',
                                          r['router']['id'],
                                          None,
                                          p['port']['id'])
            # clean-up
            self._router_interface_action('remove',
                                          r['router']['id'],
                                          None,
                                          p['port']['id'])
        self.assertEqual(2, notifyApi.routers_updated.call_count)
    def test_interfaces_op_agent(self):
        with self.router() as r:
            self._test_notify_op_agent(
                self._test_interfaces_op_agent, r)
    def _test_floatingips_op_agent(self, notifyApi):
        """A full floating IP lifecycle must trigger six notifications."""
        with self.floatingip_with_assoc():
            pass
        # add gateway, add interface, associate, deletion of floatingip,
        # delete gateway, delete interface
        self.assertEqual(6, notifyApi.routers_updated.call_count)
    def test_floatingips_op_agent(self):
        self._test_notify_op_agent(self._test_floatingips_op_agent)
    def test_l3_agent_routers_query_interfaces(self):
        """get_sync_data reports router interfaces with their subnet."""
        with self.router() as r:
            with self.port(no_delete=True) as p:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              None,
                                              p['port']['id'])
                plugin = TestL3NatPlugin()
                routers = plugin.get_sync_data(context.get_admin_context(),
                                               None)
                self.assertEqual(1, len(routers))
                interfaces = routers[0][l3_constants.INTERFACE_KEY]
                self.assertEqual(1, len(interfaces))
                subnet_id = interfaces[0]['subnet']['id']
                wanted_subnetid = p['port']['fixed_ips'][0]['subnet_id']
                self.assertEqual(wanted_subnetid, subnet_id)
                # clean-up
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              None,
                                              p['port']['id'])
    def test_l3_agent_routers_query_ignore_interfaces_with_moreThanOneIp(self):
        """Interfaces carrying more than one fixed IP are skipped by
        get_sync_data."""
        with self.router() as r:
            with self.subnet(cidr='9.0.1.0/24') as subnet:
                with self.port(subnet=subnet,
                               no_delete=True,
                               fixed_ips=[{'ip_address': '9.0.1.3'}]) as p:
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    # Give the interface port a second fixed IP.
                    port = {'port': {'fixed_ips':
                                     [{'ip_address': '9.0.1.4',
                                       'subnet_id': subnet['subnet']['id']},
                                      {'ip_address': '9.0.1.5',
                                       'subnet_id': subnet['subnet']['id']}]}}
                    plugin = TestL3NatPlugin()
                    ctx = context.get_admin_context()
                    plugin.update_port(ctx, p['port']['id'], port)
                    routers = plugin.get_sync_data(ctx, None)
                    self.assertEqual(1, len(routers))
                    interfaces = routers[0].get(l3_constants.INTERFACE_KEY, [])
                    self.assertEqual(1, len(interfaces))
                    # clean-up
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
    def test_l3_agent_routers_query_gateway(self):
        """get_sync_data exposes the router's gateway port and its subnet."""
        with self.router() as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                plugin = TestL3NatPlugin()
                routers = plugin.get_sync_data(context.get_admin_context(),
                                               [r['router']['id']])
                self.assertEqual(1, len(routers))
                gw_port = routers[0]['gw_port']
                self.assertEqual(s['subnet']['id'], gw_port['subnet']['id'])
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
    def test_l3_agent_routers_query_floatingips(self):
        """get_sync_data exposes associated floating IPs with their data."""
        with self.floatingip_with_assoc() as fip:
            plugin = TestL3NatPlugin()
            routers = plugin.get_sync_data(context.get_admin_context(),
                                           [fip['floatingip']['router_id']])
            self.assertEqual(1, len(routers))
            floatingips = routers[0][l3_constants.FLOATINGIP_KEY]
            self.assertEqual(1, len(floatingips))
            self.assertEqual(floatingips[0]['id'],
                             fip['floatingip']['id'])
            self.assertEqual(floatingips[0]['port_id'],
                             fip['floatingip']['port_id'])
            self.assertTrue(floatingips[0]['fixed_ip_address'] is not None)
            self.assertTrue(floatingips[0]['router_id'] is not None)
    def test_router_delete_subnet_inuse_returns_409(self):
        """A subnet attached to a router cannot be deleted (409 Conflict)."""
        with self.router() as r:
            with self.subnet() as s:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
                # subnet cannot be deleted as it's attached to a router
                self._delete('subnets', s['subnet']['id'],
                             expected_code=exc.HTTPConflict.code)
                # remove interface so test can exit without errors
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
class L3NatDBTestCaseXML(L3NatDBTestCase):
    """Re-run the whole L3 NAT DB test suite using the XML wire format."""
    fmt = 'xml'
| {
"content_hash": "fc59807538fe22c71710a6dbe65c7fe1",
"timestamp": "",
"source": "github",
"line_count": 1656,
"max_line_length": 79,
"avg_line_length": 48.380434782608695,
"alnum_prop": 0.4667116003894256,
"repo_name": "yamt/neutron",
"id": "22c199a8f0503237660da4268e0081dda8232bf4",
"size": "80842",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "quantum/tests/unit/test_l3_plugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "37307"
},
{
"name": "JavaScript",
"bytes": "67928"
},
{
"name": "Perl",
"bytes": "235"
},
{
"name": "Python",
"bytes": "4078056"
},
{
"name": "Shell",
"bytes": "10023"
},
{
"name": "XSLT",
"bytes": "50907"
}
],
"symlink_target": ""
} |
# Scrapy settings for the stock_scraper project. Only the essentials are set;
# the commented-out entries below document further tunables.
BOT_NAME = 'stock_scraper'
SPIDER_MODULES = ['stock_scraper.spiders']
NEWSPIDER_MODULE = 'stock_scraper.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'stock_scraper (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
#   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#   'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
#    'stock_scraper.middlewares.StockScraperSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
#    'stock_scraper.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
#    'stock_scraper.pipelines.StockScraperPipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| {
"content_hash": "f488d91470525f441f103f7d999f9f78",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 109,
"avg_line_length": 34.91139240506329,
"alnum_prop": 0.7697606961566352,
"repo_name": "zlalvani/stock-scraper",
"id": "114951eab5c47e3d67421ab824df7d85076c2b73",
"size": "3196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stock_scraper/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7230"
}
],
"symlink_target": ""
} |
"""pdfminer_xml_bindings.py:
Provides functions to convert the DOM tree generated by pdfminer to
proper Python objects.
"""
import operator
import unicodedata
from confopy.pdfextract import xml_util
# Constants
SVG_HEADER = u"""<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg width="800px" height="1000px" version="1.1"
xmlns="http://www.w3.org/2000/svg">
<style type="text/css" >
<![CDATA[
ellipse, rect {
stroke: #000000;
fill: #ffffff;
}
.textgroup {
stroke: none;
fill-opacity: 0.2;
}
]]>
</style>
"""
SVG_FOOTER = u"""</svg>
"""
SVG_TOP_PADDING = 16
SVG_LEFT_PADDING = 16
# Classes
class Box(object):
    """Super class for all objects that have a bounding box (and text).

    The bounding box is a 4-tuple (x1, y1, x2, y2).
    """
    def __init__(self, bbox):
        self.bbox = bbox
    def width(self):
        """Horizontal extent of the bounding box."""
        return self.bbox[2] - self.bbox[0]
    def height(self):
        """Vertical extent of the bounding box."""
        return self.bbox[3] - self.bbox[1]
    def as_svg(self):
        """Render the bounding box as an SVG <rect> string."""
        return bbox2svg(self.bbox)
    def _print(self, pad=u""):
        """Debug print hook; subclasses override. Base boxes print nothing."""
        pass
    def __repr__(self):
        return "Box(bbox=" + repr(self.bbox) + ")"
class TextBox(Box):
    """A block of text on a page: text lines plus dominant font/emphasis info."""
    def __init__(self, ID, bbox, lines, font, emph):
        """Constructor.
        Args:
            ID: ID of the textbox.
            bbox: Bounding box of the textbox.
            lines: List of strings representing the lines in the textbox.
            font: Tuple (font name, font size) dominant in the textbox.
            emph: List of words (string) showing highlighted words of
                  argument "lines".
        """
        super(TextBox, self).__init__(bbox)
        self.ID = ID
        self.lines = lines
        self.line_count = len(lines)
        self.font = font
        # sum() replaces the bare reduce() calls (a Python-2-only builtin);
        # the totals are identical: words and characters over all lines.
        self.word_count = sum(len(line.split()) for line in lines)
        self.character_count = sum(len(line) for line in lines)
        self.emph = emph
    def _print(self, pad=u""):
        """Debug helper: print the textbox, its lines and emphasized words."""
        if self.lines == []:
            print(pad + u"<TextBox ID=%s>" % (self.ID))
        else:
            print(pad + u"<TextBox ID=%s #w=%s %s>" % (self.ID, self.word_count, self.font))
            for line in self.lines:
                print(pad + u" " + line)
            if len(self.emph) > 0:
                # u", ".join is equivalent to the former reduce-based join.
                print(pad + u" <emph=[" + u", ".join(self.emph) + "]>")
    def __unicode__(self):
        if self.lines == []:
            return u"## Empty (Layout-)Textbox " + self.ID
        # BUG FIX: the original referenced self.fontsize, which is never set
        # (name and size live in the self.font tuple), so any non-empty
        # textbox raised AttributeError here.
        if self.ID != "":
            return u"## Textbox " + self.ID + u" (%s words, %s, %s) ##\n" % (self.word_count, self.font[0], self.font[1]) + u"\n".join(self.lines)
        return u"## Textbox (%s words, %s, %s) ##\n" % (self.word_count, self.font[0], self.font[1]) + u"\n".join(self.lines)
    def __repr__(self):
        return "TextBox(super=%r,ID=%r,lines=%r,emph=%r)" \
               % (super(TextBox, self).__repr__(), \
                  self.ID, self.lines, self.emph)
class Page(Box):
    """A page is a collection of textboxes with hierarchical layout
    information (textgroups).
    Attributes:
        ID: Usually integer ID of the page.
        textboxes: List of TextBox objects.
        layout: Root textgroup having multiple textgroups/dummy textboxes
                as children.
        prim_font: Triple of primary font, primary fontsize and #words using it.
                   Primary font is the font used for most words on the page
                   Types: (unicode, unicode, int)
        word_count: Number of words on the page/in the textboxes.
    """
    def __init__(self, ID, bbox, textboxes, layout=None):
        """Constructor.
        """
        super(Page, self).__init__(bbox)
        self.ID = ID
        self.textboxes = textboxes
        # Index the textboxes by ID for O(1) lookup in get_textbox().
        self._textboxes_by_ID = dict()
        for tb in textboxes:
            self._textboxes_by_ID[tb.ID] = tb
        self.layout = layout
        self.prim_font = find_primary_font(textboxes=textboxes)
        # sum() replaces the Python-2-only bare reduce(); same total.
        self.word_count = sum(tb.word_count for tb in textboxes)
    def get_textbox(self, tbID):
        """Return the TextBox with ID `tbID`, or None (with a warning)."""
        try:
            return self._textboxes_by_ID[tbID]
        except KeyError:
            # print() form works on Python 2 as well (single argument).
            print(u"Page %s does not contain textbox with ID %s" % (self.ID, tbID))
            return None
    def get_siblings(self, tb):
        raise NotImplementedError("get textbox siblings not yet implemented")
    def is_sibling(self, tb, mb_sib, aunteded_by=None):
        """is_sibling
        Args:
            tb: A TextBox.
            mb_sib: Another TextBox. The mission is to check whether mb_sib
                    is a sibling of tb (= is in the same TextGroup).
            aunted_by: Additional TextBox constraining the check.
                       mb_sib must not only be a sibling of tb but must also be
                       in a TextGroup that is a sibling to aunted_by.
        Return:
            True if mb_sib is a sibling of tb. If aunted_by parameter is used
            mb_sib's and tb's parent TextGroup must be a sibling of aunted_by.
            False otherwise.
        """
        tg = self.get_textgroup(tb)
        if tg and tg.has_child(mb_sib):
            if not aunteded_by:
                return True
            elif aunteded_by and tg.parent:
                return tg.parent.has_child(aunteded_by)
            else:
                return False
        return False
    def get_textgroup(self, tb, group=None):
        """Gets the TextGroup that contains the passed TextBox.
        Args:
            tb: TextBox for which the TextGroup should be returned.
            group: TextGroup for recursion.
        Return:
            TextGroup
        """
        group = group or self.layout
        if group:
            for c in group.children:
                if isinstance(c, TextBox):
                    if c.ID == tb.ID:
                        return group
                elif isinstance(c, TextGroup):
                    rec = self.get_textgroup(tb, c)
                    if rec:
                        return rec
        return None
    def as_svg(self):
        """Render the page, its textboxes and its layout tree as SVG markup."""
        svg = [SVG_HEADER]
        str_fmt = '<g transform="translate(%s, %s) scale(1, -1)">\n'
        svg.append(str_fmt % (SVG_LEFT_PADDING, self.height() + SVG_TOP_PADDING))
        svg.append(bbox2svg(self.bbox) + '\n')
        for box in self.textboxes:
            svg.append(box.as_svg() + '\n')
        # BUG FIX: the original iterated self.textgroups, an attribute that is
        # never set -- the layout tree is stored in self.layout.
        if self.layout:
            svg.append(self.layout.as_svg() + '\n')
        svg.append('</g>\n')
        svg.append(SVG_FOOTER)
        svg = ''.join(svg)
        return svg
    def _print(self, pad=u""):
        """Debug helper: recursively print the page contents."""
        print(pad + u"<Page ID=%s #w=%s prim_font=%s>" % (self.ID, self.word_count, self.prim_font))
        for tb in self.textboxes:
            tb._print(pad + u" ")
        if self.layout:
            self.layout._print(pad + u" ")
    def __unicode__(self):
        buf = list()
        # BUG FIX: the original read self.font/self.fontsize, attributes Page
        # never defines; the information lives in the prim_font triple.
        buf.append(u"# Page %s (%s words, %s, %s)#" % (self.ID, self.word_count, self.prim_font[0], self.prim_font[1]))
        for tb in self.textboxes:
            tb_str = unicode(tb)
            if tb_str != u"":
                buf.append(tb_str)
        return u"\n".join(buf)
    def __repr__(self):
        # BUG FIX: the original formatted self.textgroups, which does not
        # exist; report the layout tree instead.
        return "Page(super=%r,ID=%r,textboxes=%r,layout=%r)" \
               % (super(Page, self).__repr__(),
                  self.ID, self.textboxes, self.layout)
class TextGroup(Box):
    """Inner node of the page layout tree; children are TextGroups/TextBoxes."""
    def __init__(self, bbox, children=()):
        # Default changed from a mutable [] to an immutable (); list(children)
        # keeps the public behavior identical for all callers.
        super(TextGroup, self).__init__(bbox)
        self.parent = None
        self.children = list(children)
        for c in self.children:
            c.parent = self
    def has_child(self, tb):
        """True if `tb` (matched by ID) is a direct TextBox child."""
        for c in self.children:
            if isinstance(c, TextBox) and c.ID == tb.ID:
                return True
        return False
    def as_svg(self):
        """Render as an SVG rect with a random fill color (debug aid)."""
        import random
        rnd = random.random()
        # BUG FIX: zero-pad to six digits; a bare int such as 1234 produced
        # the invalid color "#1234".
        rnd_color = "%06d" % int(999999.0 * rnd)
        return bbox2svg(self.bbox, rnd_color)
    def _print(self, pad=u""):
        print(pad + u"<TextGroup>")
        for c in self.children:
            c._print(pad + u" ")
    def __repr__(self):
        return "TextGroup(super=%r,children=%r)" \
               % (super(TextGroup, self).__repr__(), self.children)
########################################################################
# Converter functions
########################################################################
## File, string and SVG functions
def DOM2pages(dom_document):
dom_pages = dom_document.getElementsByTagName("page")
# Forget about multiprocessing here.
# DOM objects seems to have some side effects not allowing this.
return map(DOM2page, dom_pages)
def str2bbox(bbstr):
    """Parse a comma-separated bounding box string into a tuple of floats.

    Example: "23.000,42.000,25.000,50.000" -> (23.0, 42.0, 25.0, 50.0)
    """
    return tuple(float(coord) for coord in bbstr.split(","))
def bbox2svg(bbox, fill_color=""):
    """Render a bounding box as an SVG <rect> element string.

    A non-empty `fill_color` is inserted verbatim after '#' as a fill style.
    """
    x0, y0 = bbox[0], bbox[1]
    w, h = bbox[2] - x0, bbox[3] - y0
    if fill_color:
        template = '<rect x="%s" y="%s" width="%s" height="%s" style="fill:#%s"/>'
        return template % (x0, y0, w, h, fill_color)
    template = '<rect x="%s" y="%s" width="%s" height="%s" />'
    return template % (x0, y0, w, h)
## DOM functions
#import re
#PDFMINER_CID = r"\(cid:\d+\)"
def DOM2textline(dom_textline):
    """Converts a DOM textline element to a triple.

    Returns (line, fonts, sizes): the textline as one string plus two
    parallel lists holding, per character, the font name and font size.
    (NOTE: Python 2 code -- uses the `unicode` builtin.)
    """
    dom_letters = dom_textline.getElementsByTagName("text")
    emph = list()
    letters = list()
    fonts = list()
    sizes = list()
    font = u""
    size = u""
    # Set when the pdfminer umlaut marker "(cid:127)" was seen; the next
    # letter is then converted to its umlaut form.
    next_letter_uml = False
    for dom_letter in dom_letters:
        letter = u""
        if dom_letter.firstChild:
            letter = dom_letter.firstChild.nodeValue
        if dom_letter.hasAttributes() or letter.strip() != "":
            font = unicode(dom_letter.getAttribute("font"))
            size = unicode(dom_letter.getAttribute("size"))
            #if re.match(PDFMINER_CID, letter):
            #    print "FOUND CID: %s" % letter
            letter = _escape_pdfminer_cid(letter)
            if letter == u"(cid:127)":
                next_letter_uml = True
            else:
                if next_letter_uml:
                    letter = _convert2uml(letter)
                    next_letter_uml = False
                # A CID escape may expand to several characters (e.g. "fi");
                # record font/size once per resulting character.
                for c in letter:
                    letters.append(unicode(c))
                    fonts.append(font)
                    sizes.append(size)
        else:
            # Attribute-less empty <text> elements represent spaces.
            letters.append(u" ")
            fonts.append(font)
            sizes.append(size)
    line = u"".join(letters)
    #emph = find_emphasis(line, fonts, sizes)
    return (line, fonts, sizes)
_CID_MAP = { \
u"(cid:12)": u"fi" \
, u"(cid:13)": u"fl" \
, u"(cid:25)": u"ß" \
}
def _escape_pdfminer_cid(letter):
return _CID_MAP.get(letter, letter)
def _convert2uml(letter):
if letter == u"a":
return u"ä"
elif letter == u"o":
return u"ö"
elif letter == u"u":
return u"ü"
return letter
def DOM2textbox(dom_textbox):
    """Converts a DOM textbox element into a TextBox object.

    Gathers all textlines, determines the primary font/size and emphasized
    words over the whole box, drops blank lines and re-joins hyphenated
    words. (NOTE: Python 2 code -- relies on the `unicode`, `reduce` and
    list-returning `map`/`filter` builtins.)
    """
    ID = unicode(dom_textbox.getAttribute("id"))
    bbox = str2bbox(dom_textbox.getAttribute("bbox"))
    dom_lines = dom_textbox.getElementsByTagName("textline")
    lines_fonts_sizes = map(DOM2textline, dom_lines)
    lines = map(lambda t: t[0], lines_fonts_sizes)
    fonts = map(lambda t: t[1], lines_fonts_sizes)
    sizes = map(lambda t: t[2], lines_fonts_sizes)
    def join_str(a, b):
        return a + u"\n" + b
    def join_lst(a, b):
        return a + [u"\n"] + b
    # Flatten all lines into one string (and matching attribute lists)
    # so emphasis detection can run over the whole textbox at once.
    all_lines = reduce(join_str, lines, u"")
    all_fonts = reduce(join_lst, fonts, [])
    all_sizes = reduce(join_lst, sizes, [])
    (primary_font, primary_size, emph) = find_emphasis(all_lines, all_fonts, all_sizes)
    lines = filter(lambda l: l.strip() != u"", lines)
    lines = fix_separated_words(lines)
    return TextBox(ID, bbox, lines, (primary_font, primary_size), emph)
def find_emphasis(line, fonts, sizes):
    """Helper function for DOM2textbox.

    Determines the primary font and font size of `line` and collects the
    words set in any non-primary font or size.
    """
    primary_font, font_emph = emph_words(line, fonts)
    primary_size, size_emph = emph_words(line, sizes)
    # Merge both emphasis sources, deduplicated.
    emph = list(set(font_emph + size_emph))
    return (primary_font, primary_size, emph)
def emph_words(line, attributes):
    """Helper function for find_emphasis.

    Returns the most common attribute and the words whose characters carry a
    different (rare) attribute.

    Args:
        line: A string (character/word sequence).
        attributes: A list of per-character attribute values.
            len(line) == len(attributes) is required!

    Returns:
        Tuple (primary_attribute, emphasized_words): the attribute covering
        the most characters, and the list of words using another attribute.
    """
    # FIX: removed the stray "global SLASHES" declaration -- SLASHES is never
    # defined or used anywhere.
    unique_attrs = list(set(attributes))
    # Newline markers separate textlines and are not real attributes.
    if "\n" in unique_attrs:
        unique_attrs.remove("\n")
    primary_attr = ""
    emph = []
    if len(unique_attrs) == 1:
        primary_attr = unique_attrs[0]
    elif len(unique_attrs) > 1:
        occurrences = [attributes.count(attr) for attr in unique_attrs]
        primary_attr = unique_attrs[occurrences.index(max(occurrences))]
        unique_attrs.remove(primary_attr)
        for attr in unique_attrs:
            # Rebuild the line keeping only whitespace and characters set in
            # this rare attribute, then split it into whole words.
            buf = []
            for i, char in enumerate(line):
                if char.strip() == u"":
                    buf.append(u" ")
                elif attributes[i] == attr:
                    buf.append(char)
            emph_ws = u"".join(buf)
            # FIX: list comprehension instead of "list + filter(...)", which
            # only worked on Python 2; the resulting words are identical.
            emph.extend(w for w in emph_ws.split(u" ") if w != u"")
    return (primary_attr, emph)
def fix_separated_words(lines):
    """Helper function for DOM2textbox.

    Re-joins words hyphen-separated across two lines: a line ending in "-"
    absorbs the first word of the next line. The hyphen is dropped when the
    continuation starts lowercase (ordinary hyphenation) and kept when it
    starts uppercase (compound names). All lines are returned stripped.
    """
    new_lines = list()
    i = 0
    take_from_new_lines = False
    while i < len(lines):
        l = lines[i].strip()
        if take_from_new_lines:
            # The remainder of this line was computed (and provisionally
            # appended) by the previous iteration; reclaim it for processing.
            l = new_lines[i]
            new_lines.pop()
            take_from_new_lines = False
        if l.endswith(u"-"):
            if i + 1 < len(lines):
                next_l = lines[i + 1].strip()
                # BUG FIX: value comparison (!=) instead of the identity
                # check "is not", which is unreliable for strings.
                if next_l != u"":
                    next_l_words = next_l.split()
                    if len(next_l_words) > 0:
                        if next_l[0].islower():
                            # Ordinary hyphenation: drop the hyphen.
                            l = l[:-1] + next_l_words[0]
                        else:
                            # Capitalized continuation: keep the hyphen.
                            l = l + next_l_words[0]
                        new_lines.append(l)
                        l = u" ".join(next_l_words[1:])
                        take_from_new_lines = True
            new_lines.append(l)
        else:
            new_lines.append(l)
        i += 1
    return new_lines
def DOM2textgroup(dom_textgroup):
    """Recursively converts a DOM textgroup element into a TextGroup.

    Direct <textbox> children become TextBox objects and nested <textgroup>
    elements become child TextGroups. (Python 2: map returns lists here.)
    """
    bbox = str2bbox(dom_textgroup.getAttribute("bbox"))
    textboxes = map( DOM2textbox
                   , xml_util.getChildElementsByTagName(dom_textgroup, "textbox"))
    children = map( DOM2textgroup
                  , xml_util.getChildElementsByTagName(dom_textgroup, "textgroup"))
    return TextGroup(bbox, textboxes + children)
def DOM2page(dom_page):
    """Converts a DOM page element into a Page object.

    Collects the page's direct <textbox> children and, when present, the
    first <textgroup> under <layout> as the root of the layout tree.
    """
    page_id = unicode(dom_page.getAttribute("id"))
    page_bbox = str2bbox(dom_page.getAttribute("bbox"))
    #dom_textboxes = dom_page.getElementsByTagName("textbox")
    dom_textboxes = xml_util.getChildElementsByTagName(dom_page, "textbox")
    textboxes = map(DOM2textbox, dom_textboxes)
    dom_layout = dom_page.getElementsByTagName("layout")
    layout = None
    if len(dom_layout):
        dom_textgroups = xml_util.getChildElementsByTagName(dom_layout[0], "textgroup")
        textgroups = map(DOM2textgroup, dom_textgroups)
        if len(textgroups):
            # Only the first textgroup is treated as the layout root.
            layout = textgroups[0]
    return Page(page_id, page_bbox, textboxes, layout)
## Analysis functions
def find_primary_font(textboxes=(), pages=()):
    """Gets the primary (most used) font across multiple textboxes or pages.

    If both arguments are passed, only the primary font of the textboxes is
    determined!

    Args:
        textboxes: TextBoxes to extract the primary font from.
        pages: Pages to extract the primary font from.

    Returns:
        3-tuple (unicode, unicode, int) of primary font, primary fontsize
        and number of characters using this font.
        Returns (u"", u"", 0) if no argument is passed.
    """
    # Defaults changed from mutable [] to immutable (); iteration behavior
    # for all callers is unchanged.
    def _most_used(fonts):
        # Pick the (font, size) key with the largest count, or None if the
        # tally is empty. items() replaces the Python-2-only iteritems().
        sorted_fonts = sorted(fonts.items(), key=operator.itemgetter(1))
        if len(sorted_fonts) > 0:
            prim_font = sorted_fonts.pop()[0]
            return (prim_font[0], prim_font[1], fonts[prim_font])
        return None
    # TextBoxes take precedence: tally characters per (font, size) key.
    fonts = dict()
    for tb in textboxes:
        fonts[tb.font] = fonts.get(tb.font, 0) + tb.character_count
    result = _most_used(fonts)
    if result is not None:
        return result
    # Pages: tally the per-page primary-font word counts.
    fonts = dict()
    for page in pages:
        font_key = (page.prim_font[0], page.prim_font[1])
        fonts[font_key] = fonts.get(font_key, 0) + page.prim_font[2]
    result = _most_used(fonts)
    if result is not None:
        return result
    # Default when nothing was supplied.
    return (u"", u"", 0)
""" TEST
"""
if __name__ == '__main__':
import sys
from xml.dom.minidom import parse
args = sys.argv[1:]
dom_doc = parse("SEUH_Kompetenzerwerb.xml")
pages = DOM2pages(dom_doc)
# Single page to SVG
if len(args) > 0:
pageNr = int(args[0])
if pageNr >= 0 and pageNr < len(pages):
print pages[pageNr].as_svg()
# Debug: print all pages as text
for page in pages:
print unicode(page)
print ""
| {
"content_hash": "8dab0e70826a135ac41c068b2d9c219b",
"timestamp": "",
"source": "github",
"line_count": 561,
"max_line_length": 144,
"avg_line_length": 32.90730837789661,
"alnum_prop": 0.5522452738204864,
"repo_name": "ooz/Confopy",
"id": "d199854c477bfdec5e1aaf5ae83a2b96e126802f",
"size": "18481",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "confopy/pdfextract/pdfminer_xml_bindings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "182874"
},
{
"name": "Shell",
"bytes": "426"
}
],
"symlink_target": ""
} |
"""
This example:
1. Connects to the current model
2. Deploy a local charm with a oci-image resource and waits until it reports
itself active
3. Destroys the unit and application
"""
from juju import jasyncio
from juju.model import Model
from pathlib import Path
async def main():
    """Deploy the local oci-image test charm, wait for it, then clean up."""
    model = Model()
    print('Connecting to model')
    # connect to current model with current user, per Juju CLI
    await model.connect()
    try:
        print('Deploying local-charm')
        base_dir = Path(__file__).absolute().parent.parent
        charm_path = '{}/tests/integration/oci-image-charm'.format(base_dir)
        app = await model.deploy(
            charm_path,
            resources={"oci-image": "ubuntu/latest"},
        )
        print('Waiting for active')
        def _all_units_active():
            return all(u.workload_status == 'active' for u in app.units)
        await model.block_until(_all_units_active, timeout=120)
        print('Removing Charm')
        await app.remove()
    finally:
        # Always disconnect, even if deploy or the wait failed.
        print('Disconnecting from model')
        await model.disconnect()
if __name__ == '__main__':
    jasyncio.run(main())
| {
"content_hash": "58ad25afb088908083f73ced7843ba57",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 76,
"avg_line_length": 26.152173913043477,
"alnum_prop": 0.6134663341645885,
"repo_name": "juju/python-libjuju",
"id": "fe58649ad9ceeeb9f62d20060425dd3569d75c0b",
"size": "1203",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/deploy_local_resource.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1171"
},
{
"name": "Python",
"bytes": "10522198"
},
{
"name": "Shell",
"bytes": "2161"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
def is_quoted(data):
return len(data) > 1 and data[0] == data[-1] and data[0] in ('"', "'") and data[-2] != '\\'
def unquote(data):
    """Strip one layer of surrounding quotes from *data* when it starts and
    ends with the same unescaped quote character; otherwise return it as-is."""
    quoted = (len(data) > 1 and data[0] == data[-1]
              and data[0] in ('"', "'") and data[-2] != '\\')
    return data[1:-1] if quoted else data
| {
"content_hash": "6c3bec1c21e0eaa0540f1c4ec9c4c01d",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 107,
"avg_line_length": 33.416666666666664,
"alnum_prop": 0.6259351620947631,
"repo_name": "abtreece/ansible",
"id": "7b94f906538d8bb90cb50387f5f2b6babf7c28a9",
"size": "1140",
"binary": false,
"copies": "139",
"ref": "refs/heads/stable-2.2",
"path": "lib/ansible/parsing/quoting.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
import os, sys
import numpy as np
import pickle
from config import Config
import itk
from possum import pos_itk_core
# -----------------------------------------------
# Load constants from the config file given as the first CLI argument.
# NOTE(review): `file()` is a Python 2 builtin, so this script targets
# Python 2 -- confirm before porting.
# -----------------------------------------------
CONFIG = Config(file(sys.argv[1]))
def read_itk_image(image_filename):
    """
    Load the image stored at `image_filename`.

    The pixel/image type is autodetected via `pos_itk_core` so the right
    `itk.ImageFileReader` specialization is used.

    :param image_filename: path of the file to load
    :type image_filename: str
    :return: the loaded image
    :rtype: `itk.Image`
    """
    detected_type = pos_itk_core.autodetect_file_type(image_filename)
    reader = itk.ImageFileReader[detected_type].New()
    reader.SetFileName(image_filename)
    reader.Update()
    return reader.GetOutput()
def write_itk_image(image_to_save, filename):
    """
    Save `image_to_save` to `filename` using `itk.ImageFileWriter`.

    :param image_to_save: image to be written; subclass of `itk.Image`
    :param filename: destination path
    :type filename: str
    """
    image_writer = itk.ImageFileWriter[image_to_save].New()
    image_writer.SetInput(image_to_save)
    image_writer.SetFileName(filename)
    image_writer.Update()
def reslice_image(transforms, moving_image, reference_image=None,
                  interpolator=None, default_pixel_value=0):
    """
    Resample `moving_image` through the given chain of `transforms` into the
    space of `reference_image`.

    :param transforms: iterable of `itk.Transform` subclasses; they are folded
        into a single composite transformation.
    :param moving_image: `itk.Image` to be resliced.
    :param reference_image: `itk.Image` defining the output grid (size, origin,
        spacing, direction); when None the moving image itself is used.
    :param interpolator: accepted for API compatibility but not used by the
        current implementation.
    :param default_pixel_value: value assigned to voxels falling outside the
        moving image. Must be compatible with the image's pixel type (int,
        float, or a tuple for vector images) -- a mismatch can crash.
    :return: the resliced image
    :rtype: `itk.Image`
    """
    # Fold all partial transformations into one composite transform.
    dimension = moving_image.GetImageDimension()
    combined_transform = itk.CompositeTransform[(itk.D, dimension)].New()
    for partial_transform in transforms:
        combined_transform.AddTransform(partial_transform)

    # Without an explicit reference, reslice the image into its own space.
    if reference_image is None:
        reference_image = moving_image

    # Configure and run the resampling filter on the reference grid.
    resampler = itk.ResampleImageFilter[moving_image, reference_image].New()
    resampler.SetTransform(combined_transform)
    resampler.SetInput(moving_image)
    resampler.SetSize(reference_image.GetLargestPossibleRegion().GetSize())
    resampler.SetOutputOrigin(reference_image.GetOrigin())
    resampler.SetOutputSpacing(reference_image.GetSpacing())
    resampler.SetOutputDirection(reference_image.GetDirection())
    resampler.SetDefaultPixelValue(default_pixel_value)
    resampler.Update()
    return resampler.GetOutput()
def write_itk_matrix_transformation_to_file(transformation, filename):
    """
    Serialize an itk matrix transformation (a subclass of
    `itk.MatrixOffsetTransformBase`) into a text file.

    The writer class name changed in ITK >= 4.5, so the appropriate class is
    picked at runtime. See:
    http://www.itk.org/Wiki/ITK/Examples/IO/TransformFileReader
    http://review.source.kitware.com/#/c/14293/1

    :param transformation: transformation to be written
    :param filename: destination path
    :type filename: str
    """
    major = itk.Version.GetITKMajorVersion()
    minor = itk.Version.GetITKMinorVersion()
    if major > 4 or (major == 4 and minor >= 5):
        transform_writer = itk.TransformFileWriterTemplate.D.New()
    else:
        transform_writer = itk.TransformFileWriter.New()
    transform_writer.SetInput(transformation)
    transform_writer.SetFileName(filename)
    transform_writer.Update()
def get_random_rigid_3d_tranform_parameters(tmean, tsigma, rmean, rsigma):
    """
    Sample random rigid-body parameters: translations drawn from
    N(tmean, tsigma) and rotations -- given in degrees -- drawn from
    N(radians(rmean), radians(rsigma)).

    :return: (translations, rotations) tuple; the rotations are in radians.
    """
    sampled_translations = np.random.normal(loc=tmean, scale=tsigma)
    sampled_rotations = np.random.normal(
        loc=np.radians(rmean), scale=np.radians(rsigma))
    return sampled_translations, sampled_rotations
def get_random_rigid_2d_params(tmean, tsigma, rmean, rsigma):
    """
    Draw a random in-plane rigid transformation: translation ~ N(tmean, tsigma)
    and rotation (degrees) ~ N(rmean, rsigma), centred at the slice centre
    taken from the global CONFIG.

    :return: a configured `itk.Euler2DTransform`
    """
    sampled_translation = np.random.normal(loc=tmean, scale=tsigma)
    sampled_rotation = np.random.normal(
        loc=np.radians(rmean), scale=np.radians(rsigma))
    planar_transform = itk.Euler2DTransform.D.New()
    planar_transform.SetTranslation(list(sampled_translation))
    planar_transform.SetRotation(sampled_rotation)
    planar_transform.SetCenter(CONFIG['slice_image_center'])
    return planar_transform
def apply_transformation_workflow(moving_file, output_file, output_transform_file=None):
    """
    Apply a random rigid 3D transformation (parameters drawn according to the
    global CONFIG) to the image in `moving_file`, rotating around the image
    centre, and save the result to `output_file`.

    :param moving_file: path of the image to distort
    :param output_file: path where the distorted image is written
    :param output_transform_file: optional path; when given, the generated
        transformation itself is saved there as well
    """
    moving_image = read_itk_image(moving_file)

    translation, rotation = get_random_rigid_3d_tranform_parameters(
        tmean=CONFIG['tmean'], tsigma=CONFIG['tsigma'],
        rmean=CONFIG['rmean'], rsigma=CONFIG['rsigma'])

    distortion = itk.Euler3DTransform.D.New()
    # NOTE(review): the sampled translation is discarded here -- the transform
    # applies the random rotation only, with translation pinned to zero.
    distortion.SetTranslation([0, 0, 0])
    distortion.SetRotation(*list(rotation))

    # Rotate around the physical centre of the image volume.
    volume_size = moving_image.GetLargestPossibleRegion().GetSize()
    center_index = [volume_size[0]/2, volume_size[1]/2, volume_size[2]/2]
    center_index = map(int, center_index)  # indexes may come back as long ints
    physical_center = moving_image.TransformIndexToPhysicalPoint(center_index)
    distortion.SetCenter(physical_center)

    distorted_image = reslice_image([distortion], moving_image,
        default_pixel_value=CONFIG['default_pixel_value'])
    write_itk_image(distorted_image, output_file)

    if output_transform_file:
        write_itk_matrix_transformation_to_file(distortion, output_transform_file)
def execute():
    """Distort the reference volume, then generate one random slicewise 2D
    transform file per section in the configured range."""
    apply_transformation_workflow(
        CONFIG['files']['ref_input'],
        CONFIG['files']['ref_deformed'],
        CONFIG['files']['ref_to_ref_transform'])

    for section_index in range(*CONFIG['range']):
        per_slice_transform = get_random_rigid_2d_params(
            tmean=CONFIG['stmean'], tsigma=CONFIG['stsigma'],
            rmean=CONFIG['srmean'], rsigma=CONFIG['srsigma'])
        write_itk_matrix_transformation_to_file(
            per_slice_transform,
            os.path.join(CONFIG['files']['slicewise_transf'] % section_index))
# Allow the module to be executed as a standalone script.
if __name__ == "__main__":
    execute()
| {
"content_hash": "07353ad374dd2d657c8cf691d0f8bfdf",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 88,
"avg_line_length": 36.850515463917525,
"alnum_prop": 0.6949223667645824,
"repo_name": "pmajka/poSSum",
"id": "adbb909321568c60bbbb075ea7d1ba3c126e7908",
"size": "7149",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_banana_pairwise/step_001_distort_reference_image.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "10113"
},
{
"name": "Python",
"bytes": "685592"
},
{
"name": "Shell",
"bytes": "130154"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from django.conf import settings
from advertisements.urls import providerpatterns
# Root URL configuration. Uses the legacy `patterns()` helper, which was
# removed in Django 1.10.
urlpatterns = patterns('',
    url(r'^data/', include('advertisements.urls', namespace='advert')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^accounts/', include('accounts.urls', namespace='accounts')),
    # Catch-all provider routes; must stay last so they do not shadow the above.
    url(r'^', include(providerpatterns, namespace='provider')),
)
# Serve uploaded media through Django itself only during development.
if settings.DEBUG:
    urlpatterns += patterns('',
        (r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
    ) | {
"content_hash": "f0ad634fdb3e7ba2ad4c99af4ce25236",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 103,
"avg_line_length": 30.38095238095238,
"alnum_prop": 0.6943573667711599,
"repo_name": "OpenAds/OpenAds",
"id": "d7a7e6b42e7fc85088902f10600b37e5d8df9937",
"size": "638",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "OpenAds/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "42985"
},
{
"name": "HTML",
"bytes": "15176"
},
{
"name": "JavaScript",
"bytes": "1489256"
},
{
"name": "Python",
"bytes": "137766"
}
],
"symlink_target": ""
} |
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class GetDocumentQuestionnairesResponse:
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually."""

    def __init__(self):
        # Swagger type name for every serialisable attribute of this model.
        self.swaggerTypes = {
            'result': 'GetDocumentQuestionnairesResult',
            'status': 'str',
            'error_message': 'str',
            'composedOn': 'int'
        }
        # Every payload attribute starts out unset.
        for attribute in self.swaggerTypes:
            setattr(self, attribute, None)
| {
"content_hash": "cb995cfc11394968f506764c44634b02",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 77,
"avg_line_length": 31.594594594594593,
"alnum_prop": 0.6569717707442259,
"repo_name": "liosha2007/temporary-groupdocs-python3-sdk",
"id": "c9f65bb514d27e45e510e44bc0adde1fba009f1c",
"size": "1191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "groupdocs/models/GetDocumentQuestionnairesResponse.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "992590"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds (forwards) or drops (backwards) a database
    index on the `order` column of the `packages_release` table."""

    def forwards(self, orm):
        # Adding index on 'Release', fields ['order']
        db.create_index('packages_release', ['order'])

    def backwards(self, orm):
        # Removing index on 'Release', fields ['order']
        db.delete_index('packages_release', ['order'])

    # Frozen ORM state captured when this migration was generated; South uses
    # it to build the `orm` object passed to forwards()/backwards().
    # Do not edit by hand.
    models = {
        'packages.changelog': {
            'Meta': {'object_name': 'ChangeLog'},
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime(2012, 2, 20, 18, 46, 52, 312737)', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime(2012, 2, 20, 18, 46, 52, 312858)'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['packages.Package']"}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['packages.Release']", 'null': 'True', 'blank': 'True'}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '25', 'db_index': 'True'})
        },
        'packages.package': {
            'Meta': {'object_name': 'Package'},
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime(2012, 2, 20, 18, 46, 52, 317211)'}),
            'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'downloads_synced_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 20, 18, 46, 52, 317519)'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime(2012, 2, 20, 18, 46, 52, 317305)'}),
            'name': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'})
        },
        'packages.packageuri': {
            'Meta': {'unique_together': "(['package', 'uri'],)", 'object_name': 'PackageURI'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_links'", 'to': "orm['packages.Package']"}),
            'uri': ('django.db.models.fields.URLField', [], {'max_length': '400'})
        },
        'packages.readthedocspackageslug': {
            'Meta': {'object_name': 'ReadTheDocsPackageSlug'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'readthedocs_slug'", 'unique': 'True', 'to': "orm['packages.Package']"}),
            'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'})
        },
        'packages.release': {
            'Meta': {'unique_together': "(('package', 'version'),)", 'object_name': 'Release'},
            'author': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'author_email': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'classifiers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'releases'", 'blank': 'True', 'to': "orm['packages.TroveClassifier']"}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime(2012, 2, 20, 18, 46, 52, 315183)', 'db_index': 'True'}),
            'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'download_uri': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'blank': 'True'}),
            'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'keywords': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'license': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'maintainer': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'maintainer_email': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime(2012, 2, 20, 18, 46, 52, 315281)'}),
            'order': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'releases'", 'to': "orm['packages.Package']"}),
            'platform': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'raw_data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'requires_python': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
            'summary': ('django.db.models.fields.TextField', [], {}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '512'})
        },
        'packages.releasefile': {
            'Meta': {'unique_together': "(('release', 'type', 'python_version', 'filename'),)", 'object_name': 'ReleaseFile'},
            'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime(2012, 2, 20, 18, 46, 52, 314335)', 'db_index': 'True'}),
            'digest': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
            'downloads': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'file': ('django.db.models.fields.files.FileField', [], {'max_length': '512'}),
            'filename': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime(2012, 2, 20, 18, 46, 52, 314433)'}),
            'python_version': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'files'", 'to': "orm['packages.Release']"}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '25'})
        },
        'packages.releaseobsolete': {
            'Meta': {'object_name': 'ReleaseObsolete'},
            'environment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kind': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'obsoletes'", 'to': "orm['packages.Release']"}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'packages.releaseprovide': {
            'Meta': {'object_name': 'ReleaseProvide'},
            'environment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kind': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'provides'", 'to': "orm['packages.Release']"}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'packages.releaserequire': {
            'Meta': {'object_name': 'ReleaseRequire'},
            'environment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kind': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'requires'", 'to': "orm['packages.Release']"}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'packages.releaseuri': {
            'Meta': {'object_name': 'ReleaseURI'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'uris'", 'to': "orm['packages.Release']"}),
            'uri': ('django.db.models.fields.URLField', [], {'max_length': '500'})
        },
        'packages.troveclassifier': {
            'Meta': {'object_name': 'TroveClassifier'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'trove': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '350'})
        }
    }
    complete_apps = ['packages']
| {
"content_hash": "fe4c7a3d467c40ca738351bb6897ca4b",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 197,
"avg_line_length": 75.18897637795276,
"alnum_prop": 0.5541941564561734,
"repo_name": "crateio/crate.web",
"id": "7effebdd830b1293fd36fc34ee2e968fedd98b62",
"size": "9573",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "crate/web/packages/migrations/0012_auto.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "8467"
},
{
"name": "Python",
"bytes": "410681"
}
],
"symlink_target": ""
} |
"""ThreatConnect TI Intrusion Set"""
# standard library
from typing import TYPE_CHECKING
# first-party
from tcex.api.tc.v2.threat_intelligence.mappings.group.group import Group
if TYPE_CHECKING:
# first-party
from tcex.api.tc.v2.threat_intelligence.threat_intelligence import ThreatIntelligence
class IntrusionSet(Group):
    """Group mapping for the ThreatConnect "Intrusion Set" group type.

    Args:
        ti (ThreatIntelligence): An instance of the ThreatIntelligence Class.
        name (str, kwargs): [Required for Create] The name for this Group.
        owner (str, kwargs): The name for this Group. Default to default Org when not provided
    """

    def __init__(self, ti: 'ThreatIntelligence', **kwargs):
        """Initialize Class Properties."""
        # Pin the sub-type and API routing details for this group type and
        # forward everything else to the generic Group implementation.
        super().__init__(
            ti, sub_type='Intrusion Set', api_entity='intrusionSet',
            api_branch='intrusionSets', **kwargs)
| {
"content_hash": "11ee5cb6fc1259bd2a63676210139237",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 94,
"avg_line_length": 31.7,
"alnum_prop": 0.6561514195583596,
"repo_name": "ThreatConnect-Inc/tcex",
"id": "c97eea3c89cf29c5abe1601259f5831987ba5e0e",
"size": "951",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tcex/api/tc/v2/threat_intelligence/mappings/group/group_types/intrusion_set.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2735042"
}
],
"symlink_target": ""
} |
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 2.0.21
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class BlogCategoryListResponse(object):
    """
    Swagger-generated model: one page of blog categories plus the pagination
    descriptor for the whole result set.

    NOTE: originally produced by the swagger code generator; the public
    attribute names are part of the API and must stay stable.
    """

    def __init__(self, data=None, pagination=None):
        """
        :param data: list[BlogCategory] payload for this page
        :param pagination: Pagination descriptor for the result set
        """
        # Swagger metadata: attribute name -> swagger type name, and
        # attribute name -> JSON key.
        self.swagger_types = {
            'data': 'list[BlogCategory]',
            'pagination': 'Pagination'
        }
        self.attribute_map = {
            'data': 'data',
            'pagination': 'pagination'
        }
        self._data = data
        self._pagination = pagination

    @property
    def data(self):
        """list[BlogCategory]: the categories carried by this response."""
        return self._data

    @data.setter
    def data(self, data):
        self._data = data

    @property
    def pagination(self):
        """Pagination: paging information for the full result set."""
        return self._pagination

    @pagination.setter
    def pagination(self, pagination):
        self._pagination = pagination

    def to_dict(self):
        """Convert the model into a plain dict, recursing into nested models."""
        def _plain(value):
            # Anything exposing to_dict() is treated as a nested model;
            # lists and dicts are converted element-wise (one level deep,
            # matching the generated-code behaviour).
            if isinstance(value, list):
                return [x.to_dict() if hasattr(x, "to_dict") else x
                        for x in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                        for k, v in value.items()}
            return value

        return {attr: _plain(getattr(self, attr))
                for attr in self.swagger_types}

    def to_str(self):
        """Return a pretty-printed string form of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two models are equal when all their attributes are equal."""
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| {
"content_hash": "5ed3d36d23e017e1b6a3b984a5f35f0f",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 77,
"avg_line_length": 26.23134328358209,
"alnum_prop": 0.5351351351351351,
"repo_name": "kinow-io/kinow-python-sdk",
"id": "a9853ab3bbfe4f747c20ceadd9871e1270569f58",
"size": "3532",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kinow_client/models/blog_category_list_response.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4659182"
},
{
"name": "Shell",
"bytes": "1666"
}
],
"symlink_target": ""
} |
class FormatDef:
    """
    Format (internal) definition: couples a short format name, a mimetype and
    an optional rdflib serialization identifier.

    Equality is deliberately loose: a FormatDef compares equal to another
    FormatDef with the same mimetype, to a mimetype string (anything after a
    ';' parameter separator is ignored), or to a plain format name.
    """

    def __init__(self, name, mimetype, rdflibMapping=None):
        self.name = name
        self.mimetype = mimetype
        self.rdflibMapping = rdflibMapping

    def __str__(self):
        return "%s[%s]" % (self.name, self.mimetype)

    def __cmp__(self, other):
        # Python 2 comparison hook: 0 means equal, non-zero means different.
        # Mirrors the matching rules documented on the class.
        if other is None:
            return -1
        elif isinstance(other, FormatDef):
            return -1 * int(not self.mimetype == other.mimetype)
        elif "/" in other:
            if ";" in other:
                return -1 * int(not self.mimetype == other.split(";")[0])
            else:
                return -1 * int(not self.mimetype == other)
        else:
            return -1 * int(not self.name == other)

    def __eq__(self, other):
        if other is None:
            # BUG FIX: this used to return -1, which is truthy, so any
            # FormatDef compared equal to None (`fmt == None` was True).
            return False
        elif isinstance(other, FormatDef):
            return self.mimetype == other.mimetype
        elif "/" in other:
            if ";" in other:
                return self.mimetype == other.split(";")[0]
            else:
                return self.mimetype == other
        else:
            return self.name == other
class Format:
    """
    Catalogue of the formats known to the Redlink SDK, as FormatDef constants.
    """
    TEXT = FormatDef("text", "text/plain")
    PDF = FormatDef("pdf", "application/pdf")
    HTML = FormatDef("html", "text/html")
    OFFICE = FormatDef("office", "application/doc")
    OCTETSTREAM = FormatDef("octetstream", "application/octet-stream")
    JSON = FormatDef("json", "application/json")
    XML = FormatDef("xml", "application/xml")
    REDLINKJSON = FormatDef("redlinkjson", "application/redlink-analysis+json")
    REDLINKXML = FormatDef("redlinkxml", "application/redlink-analysis+xml")
    JSONLD = FormatDef("jsonld", "application/ld+json", "json-ld")
    RDFXML = FormatDef("rdfxml", "application/rdf+xml", "xml")
    RDFJSON = FormatDef("rdfjson", "application/rdf+json")
    TURTLE = FormatDef("turtle", "text/turtle", "turtle")
    NT = FormatDef("nt", "text/rdf+n3", "n3")

    def from_mimetype(mimetype):
        """
        Return the C{FormatDef} constant matching the given mimetype (or
        format name), or None when nothing matches.

        @type mimetype: str
        @param mimetype: format mimetype
        @return: format
        """
        for candidate in Format.__dict__.values():
            if isinstance(candidate, FormatDef) and candidate == mimetype:
                return candidate
        return None
| {
"content_hash": "b1ccbbb4e9fa8c5744eaf9a6a37c2df4",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 79,
"avg_line_length": 31.38157894736842,
"alnum_prop": 0.5744234800838575,
"repo_name": "redlink-gmbh/redlink-python-sdk",
"id": "97e96a66abb63eb1793aee15bb4a48d59c3be435",
"size": "2955",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "redlink/format.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "35595"
}
],
"symlink_target": ""
} |
"""This script sends a route from a GPX file to the Android emulator.
usage: routing [-h] [-s HOST] [-p PORT] [-x SPEED] [-t STEP] gpx
positional arguments:
gpx A GPX file containing routing information
optional arguments:
-h, --help show this help message and exit
-s HOST, --host HOST Host
-p PORT, --port PORT Port
-x SPEED, --speed SPEED
Speed in km/h
-t STEP, --step STEP Step distance in meters
Copyright (C) 2015 Simon Kalt
"""
import telnetlib
import argparse
import gpxpy
from gpxpy.gpx import GPXTrackPoint
import time
def read_gpx_file(file):
    """Parse the GPX file at path *file* and return the gpxpy document."""
    with open(file, 'r') as handle:
        return gpxpy.parse(handle)
def send_point(connection, point):
    """Send one GPS fix for *point* to the Android emulator over *connection*.

    The emulator console expects longitude before latitude in `geo fix`.
    """
    message = ('geo fix {longitude} {latitude}\n'
               .format(**point.__dict__))
    print('Sending:', message, end='')
    connection.write(message.encode('ascii'))
def equalize_distance(points, step_distance=10):
    """Equalize the distances between the given points.

    Return a new list containing the original points plus linearly
    interpolated intermediate points, so that no two consecutive points are
    more than `step_distance` meters apart.

    :param points: sequence of GPX track points to densify
    :param step_distance: maximum allowed gap between consecutive points (m)
    :return: a new list of points; the input sequence is not modified

    BUG FIXES vs. the previous version:
    * a 0- or 1-point input used to return [], silently dropping a lone point;
    * every interior point was appended twice (once as the segment end, once
      as the next segment start), duplicating most of the route.
    """
    if len(points) < 2:
        return list(points)
    result_points = []
    for p1, p2 in zip(points, points[1:]):
        result_points.append(p1)
        dist = p1.distance_2d(p2)
        if dist > step_distance:
            # Insert n evenly spaced points between p1 and p2.
            n = int(dist // step_distance)
            diff_x = (p2.longitude - p1.longitude) / (n + 1)
            diff_y = (p2.latitude - p1.latitude) / (n + 1)
            for j in range(1, n + 1):
                result_points.append(GPXTrackPoint(p1.latitude + j * diff_y,
                                                   p1.longitude + j * diff_x))
    # Close the route with the final original point (appended exactly once).
    result_points.append(points[-1])
    return result_points
def run(file, host='127.0.0.1', port=5554, speed=20, step_distance=5):
    """Stream the route from the GPX *file* to the Android emulator console.

    Points are densified to at most *step_distance* meters apart and sent at
    a rate simulating *speed* km/h over a telnet connection to host:port.
    """
    pause = step_distance / kmh_to_ms(speed)
    conn = telnetlib.Telnet(host, port)
    gpx = read_gpx_file(file)
    for track in gpx.tracks:
        for segment in track.segments:
            dense_points = equalize_distance(segment.points, step_distance)
            if len(segment.points) > 0:
                # Prime the emulator with the starting fix, then wait for the
                # operator before replaying the whole route.
                send_point(conn, segment.points[0])
                input('Press enter to start sending the route...')
            for route_point in dense_points:
                send_point(conn, route_point)
                time.sleep(pause)
    conn.close()
def kmh_to_ms(kmh):
    """Convert a speed in kilometers/hour to meters/second."""
    return kmh / 3.6
# Command-line entry point; see the module docstring for usage details.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(prog='routing')
    parser.add_argument('gpx',
                        help='A GPX file containing routing information',
                        type=str)
    parser.add_argument('-s', '--host', type=str,
                        default='127.0.0.1', help='Host')
    parser.add_argument('-p', '--port', type=int,
                        default=5554, help='Port')
    parser.add_argument('-x', '--speed', type=int,
                        default=20, help='Speed in km/h')
    parser.add_argument('-t', '--step', type=int,
                        default=10, help='Step distance in meters')
    args = parser.parse_args()
    run(args.gpx, args.host, args.port, args.speed, args.step)
| {
"content_hash": "7dcaf70f142b3b9671bbfbd7ec165733",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 73,
"avg_line_length": 29.046875,
"alnum_prop": 0.5750403442711135,
"repo_name": "ChasingPictures/front-end",
"id": "a1c09e5f24139cc1a3ab73894b4101f4db054927",
"size": "3743",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "util/routing.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "151115"
},
{
"name": "Python",
"bytes": "3743"
},
{
"name": "Shell",
"bytes": "1307"
}
],
"symlink_target": ""
} |
from pyramid.view import view_config
from pyramid_sqlalchemy import Session
from ..models.news import NewsModel
from ..models.forum import ForumTopicModel
from ..models.link import LinkModel
@view_config(route_name='home', renderer='templates/home.jinja2')
def home_view(request):
    """Render the landing page: ten newest news items, ten newest forum
    topics, and the full link list."""
    latest_news = Session.query(NewsModel).order_by(NewsModel.id.desc())[:10]
    latest_topics = Session.query(ForumTopicModel).order_by(ForumTopicModel.id.desc())[:10]
    all_links = Session.query(LinkModel).all()
    return {
        'news_list': latest_news,
        'forum_topic_list': latest_topics,
        'link_list': all_links,
    }
| {
"content_hash": "2f3407bbfca84aa739afb97de24913af",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 97,
"avg_line_length": 46.07692307692308,
"alnum_prop": 0.7495826377295493,
"repo_name": "fosstp/fosstp",
"id": "3ac7eae6fe391cb4530be41a2d0bbbd1ee0c1057",
"size": "599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fosstp/views/home.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "41598"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Mako",
"bytes": "493"
},
{
"name": "Python",
"bytes": "43390"
}
],
"symlink_target": ""
} |
# Inherit the Faster R-CNN R50-FPN model and the VOC0712 dataset/runtime bases.
_base_ = [
    '../_base_/models/faster_rcnn_r50_fpn.py', '../_base_/datasets/voc0712.py',
    '../_base_/default_runtime.py'
]
# Resize the box head for the 20 Pascal VOC foreground classes.
model = dict(roi_head=dict(bbox_head=dict(num_classes=20)))
# Pascal VOC class names, in canonical order.
CLASSES = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car',
           'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike',
           'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor')
# dataset settings
# VOC images with COCO-format (JSON) annotations, hence CocoDataset.
dataset_type = 'CocoDataset'
data_root = 'data/VOCdevkit/'
# ImageNet mean/std used by the pretrained backbone.
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True),
    dict(type='Resize', img_scale=(1000, 600), keep_ratio=True),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='MultiScaleFlipAug',
        img_scale=(1000, 600),
        flip=False,
        transforms=[
            dict(type='Resize', keep_ratio=True),
            dict(type='RandomFlip'),
            dict(type='Normalize', **img_norm_cfg),
            dict(type='Pad', size_divisor=32),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img']),
        ])
]
data = dict(
    samples_per_gpu=2,
    workers_per_gpu=2,
    # Train on VOC07+12 trainval, repeated 3x per "epoch".
    train=dict(
        type='RepeatDataset',
        times=3,
        dataset=dict(
            type=dataset_type,
            ann_file='data/voc0712_trainval.json',
            img_prefix='data/VOCdevkit',
            pipeline=train_pipeline,
            classes=CLASSES)),
    # Validate and test on the VOC07 test split.
    val=dict(
        type=dataset_type,
        ann_file='data/voc07_test.json',
        img_prefix='data/VOCdevkit',
        pipeline=test_pipeline,
        classes=CLASSES),
    test=dict(
        type=dataset_type,
        ann_file='data/voc07_test.json',
        img_prefix='data/VOCdevkit',
        pipeline=test_pipeline,
        classes=CLASSES))
# COCO-style bbox mAP, evaluated every epoch.
evaluation = dict(interval=1, metric='bbox')
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=None)
# learning policy
# actual epoch = 3 * 3 = 9
lr_config = dict(policy='step', step=[3])
# runtime settings
runner = dict(
    type='EpochBasedRunner', max_epochs=4)  # actual epoch = 4 * 3 = 12
| {
"content_hash": "3f699245283a5ae3a1234440fc514a62",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 79,
"avg_line_length": 33.653333333333336,
"alnum_prop": 0.5919175911251982,
"repo_name": "open-mmlab/mmdetection",
"id": "12eee2c1ecdaa5f9e84a3bd2084b00493f2f76c0",
"size": "2524",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2540"
},
{
"name": "Python",
"bytes": "4811377"
},
{
"name": "Shell",
"bytes": "47911"
}
],
"symlink_target": ""
} |
"""
This module contains tests samples to be called by the real unittest, in order to check against unittest
"""
__author__ = "Benjamin Schubert, ben.c.schubert@gmail.com"
| {
"content_hash": "c158b28f552ff2a8cbab7ee9c5ad0bff",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 104,
"avg_line_length": 34.6,
"alnum_prop": 0.7398843930635838,
"repo_name": "BenjaminSchubert/NitPycker",
"id": "377fe12f458b9aa8ae8f8368b4a14b4cf15bb96d",
"size": "173",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nitpycker/test/test_samples/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "38792"
}
],
"symlink_target": ""
} |
'''This module contains some glue code encapsulating a "main" process.
The code here wraps the most common tasks involved in creating and, especially,
training a neural network model.
'''
import climate
import os
from . import graph
logging = climate.get_logger(__name__)
class Experiment:
    '''This class encapsulates tasks for training and evaluating a network.
    Parameters
    ----------
    network : :class:`Network <theanets.graph.Network>`, str, or callable
        A specification for obtaining a model. If a string is given, it is
        assumed to name a file containing a pickled model; this file will be
        loaded and used. If a network instance is provided, it will be used
        as the model. If a callable (such as a subclass) is provided, it
        will be invoked using the provided positional and keyword arguments
        to create a network instance.
    '''
    def __init__(self, network, *args, **kwargs):
        # Dispatch on the kind of `network` given: pickle path, ready-made
        # instance, or a Network subclass to instantiate.
        if isinstance(network, str) and os.path.isfile(network):
            self.load(network)
        elif isinstance(network, graph.Network):
            self.network = network
        else:
            # Guard against passing the abstract base class itself.
            assert network is not graph.Network, \
                'use a concrete theanets.Network subclass ' \
                'like theanets.{Autoencoder,Regressor,...}'
            self.network = network(*args, **kwargs)
    def train(self, *args, **kwargs):
        '''Train the network until the trainer converges.
        All arguments are passed to :func:`train
        <theanets.graph.Network.itertrain>`.
        Returns
        -------
        training : dict
            A dictionary of monitor values computed using the training dataset,
            at the conclusion of training. This dictionary will at least contain
            a 'loss' key that indicates the value of the loss function. Other
            keys may be available depending on the trainer being used.
        validation : dict
            A dictionary of monitor values computed using the validation
            dataset, at the conclusion of training.
        '''
        return self.network.train(*args, **kwargs)
    def itertrain(self, *args, **kwargs):
        '''Train the network iteratively.
        All arguments are passed to :func:`itertrain
        <theanets.graph.Network.itertrain>`.
        Yields
        ------
        training : dict
            A dictionary of monitor values computed using the training dataset,
            at the conclusion of training. This dictionary will at least contain
            a 'loss' key that indicates the value of the loss function. Other
            keys may be available depending on the trainer being used.
        validation : dict
            A dictionary of monitor values computed using the validation
            dataset, at the conclusion of training.
        '''
        return self.network.itertrain(*args, **kwargs)
    def save(self, path):
        '''Save the current network to a pickle file on disk.
        Parameters
        ----------
        path : str
            Location of the file to save the network.
        '''
        self.network.save(path)
    def load(self, path):
        '''Load a saved network from a pickle file on disk.
        This method sets the ``network`` attribute of the experiment to the
        loaded network model.
        Parameters
        ----------
        path : str
            Load the keyword arguments and parameters of a network from a pickle
            file at the named path. If this name ends in ".gz" then the input
            will automatically be gunzipped; otherwise the input will be treated
            as a "raw" pickle.
        Returns
        -------
        network : :class:`Network <graph.Network>`
            A newly-constructed network, with topology and parameters loaded
            from the given pickle file.
        '''
        self.network = graph.Network.load(path)
        return self.network
| {
"content_hash": "7473ed8f34b446e97c809d18d785f1d2",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 80,
"avg_line_length": 36.174311926605505,
"alnum_prop": 0.6218615267562769,
"repo_name": "devdoer/theanets",
"id": "08e61644d920384123894c126632b7548bac9abe",
"size": "3943",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "theanets/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "227443"
}
],
"symlink_target": ""
} |
import struct
import timeit
from constants import PacketIDs, PacketSizes, PacketFormats, Blocks, TransparentBlocks, TreeShape
from math import floor
from util import notch_to_string, string_to_notch
from md5 import md5
from twisted.internet.protocol import Protocol
def unpack_byte(byte):
    """Interpret the first element of *byte* as an unsigned 8-bit integer."""
    (value,) = struct.unpack("B", byte[0])
    return value
class SchnitzelProtocol(Protocol):
    """Twisted protocol speaking the Minecraft Classic wire format.

    Buffers incoming bytes, carves them into fixed-size packets keyed by
    their leading ID byte, and dispatches each packet to a handler method.
    """
    def __init__(self):
        # Raw receive buffer; complete packets are parsed out of it.
        self.buf = b""
        # Size of the packet currently awaited, or None while waiting for
        # the next packet's ID byte.
        self.packetsize = None
        # Packet-ID -> bound handler method dispatch table.
        self.handlers = {
            PacketIDs["Identification"]: self.identify,
            PacketIDs["ClientSetBlock"]: self.setblock,
            PacketIDs["PositionAndOrientation"]: self.posandort,
            PacketIDs["Message"]: self.message
        }
        self.name = None          # player name, filled in on identification
        self.ID = None            # player slot ID assigned by the factory
        self.op = False           # whether the player is an operator
        self.identified = False   # True once the login handshake completed
        # Position in fixed-point 1/32-block units, plus look angles.
        self.x = 0
        self.y = 0
        self.z = 0
        self.yaw = 0
        self.pitch = 0
        print "SchnitzelProtocol created"
    def dataReceived(self, data):
        """Accumulate incoming bytes and dispatch every complete packet."""
        self.buf += data
        while self.buf:
            if not self.packetsize:
                # New packet: peek at the ID byte to learn its length.
                byte = unpack_byte(self.buf[0])
                if byte in self.handlers:
                    self.packetsize = PacketSizes[byte]
                else:
                    # print "Error! Unhandled packet! (%s)" % byte
                    # NOTE(review): loseConnection() neither clears the
                    # buffer nor exits the loop, so an unknown packet ID
                    # appears to spin here; consider a `return` after it.
                    self.transport.loseConnection()
            else:
                if len(self.buf) >= self.packetsize:
                    byte = unpack_byte(self.buf[0])
                    self.handlers[byte](self.buf[:PacketSizes[byte]]) # Handle packet
                    self.buf = self.buf[PacketSizes[byte]:]
                    self.packetsize = None
                else:
                    # Packet not fully received yet; wait for more data.
                    break
    def connectionLost(self, reason):
        """Free the player's slot and announce the departure."""
        print "%s disconnected." % self.name
        # NOTE(review): `self.ID and` is falsy for slot 0, so a player
        # holding ID 0 is never cleaned up; probably should be
        # `self.ID is not None and self.identified`.
        if self.ID and self.identified:
            del self.factory.protocols[self.ID]
            self.factory.usedIDs.remove(self.ID)
            self.factory.sendPacketSkip(self, PacketIDs["DespawnPlayer"], self.ID)
            if self.op:
                self.factory.sendMessage("[OP] %s disconnected" % self.name)
            else:
                self.factory.sendMessage("%s disconnected" % self.name)
        # NOTE(review): unused assignment; looks like a leftover from a
        # removed log/format statement.
        args = (self.name, self.x/32, self.y/32, self.z/32)
    def sendPacket(self, *packet):
        """Pack *packet* (ID byte followed by its fields) and write it out."""
        format = PacketFormats[packet[0]]
        packet = struct.pack(format, *packet)
        self.transport.write(packet)
    def unpackPacket(self, data):
        """Unpack raw packet bytes using the struct format for their ID."""
        format = PacketFormats[unpack_byte(data[0])]
        return struct.unpack(format, data)
    def identify(self, data):
        """Handle the login packet: verify the key, stream the level,
        acquire a slot and spawn the player into the world."""
        packet = self.unpackPacket(data)
        self.name = notch_to_string(packet[2])
        key = notch_to_string(packet[3])
        print "%s connected." % self.name
        if not self.factory.config["noverify"]:
            # minecraft.net authentication: key must equal md5(salt + name).
            if key != "--" and key == md5("%s%s" % (self.factory.salt, self.name)).hexdigest():
                print " +- verified with minecraft.net"
            else:
                print " +- player is forging the username"
        if self.name in self.factory.config["ops"]:
            self.op = True
            print " +- player is op"
        # Send welcome
        name = string_to_notch(self.factory.config["name"])
        motd = string_to_notch(self.factory.config["motd"])
        # 0x64 marks the player as op in the identification response.
        op = 0x64 if self.op else 0x00
        self.sendPacket(PacketIDs["Identification"], 0x07, name, motd, op)
        # Send level: gzipped map streamed in zero-padded 1 KiB chunks,
        # each carrying a 0-255 progress byte.
        gzippedmap = self.factory.world.gzip(numblocks = True)
        totallen = len(gzippedmap)
        currentlen = 0
        self.sendPacket(PacketIDs["LevelInitialize"])
        while gzippedmap:
            chunk = gzippedmap[:1024].ljust(1024, '\0')
            gzippedmap = gzippedmap[1024:]
            currentlen += len(chunk)
            pc = int(currentlen/totallen * 255)
            self.sendPacket(PacketIDs["LevelDataChunk"], len(chunk), chunk, pc)
        size = self.factory.world.x, self.factory.world.y, self.factory.world.z
        self.sendPacket(PacketIDs["LevelFinalize"], *size)
        # Acquire ID: first free slot below maxplayers.
        for i in range(self.factory.config["maxplayers"]):
            if not i in self.factory.usedIDs:
                self.factory.protocols[i] = self
                self.factory.usedIDs.append(i)
                self.ID = i
                break
        else:
            # No free slot -- should have been prevented earlier.
            print "wait... hold on a second, that wasn't supposed to happen"
            self.transport.loseConnection()
        self.identified = True
        # Set position (spawn near the middle of the map, above it).
        self.x = self.factory.world.x*16
        self.y = self.factory.world.y*32
        self.z = self.factory.world.z*16
        # Spawn rest of world to client
        for i in self.factory.protocols.itervalues():
            if i != self and i.ID != None:
                name = string_to_notch(i.name)
                pos = (i.x, i.y, i.z, i.yaw, i.pitch)
                self.sendPacket(PacketIDs["SpawnPlayer"], i.ID, name, *pos)
        # Spawn client to rest of world
        name = string_to_notch(self.name)
        pos = (self.x, self.y, self.z, self.yaw, self.pitch)
        pid = PacketIDs["SpawnPlayer"]
        self.factory.sendPacketSkip(self, pid, self.ID, name, *pos)
        if self.op:
            self.factory.sendMessage("[OP] %s joined the server" % self.name)
        else:
            self.factory.sendMessage("%s joined the server" % self.name)
        # Teleport client (ID 255 means "yourself" in the classic protocol
        # -- presumably; verify against the protocol docs).
        self.sendPacket(PacketIDs["PositionAndOrientation"], 255, *pos)
        # NOTE(review): unused assignment; leftover from a removed log line.
        args = (self.name, self.x/32, self.y/32, self.z/32)
    def posandort(self, data):
        """Handle a movement packet: rebroadcast only if the pose changed."""
        packet = self.unpackPacket(data)
        newpos = tuple(packet[2:8])
        oldpos = (self.x, self.y, self.z, self.yaw, self.pitch)
        if newpos != oldpos:
            pid = PacketIDs["PositionAndOrientation"]
            self.factory.sendPacketSkip(self, pid, self.ID, *newpos)
    def setblock(self, data):
        """Handle a client block change, including the 'magic wand'
        easter eggs, slab stacking, trees, and grass shadowing."""
        packet = self.unpackPacket(data)
        x, y, z = packet[1:4]
        originalbtype = packet[5]
        created = bool(packet[4])
        # Destroying a block places air.
        btype = originalbtype if created else Blocks["Air"]
        below = self.factory.world.block(x, y-1, z)
        # Magic wand: right-clicking with a red mushroom transmutes the
        # targeted block (when enabled in the config).
        if (not created and originalbtype == Blocks["RedMushroom"]
                and self.factory.config["magicwand"]):
            block = self.factory.world.block(x, y, z)
            if block == Blocks["BlueCloth"]:
                btype = Blocks["StationaryWater"]
            elif block == Blocks["OrangeCloth"]:
                btype = Blocks["StationaryLava"]
            elif block == Blocks["Sapling"]:
                # Grow a tree: place every offset block of the tree shape.
                for i in TreeShape:
                    coords = i[1][0]+x, i[1][1]+y, i[1][2]+z, i[0]
                    if i is TreeShape[0]:
                        x, y, z, btype = coords
                    else:
                        self.factory.world.block(*coords)
                        self.factory.sendPacket(PacketIDs["SetBlock"], *coords)
            elif block == Blocks["LimeCloth"] or block == Blocks["Dirt"]:
                btype = Blocks["Grass"]
        elif btype == Blocks["Slab"] and below == Blocks["Slab"]:
            # Stacking a slab on a slab merges them into a double slab.
            self.sendPacket(PacketIDs["SetBlock"], x, y, z, Blocks["Air"])
            y -= 1
            btype = Blocks["DoubleSlab"]
        self.factory.world.block(x, y, z, btype)
        self.factory.sendPacket(PacketIDs["SetBlock"], x, y, z, btype)
        # An opaque block shades the first grass block beneath it into dirt.
        if not btype in TransparentBlocks:
            for i in range(y):
                if self.factory.world.block(x, i, z) == Blocks["Grass"]:
                    ntype = Blocks["Dirt"]
                    self.factory.world.block(x, i, z, ntype)
                    self.factory.sendPacket(PacketIDs["SetBlock"], x, i, z, ntype)
                    break
    def message(self, data):
        """Handle a chat packet: prefix the sender and broadcast it."""
        packet = self.unpackPacket(data)
        msg = notch_to_string(packet[2])
        # Chat color is derived from the player's slot ID (one hex digit).
        color = "&" + hex(self.ID % 16)[2]
        msg = ("[OP] " if self.op else "") + color + self.name + ":&f " + msg
        self.factory.sendMessage(msg, self.ID)
| {
"content_hash": "0c7698e92a3006ef74d6390aa4a0b1d2",
"timestamp": "",
"source": "github",
"line_count": 207,
"max_line_length": 97,
"avg_line_length": 39.729468599033815,
"alnum_prop": 0.5397616731517509,
"repo_name": "TazeTSchnitzel/SchnitzelCraft",
"id": "550e881e4a70eb6b02a58fe29de64315b285f89a",
"size": "8224",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "schnitzelcraft/protocols/schnitzel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17026"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# URL routes for the sketching demo project.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'sketching.views.home', name='home'),
    # url(r'^sketching/', include('sketching.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
    # django-socketio transport endpoints, mounted at the root prefix.
    url("", include('django_socketio.urls')),
    url(r'^$', 'sketching.views.home', name='home'),
    url(r'^test', 'sketching.views.msg_test', name='msg_test'),
    url(r'^review', 'sketching.views.review', name='review'),
    # Serve the favicon by redirecting to the static copy.
    url(r'^favicon\.ico$', 'django.views.generic.simple.redirect_to', {'url': '/static/favicon.ico'}),
)
| {
"content_hash": "2b22c20e3c8a8829f083350e51a62c03",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 102,
"avg_line_length": 40.59090909090909,
"alnum_prop": 0.6606942889137738,
"repo_name": "ajfisher/sketching-conf-demo",
"id": "a2d0c62966ac104d78c96b38212b2a20de25e1e2",
"size": "893",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sketching/sketching/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Arduino",
"bytes": "19516"
},
{
"name": "C",
"bytes": "2187"
},
{
"name": "C++",
"bytes": "6404"
},
{
"name": "JavaScript",
"bytes": "1312"
},
{
"name": "Objective-C",
"bytes": "4973"
},
{
"name": "Python",
"bytes": "9469"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import *
from django.views.generic import list_detail, create_update
from models import *
import views
from django.core.urlresolvers import reverse
# Keyword arguments for the ClassRule generic list/detail views.
classrule_info = {
    'queryset': ClassRule.objects.all(),
    'template_name': 'display_classrule.html',
}
# Keyword arguments for the ClassRule create/update generic views.
classrule_form = {
    'form_class': ClassRuleForm,
    'template_name': 'add.html',
}
# Keyword arguments for the ClassRule delete generic view.
classrule_delete = {
    'model': ClassRule,
    'post_delete_redirect': '../..',
    'template_name': 'delete_confirm_classrule.html',
}
urlpatterns = patterns('',
    url(r'^networkaddress/$', views.networkaddress_display, name='networkaddress-displaytop'),
    url(r'^networkaddress/add/$', views.networkaddress_add, name='networkaddress-addtop'),
    # CIDR-addressed network views (address pattern: a.b.c.d/nn).
    url(r'^networkaddress/(?P<address>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,2})/$', views.networkaddress_display, name='networkaddress-display'),
    url(r'^networkaddress/(?P<address>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,2})/delete/$', views.networkaddress_delete, name='networkaddress-delete'),
    url(r'^networkaddress/(?P<address>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,2})/add/$', views.networkaddress_add, name='networkaddress-add'),
    url(r'^networkaddress/(?P<address>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,2})/modify/$', views.networkaddress_modify, name='networkaddress-modify'),
    url(r'^networkaddress/(?P<address>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,2})/add_dhcp/$', views.dhcpnetwork_add, name='networkaddress-adddhcp'),
    url(r'^networkaddress/(?P<address>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/ping/$', views.networkaddress_ping, name='networkaddress-ping'),
    # NOTE(review): this regex duplicates 'networkaddress-displaytop' above;
    # Django resolves the first match, so this route is unreachable by URL
    # (the name remains usable for reverse()) -- confirm intent.
    url(r'^networkaddress/$', views.networkaddress_ping, name='networkaddress-ping-url'),
    url(r'^dhcpnetwork/(?P<address>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,2})/$', views.dhcpnetwork_display, name='dhcpnetwork-display'),
    url(r'^dhcpnetwork/(?P<address>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,2})/delete/$', views.dhcpnetwork_delete, name='dhcpnetwork-delete'),
    url(r'^dhcpnetwork/(?P<address>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,2})/modify/$', views.dhcpnetwork_modify, name='dhcpnetwork-modify'),
    url(r'^dhcpnetwork/(?P<address>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,2})/add_dhcppool/$', views.dhcpaddresspool_add, name='dhcpnetwork-addpool'),
    url(r'^dhcpaddresspool/add/$', views.dhcpaddresspool_add),
    url(r'^dhcpaddresspool/(?P<range_start>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/(?P<range_finish>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/$', views.dhcpaddresspool_display, name='dhcpaddresspool-display'),
    url(r'^dhcpaddresspool/(?P<range>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/delete/$', views.dhcpaddresspool_delete, name='dhcpaddresspool-delete'),
    # ClassRule CRUD via Django generic views.
    url(r'^classrule/$', list_detail.object_list, classrule_info, name='classrule-displaytop'),
    url(r'^classrule/(?P<object_id>\d+)/$', list_detail.object_detail, classrule_info, name='classrule-display'),
    url(r'^classrule/(?P<object_id>\d+)/modify/$', create_update.update_object, classrule_form, name='classrule-modify'),
    url(r'^classrule/(?P<object_id>\d+)/delete/$', create_update.delete_object, classrule_delete, name='classrule-delete'),
    url(r'^classrule/add/$', create_update.create_object, classrule_form, name='classrule-add'),
    # Render the dhcpd.conf configuration from the stored data.
    url(r'^dhcpd.conf/$', views.dhcpd_conf_generate, name='dhcp-conf-generate'),
)
| {
"content_hash": "a40481a5c8a2af526b72999ec6e79aa2",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 199,
"avg_line_length": 66.29411764705883,
"alnum_prop": 0.6498077491866312,
"repo_name": "rytis/IP-address-management-tool",
"id": "096ba6092a0f29ada8ab3952f8bad52474fa52ad",
"size": "3381",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ip_addresses/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "5013"
},
{
"name": "Python",
"bytes": "19591"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import *
from disciplines.models import Discipline
# Routes for the disciplines app; view names resolve against 'disciplines.views'.
urlpatterns = patterns('disciplines.views',
    url(r'^$', 'disciplines', name='discipline_list'),
    url(r'^my_expertise/$', 'my_expertise', name="my_expertise"),
    url(r'^discipline/(?P<slug>[-\w]+)/$', 'discipline', name="discipline_detail"),
)
| {
"content_hash": "94005d50d4cdfd84f2a16d44951ee889",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 83,
"avg_line_length": 38.22222222222222,
"alnum_prop": 0.6627906976744186,
"repo_name": "caseywstark/colab",
"id": "3306a04d94abaf6020bbd8b403ebca21b8921a9f",
"size": "344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "colab/apps/disciplines/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "79619"
},
{
"name": "Python",
"bytes": "483018"
}
],
"symlink_target": ""
} |
"""Package contenant l'éditeur 'hedit'.
Si des redéfinitions de contexte-éditeur standard doivent être faites, elles
seront placées dans ce package
Note importante : ce package contient la définition d'un éditeur, mais
celui-ci peut très bien être étendu par d'autres modules. Au quel cas,
les extensions n'apparaîtront pas ici.
"""
from primaires.interpreteur.editeur.presentation import Presentation
from primaires.interpreteur.editeur.description import Description
from primaires.interpreteur.editeur.uniligne import Uniligne
from primaires.interpreteur.editeur.choix import Choix
from .edt_resume import EdtResume
from .edt_mots_cles import EdtMotscles
from .edt_lies import EdtLies
from .edt_fils import EdtFils
from .supprimer import NSupprimer
class EdtHedit(Presentation):
    """Editor for a help topic ('hedit').

    Presents the help topic's editable fields (title, contents, access
    group, keywords, linked topics, child topics, deletion) as a menu.
    """
    nom = "hedit"
    def __init__(self, personnage, sujet):
        """Build the editor for `sujet`, attached to `personnage`'s connection."""
        if personnage:
            instance_connexion = personnage.instance_connexion
        else:
            instance_connexion = None
        Presentation.__init__(self, instance_connexion, sujet)
        if personnage and sujet:
            self.construire(sujet)
    def __getnewargs__(self):
        # Pickle support: recreate with empty arguments.
        return (None, None)
    def construire(self, sujet):
        """Build the editor's menu entries for the help topic `sujet`."""
        # Title
        titre = self.ajouter_choix("titre", "t", Uniligne, sujet, "titre")
        titre.parent = self
        titre.prompt = "Titre du sujet : "
        titre.apercu = "{objet.titre}"
        titre.aide_courte = \
            "Entrez le |ent|titre|ff| du sujet d'aide ou |cmd|/|ff| pour " \
            "revenir à la fenêtre parente.\n\nTitre actuel : " \
            "|bc|{objet.titre}|ff|"
        # Contents
        contenu = self.ajouter_choix("contenu", "c", Description, \
                sujet, "contenu")
        contenu.parent = self
        contenu.apercu = "{objet.contenu.paragraphes_indentes}"
        contenu.aide_courte = \
            "| |tit|" + "Contenu du sujet d'aide {}".format(sujet).ljust(76) + \
            "|ff||\n" + self.opts.separateur
        # Access group
        str_groupes = sorted(
                type(self).importeur.interpreteur.groupes.nom_groupes)
        groupe = self.ajouter_choix("groupe d'utilisateurs", "o", Choix,
                sujet, "str_groupe", str_groupes)
        groupe.parent = self
        groupe.prompt = "Groupe d'utilisateurs du sujet : "
        groupe.apercu = "{objet.str_groupe}"
        groupe.aide_courte = \
            "Entrez le |ent|groupe|ff| pouvant accéder au sujet d'aide ou " \
            "|cmd|/|ff| pour revenir à la\nfenêtre parente.\n" \
            "Groupes disponibles : |ent|" + "|ff|, |ent|".join(
            str_groupes) + "|ff|.\n\n" \
            "Groupe actuel : |bc|{objet.str_groupe}|ff|"
        # Keywords
        mots_cles = self.ajouter_choix("mots-clés", "m", EdtMotscles, sujet)
        mots_cles.parent = self
        mots_cles.prompt = "Entrez un mot-clé :"
        mots_cles.apercu = "{objet.str_mots_cles}"
        mots_cles.aide_courte = \
            "Entrez un |ent|nouveau mot-clé|ff| pour l'ajouter à la liste, " \
            "un |ent|mot-clé existant|ff| pour\nle supprimer ou |cmd|/|ff| " \
            "pour revenir à la fenêtre précédente.\n" \
            "Mots-clés de ce sujet : |bc|{objet.str_mots_cles}|ff|"
        # Linked topics
        lies = self.ajouter_choix("sujets liés", "l", EdtLies, sujet)
        lies.parent = self
        lies.prompt = "Entrez le nom d'un sujet :"
        lies.aide_courte = \
            "Entrez la |ent|clé|ff| d'un sujet pour l'ajouter à la liste " \
            "des sujets liés ou\nl'en supprimer ; / pour revenir à la " \
            "fenêtre précédente.\n" \
            "Sujets liés à celui-ci : |bc|{objet.str_sujets_lies}|ff|"
        # Child topics
        fils = self.ajouter_choix("sujets fils", "f", EdtFils, sujet)
        fils.parent = self
        fils.aide_courte = \
            "Entrez la |ent|clé|ff| d'un sujet. Si il n'est pas dans la " \
            "liste des sujets liés,\n" \
            "il y sera ajouté, sinon il en sera supprimé.\n" \
            "Options :\n" \
            " - |ent|/u <sujet fils>|ff| : déplace un sujet vers le haut de " \
            "la liste\n" \
            " - |ent|/d <sujet fils>|ff| : déplace le sujet vers le bas\n\n" \
            "{objet.tab_sujets_fils}"
        # Deletion
        suppression = self.ajouter_choix("supprimer", "sup", NSupprimer, \
                sujet)
        suppression.parent = self
        suppression.aide_courte = "Souhaitez-vous réellement supprimer " \
                "le sujet d'aide '{}' ?".format(sujet.titre)
        suppression.confirme = "Le sujet d'aide '{}' a bien été " \
                "supprimé.".format(sujet.titre)
| {
"content_hash": "0f5dfe9c6d58cba72aa76661aee42c82",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 80,
"avg_line_length": 40.8099173553719,
"alnum_prop": 0.5826245443499393,
"repo_name": "stormi/tsunami",
"id": "150f4ad78dd051a5e8c2562a878fa4f028289e83",
"size": "6555",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/primaires/information/editeurs/hedit/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "7188300"
},
{
"name": "Ruby",
"bytes": "373"
}
],
"symlink_target": ""
} |
import shlex
from operator import mul
import os.path
import numpy as np
from functools import reduce
class GameReader(object):
    '''
    Read games from different file formats (.nfg payoff, .nfg outcome), see
    http://www.gambit-project.org/doc/formats.html for more information.
    '''

    def __init__(self):
        # Parsed game description; repopulated on every read.
        self.game = {}

    def readStr(self, string):
        """
        Base function that converts text to tokens and determines which
        nfg flavour (payoff or outcome) the string uses.

        :param string: string with nfg formatted text
        :type string: str
        :return: dictionary with game information ('name', 'players',
                 'num_players', 'shape', 'sum_shape', 'array')
        :rtype: dict
        :raise: Exception, if the string is not in the specified format
        """
        self.game.clear()
        self.tokens = shlex.split(string)
        preface = ["NFG", "1", "R"]
        if self.tokens[:3] != preface:
            raise Exception("Input string is not valid nfg format")
        self.game['name'] = self.tokens[3]
        # Positions of every '{'/'}' token; the payoff format has exactly
        # two brace pairs (players, shape), the outcome format has more.
        self.brackets = [i for i, x in enumerate(
            self.tokens) if x == "{" or x == "}"]
        if len(self.brackets) == 4:
            self._nfgPayoff()
        else:
            self._nfgOutcome()
        self.game['sum_shape'] = sum(self.game['shape'])
        # One Fortran-ordered payoff array per player. nfg files list
        # payoffs with the first player's strategy varying fastest, which
        # matches column-major (F-order) iteration.
        self.game['array'] = []
        for player in range(self.game['num_players']):
            self.game['array'].append(np.ndarray(
                self.game['shape'], dtype=float, order="F"))
        it = np.nditer(self.game['array'][0], flags=['multi_index', 'refs_ok'])
        index = 0
        while not it.finished:
            for player in range(self.game['num_players']):
                self.game['array'][player][
                    it.multi_index] = self.payoffs[index][player]
            it.iternext()
            index += 1
        return self.game

    def readFile(self, file):
        """
        Read content of nfg file.

        :param file: path to file
        :type file: str
        :return: dictionary with game information
        :rtype: dict
        """
        with open(file) as f:
            return self.readStr(f.read())

    def _nfgPayoff(self):
        """
        Read tokens in nfg *payoff* format into ``self.payoffs``
        (one list of per-player payoffs per strategy profile).
        """
        self.game['players'] = self.tokens[
            self.brackets[0] + 1:self.brackets[1]]
        self.game['num_players'] = len(self.game['players'])
        self.game['shape'] = self.tokens[self.brackets[2] + 1:self.brackets[3]]
        self.game['shape'] = list(map(int, self.game['shape']))
        payoffs_flat = self.tokens[self.brackets[3] + 1:self.brackets[3] + 1 +
                                   reduce(mul, self.game['shape']) * self.game['num_players']]
        payoffs_flat = list(map(float, payoffs_flat))
        self.payoffs = []
        # Regroup the flat list into one payoff vector per profile.
        for i in range(0, len(payoffs_flat), self.game['num_players']):
            self.payoffs.append(payoffs_flat[i:i + self.game['num_players']])

    def _nfgOutcome(self):
        """
        Read tokens in nfg *outcome* format into ``self.payoffs``.
        """
        # Match every '{' with its corresponding closing '}'.
        brackets_pairs = []
        for i in self.brackets:
            if self.tokens[i] == "{":
                brackets_pairs.append([i])
            if self.tokens[i] == "}":
                pair = -1
                while len(brackets_pairs[pair]) != 1:
                    pair -= 1
                brackets_pairs[pair].append(i)
        self.game['players'] = self.tokens[
            self.brackets[0] + 1:self.brackets[1]]
        self.game['num_players'] = len(self.game['players'])
        i = 2
        self.game['shape'] = []
        # Pairs nested inside the second outer pair hold the per-player
        # strategy name lists; their lengths give the game shape.
        while brackets_pairs[i][1] < brackets_pairs[1][1]:
            self.game['shape'].append(brackets_pairs[
                i][1] - brackets_pairs[i][0] - 1)
            i += 1
        after_brackets = brackets_pairs[i][1] + 1
        i += 1
        # Outcome 0 is the implicit all-zero outcome.
        outcomes = [[0] * self.game['num_players']]
        for i in range(i, len(brackets_pairs)):
            # Skip '{' and the outcome label, then strip trailing commas.
            # Bug fix: str.translate(None, ',') is the Python 2 API and
            # raises TypeError on Python 3; str.replace is equivalent here.
            outcomes.append(
                [float(x.replace(',', '')) for x in self.tokens[brackets_pairs[i][0] + 2:brackets_pairs[i][1]]])
        self.payoffs = [outcomes[out]
                        for out in map(int, self.tokens[after_brackets:])]
def read(content):
    """Parse *content* as an nfg game: treat it as a file path when one
    exists on disk, otherwise as the nfg text itself."""
    reader = GameReader()
    if os.path.isfile(content):
        result = reader.readFile(content)
    else:
        result = reader.readStr(content)
    return result
| {
"content_hash": "ca7b87a718b0e11a4b92b428699a565c",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 116,
"avg_line_length": 35.90833333333333,
"alnum_prop": 0.5351589695985147,
"repo_name": "Artimi/neng",
"id": "074be62b3255962869c1e3cf47439f3488af3d4a",
"size": "5451",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neng/game_reader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "78643"
}
],
"symlink_target": ""
} |
"""Setup file for installing the BLE."""
import os
import pathlib
import setuptools
from setuptools.command import build_py
from setuptools.command import develop
# Directory containing this setup.py; used to locate the README.
current_directory = pathlib.Path(__file__).parent
# Long description for PyPI, taken verbatim from the README.
description = (current_directory / 'README.md').read_text()
# Dependencies required by the core package.
core_requirements = [
    'absl-py',
    'dopamine-rl >= 4.0.0',
    'flax',
    'gin-config',
    'gym',
    'jax >= 0.2.28',
    'jaxlib >= 0.1.76',
    'opensimplex <= 0.3.0',
    's2sphere',
    'scikit-learn',
    'tensorflow',
    'tensorflow-probability',
    'transitions',
]
# Extra dependencies needed only for the Acme-based components
# (installed via the 'acme' extra).
acme_requirements = [
    'dm-acme',
    'dm-haiku',
    'dm-reverb',
    'dm-sonnet',
    'rlax',
    'xmanager',
]
def generate_requirements_file(path=None):
    """Generates requirements.txt file needed for running Acme.

    It is used by Launchpad GCP runtime to generate Acme requirements to be
    installed inside the docker image. Acme itself is not installed from pypi,
    but instead sources are copied over to reflect any local changes made to
    the codebase.

    Args:
      path: path to the requirements.txt file to generate; defaults to
        'acme_requirements.txt' next to this file.
    """
    if not path:
        path = os.path.join(os.path.dirname(__file__), 'acme_requirements.txt')
    # Deduplicate core + acme dependencies and write one package per line.
    packages = set(core_requirements + acme_requirements)
    with open(path, 'w') as requirements_file:
        requirements_file.writelines(f'{package}\n' for package in packages)
class BuildPy(build_py.build_py):
  """`build_py` command that regenerates acme_requirements.txt first."""
  def run(self):
    # Keep the generated requirements file in sync before building.
    generate_requirements_file()
    build_py.build_py.run(self)
class Develop(develop.develop):
  """`develop` command that regenerates acme_requirements.txt first."""
  def run(self):
    # Keep the generated requirements file in sync for editable installs.
    generate_requirements_file()
    develop.develop.run(self)
# Hook the custom commands into setuptools.
cmdclass = {
    'build_py': BuildPy,
    'develop': Develop,
}
# Register the environment with gym at import time via the plugin entry point.
entry_points = {
    'gym.envs': [
        '__root__=balloon_learning_environment.env.gym:register_env'
    ]
}
setuptools.setup(
    name='balloon_learning_environment',
    long_description=description,
    long_description_content_type='text/markdown',
    version='1.0.1',
    cmdclass=cmdclass,
    packages=setuptools.find_packages(),
    install_requires=core_requirements,
    extras_require={
        'acme': acme_requirements,
    },
    # Ship bundled model/config assets alongside the Python sources.
    package_data={
        '': ['*.msgpack', '*.pb', '*.gin'],
    },
    entry_points=entry_points,
    python_requires='>=3.7',
)
| {
"content_hash": "c7d7699e242f610cd5f93fe439650545",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 76,
"avg_line_length": 23.042105263157893,
"alnum_prop": 0.6500685244403838,
"repo_name": "google/balloon-learning-environment",
"id": "345e34a7d6bf14a08366aa0aaaefde67070a65d9",
"size": "2810",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "164596"
},
{
"name": "Python",
"bytes": "317743"
}
],
"symlink_target": ""
} |
"""
Django settings for djangoisfsite project.
Generated by 'django-admin startproject' using Django 1.9.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): placeholder value — replace (e.g. from an environment
# variable) before any production deployment.
SECRET_KEY = 'replace-this-with-production-key'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# An empty list is only acceptable while DEBUG is True; list the served
# hostnames here before deploying.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
# Request/response middleware, applied in order (Django 1.9 style setting;
# this name was replaced by MIDDLEWARE in Django 1.10).
MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'djangoisfsite.urls'
# Default Django template engine; APP_DIRS makes each app's templates/
# directory discoverable.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'djangoisfsite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# NOTE(review): sqlite creates the .db file on demand but not the 'data'
# directory — ensure it exists before the first run.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'data', 'djangoisfsite.db'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| {
"content_hash": "8a6d62309c9fee4f1dac030799cb47c5",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 91,
"avg_line_length": 26.305785123966942,
"alnum_prop": 0.6927426955702167,
"repo_name": "letuananh/intsem.fx",
"id": "908962e4c73caa69a2be9c8cdb6e1f57f4499bb6",
"size": "3183",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev-0.2.3",
"path": "djangoisfsite/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4388639"
},
{
"name": "Shell",
"bytes": "3771"
},
{
"name": "TSQL",
"bytes": "3718"
}
],
"symlink_target": ""
} |
from ftw import logchecker, testrunner
import pytest
import random
class LoggerTestObj(logchecker.LogChecker):
    """LogChecker stub that fabricates log lines for the integration tests."""

    def __init__(self):
        # When True, behave as if the server produced no log output at all.
        self.do_nothing = False

    def generate_random_logs(self):
        """Return one fake log line with a random rule id, or nothing.

        ``self.start`` is provided by the LogChecker base class —
        TODO confirm its exact type against the ftw framework.
        """
        if self.do_nothing:
            return []
        rule_id = random.randint(10, 99)
        return ['%s rule-id-%s' % (self.start, rule_id)]

    def get_logs(self):
        return self.generate_random_logs()
@pytest.fixture
def logchecker_obj():
    """Return a fresh LoggerTestObj instance for each test that requests it."""
    return LoggerTestObj()
def test_logcontains_withlog(logchecker_obj, ruleset, test):
    """Run every stage while the checker emits a matching fake log line."""
    stage_runner = testrunner.TestRunner()
    for current_stage in test.stages:
        stage_runner.run_stage(current_stage, logchecker_obj)
def test_logcontains_nolog(logchecker_obj, ruleset, test):
    """Run every stage while the checker deliberately returns no logs."""
    logchecker_obj.do_nothing = True
    stage_runner = testrunner.TestRunner()
    for current_stage in test.stages:
        stage_runner.run_stage(current_stage, logchecker_obj)
| {
"content_hash": "8654785d58a398106bfa2c8f143c11ce",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 60,
"avg_line_length": 24.425,
"alnum_prop": 0.6438075742067554,
"repo_name": "CRS-support/ftw",
"id": "d414384ebd69685c1f3ecfbce838908cd246f47f",
"size": "977",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/integration/test_nologcontains.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "73544"
}
],
"symlink_target": ""
} |
#!/usr/bin/env python
import os
import csv, sys, json
def run_rnaseq_docker(basename_I,
                host_dirname_I,
                organism_I,
                host_indexes_dir_I,
                host_dirname_O,
                paired_I='paired',
                threads_I=2,trim3_I=3,
                library_type_I='fr-firststrand',
                index_type_I = '.gtf',
                bowtie_options_I = '',
                cufflinks_options_I = '',
                ):
    '''Process RNA sequencing data
    INPUT:
    basename_I = base name of the fastq files
    host_dirname_I = directory for .fastq files
    organism_I = name of index
    host_indexes_dir_I = directory for indexes
    local_dirname_I = location for temporary output
    host_dirname_O = location for output on the host
    index_type_I = string for index extention (e.g., '.gtf' or '.gff')
    EXAMPLE:
    basename_I = 140818_11_OxicEvo04EcoliGlcM9_Broth-4
    host_dirname_I = /media/proline/dmccloskey/Resequencing_RNA/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4/ (remote storage location)
    organism_I = e_coli
    host_indexes_dir_I = /media/proline/dmccloskey/Resequencing_RNA/indexes/ (remote storage location)
    local_dirname_I = /home/douglas/Documents/Resequencing_RNA/ (local host location)
    host_dirname_O = /media/proline/dmccloskey/Resequencing_RNA/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4/ (remote storage location)
    '''
    # NOTE(review): all arguments are interpolated into shell commands
    # unescaped (os.system); callers must pass trusted values only.
    #1. create a container named rnaseq using sequencing utilities
    #1. create a container named rnaseqdata using sequencing utilities
    #2. mount the host file into rnaseqdata
    #2. mount the rnaseqdata volumes into rnaseq
    #3. run docker
    #data_mount_1 = '/media/Sequencing/fastq/'
    #data_mount_2 = '/media/Sequencing/indexes/'
    #docker_mount_1 = '/home/user/Sequencing/fastq/'
    #docker_mount_2 = '/home/user/Sequencing/indexes/'
    #datacontainer_name = 'rnaseqdata';
    # Fixed mount points inside the container for input fastq files and
    # alignment indexes; output is written to user_output inside the container.
    docker_mount_1 = '/media/Sequencing/fastq/'
    docker_mount_2 = '/media/Sequencing/indexes/'
    user_output = '/home/user/Sequencing/output/'
    container_name = 'rnaseq';
    #make the processing container
    # Build the python call that runs process_rnaseq inside the container.
    rnaseq_cmd = ("process_rnaseq('%s','%s','%s','%s','%s',paired='%s',threads=%s,trim3=%s,library_type='%s',\
    index_type='%s',bowtie_options_I='%s',cufflinks_options_I='%s');" %\
    (basename_I, docker_mount_1,user_output,organism_I,docker_mount_2,paired_I,threads_I,trim3_I,library_type_I,
    index_type_I,bowtie_options_I,cufflinks_options_I));
    python_cmd = ("from sequencing_utilities.rnaseq import process_rnaseq;%s" %(rnaseq_cmd));
    docker_run = ('docker run --name=%s -v %s:%s -v %s:%s -u=root dmccloskey/sequencing_utilities python3 -c "%s"' %(container_name,host_dirname_I,docker_mount_1,host_indexes_dir_I,docker_mount_2,python_cmd));
    os.system(docker_run);
    ##make the data container (avoid permission errors)
    #bash_cmd = ('cp -R %s %s && cp -R %s %s' %(data_mount_1,docker_mount_1,data_mount_2,docker_mount_2));
    #docker_run = ('docker run --name=%s -v %s:%s -v %s:%s dmccloskey/sequencing_utilities bash -c "%s"' %(datacontainer_name,host_dirname_I,data_mount_1,host_indexes_dir_I,data_mount_2,bash_cmd));
    #os.system(docker_run);
    ##make the processing container
    #rnaseq_cmd = ("process_rnaseq('%s','%s','%s','%s','%s',paired='%s',threads=%s,trim3=%s);" %(basename_I, docker_mount_1,user_output,organism_I,docker_mount_2,paired_I,threads_I,trim3_I));
    #python_cmd = ("from sequencing_utilities.rnaseq import process_rnaseq;%s" %(rnaseq_cmd));
    #docker_run = ('docker run --name=%s --volumes-from=%s dmccloskey/sequencing_utilities python3 -c "%s"' %(container_name,datacontainer_name,python_cmd));
    #os.system(docker_run);
    #copy the gff file out of the docker container into a host location
    # Copy the .bam/.gff/.sam outputs and the per-sample directory from the
    # (now stopped) container back to the host output directory.
    docker_cp = ("docker cp %s:%s%s.bam %s" %(container_name,user_output,basename_I,host_dirname_O));
    os.system(docker_cp);
    docker_cp = ("docker cp %s:%s%s.gff %s" %(container_name,user_output,basename_I,host_dirname_O));
    os.system(docker_cp);
    docker_cp = ("docker cp %s:%s%s.sam %s" %(container_name,user_output,basename_I,host_dirname_O));
    os.system(docker_cp);
    docker_cp = ("docker cp %s:%s%s/ %s" %(container_name,user_output,basename_I,host_dirname_O));
    os.system(docker_cp);
    #delete the container and the container content:
    cmd = ('docker rm -v %s' %(container_name));
    #cmd = ('docker rm -v %s' %(datacontainer_name));
    os.system(cmd);
def run_rnaseq_docker_fromCsvOrFile(filename_csv_I=None, filename_list_I=None):
    '''Call run_rnaseq_docker for every entry of a csv file or a list of dicts.

    INPUT:
    filename_csv_I = optional path to a .csv whose rows carry the keyword
        arguments of run_rnaseq_docker; takes precedence over the list
    filename_list_I = [{basename_I:...,host_dirname_I:...,},...]
    '''
    # Avoid the mutable-default-argument pitfall; None means "no list given".
    if filename_list_I is None:
        filename_list_I = []
    if filename_csv_I:
        filename_list_I = read_csv(filename_csv_I)
    for row in filename_list_I:
        # Echo progress through the shell (keeps the original behaviour of
        # logging via `echo` rather than print).
        os.system("echo running rnaseq for basename %s" % (row['basename_I']))
        run_rnaseq_docker(
            row['basename_I'],
            row['host_dirname_I'],
            row['organism_I'],
            row['host_indexes_dir_I'],
            row['host_dirname_O'],
            row['paired_I'],
            row['threads_I'],
            row['trim3_I'],
            row['library_type_I'],
            row['index_type_I'],
            row['bowtie_options_I'],
            row['cufflinks_options_I'])
def read_csv(filename):
    """Read a csv file into a list of dicts, one dict per row.

    Args:
        filename: path to a csv file whose first row holds the column names.

    Returns:
        list of dicts mapping column name -> cell value (as strings), in
        file order.

    Exits the process via sys.exit on a malformed csv or a missing file,
    preserving the original command-line-tool behaviour.
    """
    data_O = []
    try:
        with open(filename, 'r') as csvfile:
            reader = csv.DictReader(csvfile)
            try:
                for row in reader:
                    data_O.append(row)
            except csv.Error as e:
                sys.exit('file %s, line %d: %s' % (filename, reader.line_num, e))
    except IOError as e:
        sys.exit('%s does not exist' % e)
    return data_O
def main_singleFile():
    """Command line front end: process a single RNAseq sample with docker.

    e.g. python3 run_rnaseq_docker.py '140818_11_OxicEvo04EcoliGlcM9_Broth-4' '/media/proline/dmccloskey/Resequencing_RNA/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4/' 'e_coli' '/media/proline/dmccloskey/Resequencing_RNA/indexes/' '/home/douglas/Documents/Resequencing_RNA/output/' '/media/proline/dmccloskey/Resequencing_RNA/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4/' 2 3
    """
    from argparse import ArgumentParser
    # (name, help) pairs; the order defines the positional argument order.
    arg_specs = (
        ("basename_I", "base name of the fastq files"),
        ("host_dirname_I", "directory for .fastq files"),
        ("organism_I", "name of index"),
        ("host_indexes_dir_I", "directory for indexes"),
        ("host_dirname_O", "location for output on the host"),
        ("paired_I", "unpaired, paired, or mixed end reads (i.e., 'unpaired', 'paired', 'mixed')"),
        ("threads_I", "number of processors to use"),
        ("trim3_I", "trim 3 bases off of each end"),
        ("library_type_I", "the library type"),
        ("index_type_I", "index file type (.gtf or .gff)"),
        ("bowtie_options_I", "additional command line arguments not explicitly provided"),
        ("cufflinks_options_I", "additional command line arguments not explicitly provided"),
    )
    parser = ArgumentParser("process RNAseq data")
    for arg_name, arg_help in arg_specs:
        parser.add_argument(arg_name, help=arg_help)
    args = parser.parse_args()
    run_rnaseq_docker(
        args.basename_I, args.host_dirname_I, args.organism_I,
        args.host_indexes_dir_I, args.host_dirname_O, args.paired_I,
        args.threads_I, args.trim3_I, args.library_type_I,
        args.index_type_I, args.bowtie_options_I, args.cufflinks_options_I)
def main_batchFile():
    """Command line front end: process many RNAseq samples listed in a csv.

    e.g. python3 run_rnaseq_docker.py '/media/proline/dmccloskey/Resequencing_RNA/rnaseq_files.csv' []
    """
    from argparse import ArgumentParser
    parser = ArgumentParser("process RNAseq data")
    parser.add_argument("filename_csv_I", help="""list of files and parameters in a .csv""")
    parser.add_argument("filename_list_I", help="""list of files and parameters e.g. [{basename_I:...,host_dirname_I:...,},...]""")
    parsed = parser.parse_args()
    run_rnaseq_docker_fromCsvOrFile(parsed.filename_csv_I, parsed.filename_list_I)
# Script entry point: batch (csv) mode is the active behaviour; the
# single-file front end is kept available but disabled.
if __name__ == "__main__":
    #main_singleFile();
    main_batchFile();
"content_hash": "6ae88741c6773f82a8557beb435e0064",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 375,
"avg_line_length": 53.78395061728395,
"alnum_prop": 0.6277975438999197,
"repo_name": "dmccloskey/sequencing_utilities",
"id": "f3e7e0379e2b37b46657bfb212e4c43285396b9c",
"size": "8715",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docker_run/run_rnaseq_docker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "161610"
},
{
"name": "Shell",
"bytes": "8860"
}
],
"symlink_target": ""
} |
import sys
import json
import codecs
import unicodecsv as csv
import metautils
from dbsettings import settings
# Python 2 script. Phase 1: load the govdata.de JSON dump and select the
# catalog entries to process (all settlements, or one specific portal).
actualcategories = []
if (len(sys.argv)<2):
    print 'Usage: datagov-ckan-getDataFromSettlements all|portal datagovJSONDump.json [list,of,portals,to,exclude]'
    exit()
with open(sys.argv[2], 'rb') as jsonfile:
    text = jsonfile.read()
    alldata = json.loads(text)
if sys.argv[1] == 'all':
    portal = 'govdata.de'
    excludes = []
    if (len(sys.argv)>3):
        # Optional comma-separated list of originating portals to skip.
        excludes = sys.argv[3].replace(' ', '').split(',')
        print 'Excluding following portals:'
        for excludeportal in excludes:
            print excludeportal
    cities = metautils.getCities(alternativeFile='notcitiesingovdata.csv')
    cities.extend(metautils.getCities())
    #Only add data we don't have from somewhere else
    data = []
    excludecount = 0
    uniqueportals = set()
    for item in alldata:
        if ('extras' in item and 'metadata_original_portal' in item['extras']):
            # Keep the item only if its originating portal is not excluded.
            if not any(x in item['extras']['metadata_original_portal'] for x in excludes):
                data.append(item)
                uniqueportals.add(item['extras']['metadata_original_portal'])
            else:
                excludecount += 1
        else:
            # Items without an originating portal are always kept.
            data.append(item)
    print str(excludecount) + ' items excluded.'
    print 'List of remaining portals:'
    print uniqueportals
    [foundItems, notfounditems] = metautils.findOnlyCityData(data, cities)
    print 'Out of ' + str(len(data)) + ' catalog entries, ' + str(len(foundItems)) + ' appear to be related to the input list of settlements'
    #TODO: Assign not found to DE
else:
    portal = sys.argv[1]
    if 'http://' in portal:
        # Strip the scheme so the stored portal name is scheme-less.
        portal = portal[7:len(portal)]
    data = alldata
    #Search for the input term, it can include http://
    foundItems = metautils.findOnlyPortalData(data, sys.argv[1])
    print 'Out of ' + str(len(data)) + ' catalog entries, ' + str(len(foundItems)) + ' appear to be related to the portal specified'
#Map and write the data. Still wondering how much of this can/should be pulled out to metautils
# Phase 2: map each selected catalog entry onto a blank DB row and write the
# result to the database, replacing this portal's previous data.
datafordb = []
row = metautils.getBlankRow()
uniquecities = set()
metautils.setsettings(settings)
if sys.argv[1] == 'all':
    #Don't use cities that have their own open data catalogs (regardless of originating portal field)
    excludecities = metautils.getCitiesWithOpenDataPortals()
    print 'Excluding cities with portals:'
    print excludecities
else:
    excludecities = []
excludecount = 0
for foundItem in foundItems:
    thecity = metautils.getShortCityName(foundItem['city']['shortname'])
    if thecity not in excludecities:
        item = foundItem['item']
        row = metautils.getBlankRow()
        formatslist = []
        if 'resources' in item:
            # Collect resource URLs and the distinct (upper-cased) formats.
            for resource in item['resources']:
                row['files'].append(resource['url'])
                if resource['format'].upper() not in formatslist:
                    formatslist.append(resource['format'].upper())
            [formattext, geo] = metautils.processListOfFormats(formatslist)
            row[u'Format'] = formattext
            row[u'geo'] = geo
        row[u'Stadt'] = thecity
        uniquecities.add(foundItem['city']['originalname'])
        row[u'Dateibezeichnung'] = item['title']
        # In 'all' mode (or when the item has no usable url) link back to the
        # govdata.de detail page instead of the item's own url.
        if sys.argv[1] == 'all' or 'url' not in item or item['url'] == '':
            row[u'URL PARENT'] = 'https://www.govdata.de/daten/-/details/' + item['id']
        else:
            row[u'URL PARENT'] = item['url']
        if 'notes' in item:
            row[u'Beschreibung'] = item['notes']
        if 'license' in item:
            row[u'Lizenz'] = item['license']
        elif 'extras' in item and 'terms_of_use' in item['extras'] and 'license_id' in item['extras']['terms_of_use']:
            #Really?
            # terms_of_use may arrive either as a JSON string or as a dict.
            if type(item['extras']['terms_of_use']) != dict:
                row[u'Lizenz'] = json.loads(item['extras']['terms_of_use'])['license_id']
            else:
                row[u'Lizenz'] = item['extras']['terms_of_use']['license_id']
        else:
            # NOTE(review): when no license is found the blank row's default
            # value is passed through long_license_to_short below — confirm
            # getBlankRow provides a sensible default for u'Lizenz'.
            print 'WARNING: Could not find license in license or extras/terms of use/license id fields'
        row[u'Lizenz'] = metautils.long_license_to_short(row[u'Lizenz'])
        if 'maintainer' in item and item['maintainer'] != None:
            row[u'Veröffentlichende Stelle'] = item['maintainer']
        # Map CKAN groups onto ODM categories; mark the row as categorised
        # as soon as at least one group maps.
        for group in item['groups']:
            odm_cats = metautils.govDataShortToODM(group)
            if len(odm_cats) > 0:
                for cat in odm_cats:
                    row[cat] = 'x'
                row[u'Noch nicht kategorisiert'] = ''
        row['metadata'] = item
        datafordb.append(row)
    else:
        excludecount += 1
if sys.argv[1] == 'all':
    print str(excludecount) + ' items excluded.'
#Update city list
metautils.addCities(uniquecities, None)
#Remove this catalog's data
metautils.removeDataFromPortal(portal)
#Add data
metautils.addDataToDB(datafordb=datafordb, originating_portal=portal, checked=True, accepted=True, remove_data=True)
| {
"content_hash": "4becf45ce980938c725cec384a962e4d",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 141,
"avg_line_length": 36.330935251798564,
"alnum_prop": 0.6253465346534653,
"repo_name": "okfde/odm-datenerfassung",
"id": "c04dc3d07bf19bef5e9cd5f9e16f1286789e522d",
"size": "5075",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "readdatacatalogs/datagov-ckan-getDataFromSettlements.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "211286"
}
],
"symlink_target": ""
} |
from bndl.util.callsite import *
| {
"content_hash": "bdd8d544c01753ed0be60ff223368e2f",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 32,
"avg_line_length": 33,
"alnum_prop": 0.7878787878787878,
"repo_name": "bndl/bndl",
"id": "74b80ac00cb32f8b064576502bbc714e01faae6c",
"size": "578",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bndl/compute/explain.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Cython",
"bytes": "20307"
},
{
"name": "HTML",
"bytes": "13357"
},
{
"name": "Makefile",
"bytes": "669"
},
{
"name": "Python",
"bytes": "549892"
}
],
"symlink_target": ""
} |
'''
Module provides and registers checksum strategy classes for the context class.
'''
from ..checksum import Checksum
from .luhn10 import Luhn10
from .fact137 import Fact137
from .mod9710 import Mod9710
from .mod31 import Mod31
from .verhoeff import Verhoeff
from .fact12 import Fact12
# Register every bundled checksum strategy with the context class, in the
# same order as before.
for _strategy in (Luhn10, Fact137, Mod9710, Mod31, Verhoeff, Fact12):
    Checksum.register_strategy(_strategy)
del _strategy
| {
"content_hash": "152b200895d90252d7572204df69004e",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 78,
"avg_line_length": 27.88888888888889,
"alnum_prop": 0.8247011952191236,
"repo_name": "vaiski/checksum",
"id": "09509e98dd4173342a97edb2c5c2d73333a4d7a7",
"size": "527",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/checksum/strategies/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22963"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
__reversion__ = "$Revision: 20 $"
__author__ = "$Author: holtwick $"
__date__ = "$Date: 2007-10-09 12:58:24 +0200 (Di, 09 Okt 2007) $"
from sx.pisa3 import pisa
from sx.pisa3 import pisa_pdf
if __name__=="__main__":
    # Demonstrate the ways a pisaPDF can aggregate content: a rendered HTML
    # document, a raw string, a URI, an open file object and a data URI.
    pdf = pisa_pdf.pisaPDF()
    subPdf = pisa.pisaDocument(
        """
        Hello <strong>World</strong>
        """)
    pdf.addDocument(subPdf)
    # Use context managers so file handles are closed deterministically
    # instead of leaking until garbage collection.
    with open("test-loremipsum.pdf", "rb") as rawfile:
        raw = rawfile.read()
    pdf.addFromString(raw)
    pdf.addFromURI("test-loremipsum.pdf")
    # Assumes addFromFile consumes the file before returning — TODO confirm
    # against sx.pisa3 before relying on the early close.
    with open("test-loremipsum.pdf", "rb") as srcfile:
        pdf.addFromFile(srcfile)
    datauri = pisa.makeDataURIFromFile("test-loremipsum.pdf")
    pdf.addFromURI(datauri)
    # Write the result to a file and open it
    filename = __file__ + ".pdf"
    result = pdf.getvalue()
    with open(filename, "wb") as outfile:
        outfile.write(result)
    pisa.startViewer(filename)
| {
"content_hash": "9136e677671ea2598f611bf441952d4c",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 70,
"avg_line_length": 24.885714285714286,
"alnum_prop": 0.6153846153846154,
"repo_name": "22rostislav/xhtml2pdf",
"id": "0a93ef8ed80a27a8e65c4f776d982db3eeeec3ce",
"size": "1486",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/pdfjoiner.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25356"
},
{
"name": "Genshi",
"bytes": "7610"
},
{
"name": "HTML",
"bytes": "469495"
},
{
"name": "Python",
"bytes": "482629"
}
],
"symlink_target": ""
} |
from helper import greeting
greeting('Hello') | {
"content_hash": "2522a10c657098f23af7e0c6674f1459",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 27,
"avg_line_length": 15.333333333333334,
"alnum_prop": 0.8043478260869565,
"repo_name": "MRaiti/cs3240-labdemo",
"id": "992441a8d88b48117f013cb11d8592ce7ab0b705",
"size": "46",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hello.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "104"
}
],
"symlink_target": ""
} |
# Optional dependency: histogramming requires PyROOT. Record availability so
# callers can check ROOTfound instead of crashing on import.
# NOTE(review): the two `import numpy as np` lines just below are duplicated.
try:
    import ROOT
    ROOTfound=True
except ImportError:
    ROOTfound=False
    print("WARNING make_histo_ofallpixels.py: ROOT not found")
import numpy as np
import numpy as np
from dicom_tools.hist_match import match_all
# from tabulate import tabulate
def make_histo_ofallpixels(data, suffix="", verbose=False, normalize=False):
    """Build one ROOT TH1F per slice, filled with all of that slice's pixels.

    Args:
        data: array-like stack of 2D pixel slices; must support indexing,
            len(), .min(), .max(), and each slice must support .ravel().
        suffix: string appended to every histogram name so ROOT object
            names stay unique across calls.
        verbose: accepted for interface compatibility; currently unused.
        normalize: when True, histogram the histogram-matched data produced
            by match_all(data) instead of the raw values.

    Returns:
        list of ROOT.TH1F, one per slice, in slice order.
    """
    nbin = 1000
    if normalize:
        datan = match_all(data)
    else:
        datan = data
    # Bug fix: the binning and filling below previously always used the raw
    # `data`, silently ignoring the matched values when normalize=True.
    # Assumes match_all returns an array-like with the same slicing/min/max
    # interface as `data` — TODO confirm against dicom_tools.hist_match.
    binmin = datan.min() * 0.8
    binmax = datan.max() * 1.2
    allhistos = []
    # range() replaces the Python-2-only xrange().
    for layer in range(len(datan)):
        fetta = datan[layer]
        thishisto = ROOT.TH1F("h" + str(layer) + suffix, "h" + str(layer),
                              nbin, binmin, binmax)
        for val in fetta.ravel():
            thishisto.Fill(val)
        allhistos.append(thishisto)
    return allhistos
| {
"content_hash": "9264fa94f1b8a3498367c1ad2f08645d",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 86,
"avg_line_length": 24.352941176470587,
"alnum_prop": 0.6316425120772947,
"repo_name": "carlomt/dicom_tools",
"id": "1f05d864077a5c49802baeb633cab1509ea61db2",
"size": "828",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dicom_tools/make_histo_ofallpixels.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "MATLAB",
"bytes": "1752"
},
{
"name": "Python",
"bytes": "2278816"
}
],
"symlink_target": ""
} |
import numpy as np
from chainer import Variable
from chainer import optimizers
from chainer import cuda
from model.dqn_agent import Q
import chainer.functions as F
from model.agent import Agent
class DQNTrainer(Agent):
    """Trains a DQN agent with experience replay and a frozen target network.

    Wraps an acting ``agent`` (which owns the online Q network ``agent.q``)
    and keeps a separate target network that is synchronised with the online
    one every ``target_update_freq`` steps.
    """

    def __init__(self,
                 agent,
                 memory_size=10**4,
                 replay_size=32,
                 gamma=0.99,
                 initial_exploration=10**4,
                 target_update_freq=10**4,
                 learning_rate=0.00025,
                 epsilon_decay=1e-6,
                 minimum_epsilon=0.1):
        """
        Args:
            agent: acting agent owning the online Q network (``agent.q``).
            memory_size: number of transitions kept in the replay buffer.
            replay_size: minibatch size sampled per training step.
            gamma: discount factor for future rewards.
            initial_exploration: steps acted before training and epsilon
                decay begin.
            target_update_freq: steps between target-network syncs.
            learning_rate: RMSpropGraves learning rate.
            epsilon_decay: amount subtracted from epsilon each step after
                the exploration phase.
            minimum_epsilon: floor for the exploration rate.
        """
        self.agent = agent
        self.target = Q(self.agent.q.n_history, self.agent.q.n_action, on_gpu=self.agent.q.on_gpu)
        self.memory_size = memory_size
        self.replay_size = replay_size
        self.gamma = gamma
        self.initial_exploration = initial_exploration
        self.target_update_freq = target_update_freq
        self.learning_rate = learning_rate
        self.epsilon_decay = epsilon_decay
        self.minimum_epsilon = minimum_epsilon
        self._step = 0

        # Replay memory as parallel arrays:
        # [state, action, reward, next_state, episode_end].
        n_hist = self.agent.q.n_history
        size = self.agent.q.SIZE
        self.memory = [
            np.zeros((memory_size, n_hist, size, size), dtype=np.float32),
            np.zeros(memory_size, dtype=np.uint8),
            np.zeros((memory_size, 1), dtype=np.float32),
            np.zeros((memory_size, n_hist, size, size), dtype=np.float32),
            # dtype=bool: np.bool was only an alias for the builtin bool and
            # has been removed from modern numpy.
            np.zeros((memory_size, 1), dtype=bool)
        ]
        self.memory_text = [
            "state", "action", "reward", "next_state", "episode_end"
        ]

        # Optimizer drives the online network only; the target is frozen.
        self.optimizer = optimizers.RMSpropGraves(lr=learning_rate, alpha=0.95, momentum=0.95, eps=0.01)
        self.optimizer.setup(self.agent.q)
        # NOTE(review): original sentinel values reported before the first
        # optimiser update; kept for behavioural compatibility.
        self._loss = 9
        self._qv = 0

    def calc_loss(self, states, actions, rewards, next_states, episode_ends):
        """Return the clipped temporal-difference loss over one minibatch."""
        qv = self.agent.q(states)
        q_t = self.target(next_states)  # Q(s', *)
        max_q_prime = np.array(list(map(np.max, q_t.data)), dtype=np.float32)  # max_a Q(s', a)
        target = cuda.to_cpu(qv.data.copy())
        for i in range(self.replay_size):
            # Bug fix: the stored flag is a numpy bool and `x is True` is
            # always False for numpy scalars, so terminal transitions were
            # previously bootstrapped like non-terminal ones.
            if episode_ends[i][0]:
                _r = np.sign(rewards[i])
            else:
                # Reward clipping to {-1, 0, 1} plus discounted bootstrap.
                _r = np.sign(rewards[i]) + self.gamma * max_q_prime[i]
            target[i, actions[i]] = _r
        td = Variable(self.target.arr_to_gpu(target)) - qv
        td_tmp = td.data + 1000.0 * (abs(td.data) <= 1)  # Avoid zero division
        # Clip the TD error to [-1, 1] while keeping gradients well defined.
        td_clip = td * (abs(td.data) <= 1) + td/abs(td_tmp) * (abs(td.data) > 1)
        zeros = Variable(self.target.arr_to_gpu(np.zeros((self.replay_size, self.target.n_action), dtype=np.float32)))
        loss = F.mean_squared_error(td_clip, zeros)
        self._loss = loss.data
        self._qv = np.max(qv.data)
        return loss

    def start(self, observation):
        """Delegate episode start to the wrapped agent."""
        return self.agent.start(observation)

    def act(self, observation, reward):
        """Decay epsilon (after warm-up) and take one acting/training step."""
        if self.initial_exploration <= self._step:
            # Bug fix: honour the configured epsilon_decay instead of the
            # hard-coded 1.0/10**6 (the default value is identical).
            self.agent.epsilon -= self.epsilon_decay
            if self.agent.epsilon < self.minimum_epsilon:
                self.agent.epsilon = self.minimum_epsilon
        return self.train(observation, reward, episode_end=False)

    def end(self, observation, reward):
        """Record the terminal transition for the finished episode."""
        self.train(observation, reward, episode_end=True)

    def train(self, observation, reward, episode_end):
        """Store the latest transition, then learn/synchronise as scheduled."""
        action = 0
        last_state = self.agent.get_state()
        last_action = self.agent.last_action
        if not episode_end:
            action = self.agent.act(observation, reward)
            result_state = self.agent.get_state()
            self.memorize(last_state, last_action, reward, result_state, False)
        else:
            # Terminal transition: next_state is unused, store the last state.
            self.memorize(last_state, last_action, reward, last_state, True)
        if self.initial_exploration <= self._step:
            self.experience_replay()
            if self._step % self.target_update_freq == 0:
                # Hard update: copy the online parameters into the target.
                self.target.copyparams(self.agent.q)
        self._step += 1
        return action

    def memorize(self, state, action, reward, next_state, episode_end):
        """Write one transition into the ring buffer at the current step."""
        _index = self._step % self.memory_size
        self.memory[0][_index] = state
        self.memory[1][_index] = action
        self.memory[2][_index] = reward
        if not episode_end:
            self.memory[3][_index] = next_state
        self.memory[4][_index] = episode_end

    def experience_replay(self):
        """Sample a random minibatch from memory and do one optimizer step."""
        # Only sample indices that have been written at least once.
        upper = self._step if self._step < self.memory_size else self.memory_size
        indices = np.random.randint(0, upper, (self.replay_size))
        states = []
        actions = []
        rewards = []
        next_states = []
        episode_ends = []
        for i in indices:
            states.append(self.memory[0][i])
            actions.append(self.memory[1][i])
            rewards.append(self.memory[2][i])
            next_states.append(self.memory[3][i])
            episode_ends.append(self.memory[4][i])
        self.optimizer.target.cleargrads()
        loss = self.calc_loss(np.array(states), np.array(actions),
                              np.array(rewards), np.array(next_states),
                              np.array(episode_ends))
        loss.backward()
        self.optimizer.update()

    def report(self, episode):
        """Save the agent and return a one-line training progress summary."""
        s = "{0}: loss={1}, q value={2}, epsilon={3}".format(self._step, self._loss, self._qv, self.agent.epsilon)
        self.agent.save(episode)
        return s
| {
"content_hash": "5c656218eb08c0aac9c2dd58476b9951",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 118,
"avg_line_length": 36.425,
"alnum_prop": 0.5487302676733014,
"repo_name": "icoxfog417/chainer_pong",
"id": "a0fd1e57d4f1cb570c1f66c0b911b5e5dd3849dd",
"size": "5828",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model/dqn_trainer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22962"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals, absolute_import
import os
import django
# Minimal test-only settings: DEBUG on, in-memory sqlite, no middleware.
DEBUG = True
USE_TZ = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "=================================================="
# In-memory sqlite: each test run starts from an empty database.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": ":memory:",
    }
}
INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sites",
    "django_dandelion",
]
SITE_ID = 1
# Django 1.10 renamed MIDDLEWARE_CLASSES to MIDDLEWARE; set whichever the
# installed version expects (empty in both cases).
if django.VERSION >= (1, 10):
    MIDDLEWARE = ()
else:
    MIDDLEWARE_CLASSES = ()
# NOTE(review): raises KeyError at import time when the DANDELION_TOKEN
# environment variable is unset — intentional for the test suite, but worth
# confirming.
DANDELION_TOKEN = os.environ['DANDELION_TOKEN']
| {
"content_hash": "b938a87a62fdb61c4cb4bc223422a995",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 66,
"avg_line_length": 18.428571428571427,
"alnum_prop": 0.5906976744186047,
"repo_name": "AlessioBazzanella/django-dandelion",
"id": "c7beb3ba640173059f1574bbc126b9e6d5655fec",
"size": "666",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1594"
},
{
"name": "Python",
"bytes": "64523"
}
],
"symlink_target": ""
} |
from test_framework.test_framework import DankcoinTestFramework
from test_framework.util import (
start_nodes,
start_node,
assert_equal,
connect_nodes_bi,
)
import os
import shutil
class WalletHDTest(DankcoinTestFramework):
    """Functional test for HD wallets: deterministic key derivation and
    restore-from-backup. Node 0 is non-HD, node 1 is HD with an empty
    keypool so every address is freshly derived."""
    def __init__(self):
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 2
        self.node_args = [['-usehd=0'], ['-usehd=1', '-keypool=0']]
    def setup_network(self):
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, self.node_args)
        self.is_network_split = False
        connect_nodes_bi(self.nodes, 0, 1)
    def run_test (self):
        tmpdir = self.options.tmpdir
        # Make sure we use hd, keep masterkeyid
        masterkeyid = self.nodes[1].getwalletinfo()['hdmasterkeyid']
        assert_equal(len(masterkeyid), 40)
        # Import a non-HD private key in the HD wallet
        non_hd_add = self.nodes[0].getnewaddress()
        self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
        # This should be enough to keep the master key and the non-HD key
        # NOTE(review): tmpdir + "hd.bak" has no path separator, so the
        # backup lands next to (not inside) tmpdir; the restore below uses
        # the same concatenation, so the test is self-consistent.
        self.nodes[1].backupwallet(tmpdir + "hd.bak")
        #self.nodes[1].dumpwallet(tmpdir + "hd.dump")
        # Derive some HD addresses and remember the last
        # Also send funds to each add
        self.nodes[0].generate(101)
        hd_add = None
        num_hd_adds = 300
        for i in range(num_hd_adds):
            hd_add = self.nodes[1].getnewaddress()
            hd_info = self.nodes[1].validateaddress(hd_add)
            # Keypath index starts at 1 because the backup consumed index 0's
            # derivation state before this loop — confirm against wallet code.
            assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i+1)+"'")
            assert_equal(hd_info["hdmasterkeyid"], masterkeyid)
            self.nodes[0].sendtoaddress(hd_add, 1)
            self.nodes[0].generate(1)
        self.nodes[0].sendtoaddress(non_hd_add, 1)
        self.nodes[0].generate(1)
        self.sync_all()
        # One coin per HD address plus one to the imported non-HD address.
        assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
        print("Restore backup ...")
        self.stop_node(1)
        os.remove(self.options.tmpdir + "/node1/regtest/wallet.dat")
        shutil.copyfile(tmpdir + "hd.bak", tmpdir + "/node1/regtest/wallet.dat")
        self.nodes[1] = start_node(1, self.options.tmpdir, self.node_args[1])
        #connect_nodes_bi(self.nodes, 0, 1)
        # Assert that derivation is deterministic
        hd_add_2 = None
        for _ in range(num_hd_adds):
            hd_add_2 = self.nodes[1].getnewaddress()
            hd_info_2 = self.nodes[1].validateaddress(hd_add_2)
            assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(_+1)+"'")
            assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid)
        assert_equal(hd_add, hd_add_2)
        # Needs rescan
        self.stop_node(1)
        self.nodes[1] = start_node(1, self.options.tmpdir, self.node_args[1] + ['-rescan'])
        #connect_nodes_bi(self.nodes, 0, 1)
        assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
# Standard qa test entry point.
if __name__ == '__main__':
    WalletHDTest().main ()
| {
"content_hash": "acf9aa6429df25445a7071f6b0c6ca01",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 91,
"avg_line_length": 36.51219512195122,
"alnum_prop": 0.5988643954575819,
"repo_name": "dankcoin/dankcoin",
"id": "3b33f8599a23e92198aa32b03c1014e7b256cb65",
"size": "3204",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qa/rpc-tests/wallet-hd.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "679373"
},
{
"name": "C++",
"bytes": "4691594"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "3831"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2101"
},
{
"name": "M4",
"bytes": "173500"
},
{
"name": "Makefile",
"bytes": "102963"
},
{
"name": "Objective-C",
"bytes": "3777"
},
{
"name": "Objective-C++",
"bytes": "7242"
},
{
"name": "Protocol Buffer",
"bytes": "2312"
},
{
"name": "Python",
"bytes": "908583"
},
{
"name": "QMake",
"bytes": "2021"
},
{
"name": "Shell",
"bytes": "34197"
}
],
"symlink_target": ""
} |
"""Performance tests for mask_password.
"""
import timeit
from oslo_utils import strutils
# A moderately sized input (~50K) string
# http://paste.openstack.org/raw/155864/
# infile = '155864.txt'
# Untruncated version of the above (~310K)
# http://dl.sileht.net/public/payload.json.gz
infile = 'large_json_payload.txt'
with open(infile, 'r') as f:
input_str = f.read()
print('payload has %d bytes' % len(input_str))
for pattern in strutils._SANITIZE_PATTERNS_2['admin_pass']:
print('\ntesting %s' % pattern.pattern)
t = timeit.Timer(
r"re.sub(pattern, r'\g<1>***\g<2>', payload)",
"""
import re
payload = '''%s'''
pattern = re.compile(r'''%s''')
""" % (input_str, pattern.pattern))
print(t.timeit(1))
t = timeit.Timer(
"strutils.mask_password('''" + input_str + "''')",
"from oslo_utils import strutils",
)
print(t.timeit(1))
| {
"content_hash": "af905749243c18897e7ae02335cfc47f",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 59,
"avg_line_length": 24.942857142857143,
"alnum_prop": 0.6403207331042382,
"repo_name": "openstack/oslo.utils",
"id": "677f67e3e5b225d455e257f959c6c17299b1da4a",
"size": "1494",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/perf_test_mask_password.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "334957"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, with_statement
from revolver.core import run
from revolver import contextmanager as ctx
from revolver import directory as dir
from revolver import file, package
from revolver.tool import php_build, php_phpenv
def install(version, fpm=False, xdebug=False):
    """Install PHP `version` via phpenv/php-build on the remote host.

    Skips compilation when phpenv can already switch to `version`.
    Always finishes by rehashing phpenv and ensuring APC and composer.
    """
    # Make sure the build tooling itself is present first.
    php_build.ensure()
    php_phpenv.ensure()
    # '; true' keeps run() from failing when the version is missing;
    # empty output means the switch succeeded (version already built).
    switched = run("phpenv global %s; true" % version)
    if not switched == "":
        _install_php(version, fpm, xdebug)
        run("phpenv global %s" % version)
    run("phpenv rehash")
    _install_apc()
    _install_composer()
def _install_php(version, fpm, xdebug):
    """Compile PHP `version` from source into ~/.phpenv/versions/<version>.

    `fpm` adds the --enable-fpm configure flag; `xdebug` controls the
    php-build Xdebug extension (on unless explicitly disabled).
    """
    # Build dependencies needed to compile PHP from source.
    package.ensure([
        "build-essential", "lemon", "libbz2-dev", "libpcre3-dev",
        "libc-client2007e-dev", "libcurl4-gnutls-dev", "libexpat1-dev",
        "libfreetype6-dev", "libgmp3-dev", "libicu-dev", "libjpeg8-dev",
        "libltdl-dev", "libmcrypt-dev", "libmhash-dev", "libpng12-dev",
        "libreadline-dev", "libssl1.0.0", "libssl-dev", "libt1-dev",
        "libtidy-dev", "libxml2-dev", "libxslt1-dev", "re2c", "zlib1g-dev"
    ])
    def configure(value):
        # Build a shell export that appends one flag to php-build's
        # PHP_BUILD_CONFIGURE_OPTS environment variable.
        key = "PHP_BUILD_CONFIGURE_OPTS"
        return 'export %(key)s="%(value)s $%(key)s"' % locals()
    prefix = "$HOME/.phpenv/versions/%s" % version
    # Force the usage of pear because pyrus is unable to install APC
    # See https://github.com/CHH/php-build/blob/master/man/php-build.1.ronn#L79
    pear_path = "%s/pear" % prefix
    pear = configure("--with-pear=%s" % pear_path)
    dir.ensure(pear_path, recursive=True)
    # We only support this two configuration options! Why?
    # - Xdebug is already integrated into php-build
    # - FPM is a very common flag
    #
    # But if you want to configure php even further? Own definition files!
    # See https://github.com/CHH/php-build/blob/master/man/php-build.1.ronn#L54
    fpm = (fpm and configure("--enable-fpm")) or "true"
    xdebug = (xdebug and "true") or 'export PHP_BUILD_XDEBUG_ENABLE="off"'
    # The nested prefixes stack the three shell exports before php-build runs.
    with ctx.prefix(pear):
        with ctx.prefix(xdebug):
            with ctx.prefix(fpm):
                run("php-build %s %s" % (version, prefix))
    # Some executables (like php-fpm) aren't available through phpenv without
    # this symlinks
    # NOTE(review): '\;' in this non-raw string is an invalid escape sequence
    # (DeprecationWarning on Python 3.6+); the bytes sent to the shell are
    # unchanged, but a raw string would be cleaner.
    with ctx.cd(prefix):
        run('find sbin/ -type f -exec ln -sf "$(pwd)/{}" -t "$(pwd)/bin" \;')
def _install_apc():
    """Install the APC pecl extension and enable it for the active PHP."""
    # '; true' keeps run() happy when grep matches nothing; non-empty
    # output means APC is already installed.
    installed = run("pecl list | grep -i apc; true")
    if installed:
        return
    # 'yes ""' auto-accepts pecl's interactive prompts.
    run("yes '' | pecl install apc")
    # Derive the conf.d directory of the currently active PHP version.
    bin_path = run("phpenv which php")
    conf_path = bin_path.replace("/bin/php", "/etc/conf.d")
    file.write(conf_path + "/apc.ini", "extension=apc.so")
def _install_composer():
    """Install the Composer dependency manager (not yet implemented)."""
    # TODO Implement this
    pass
| {
"content_hash": "380d2723c347eca246a45d4f31c5ae0e",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 79,
"avg_line_length": 33.9125,
"alnum_prop": 0.6336159233321047,
"repo_name": "michaelcontento/revolver",
"id": "e9bb43094a6c8b40c4fe21561cc69bc1eda7e0c8",
"size": "2738",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "revolver/tool/php.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "10174"
},
{
"name": "Python",
"bytes": "68675"
}
],
"symlink_target": ""
} |
from wdom.tag import NewTagClass as NewTag
from wdom.themes import *
# Theme metadata: identifies the Ink (SAPO) CSS framework to wdom.
name = 'INK'
project_url = 'http://ink.sapo.pt/'
project_repository = 'https://github.com/sapo/Ink/'
license = 'MIT License'
license_url = 'https://github.com/sapo/Ink/blob/develop/LICENSE'
# CDN assets injected into pages using this theme.
css_files = [
    '//fastly.ink.sapo.pt/3.1.10/css/ink.css',
]
js_files = [
    '//fastly.ink.sapo.pt/3.1.10/js/ink-all.js',
]
headers = []
# Buttons: map wdom's generic button widgets onto Ink's .ink-button class,
# using Ink colour classes (blue/green/black/orange/red) for the variants.
Button = NewTag('Button', 'button', Button, class_='ink-button')
DefaultButton = NewTag('DefaultButton', 'button', Button, is_='default-button')
PrimaryButton = NewTag('PrimaryButton', 'button', Button, class_='blue', is_='primary-button')
SecondaryButton = NewTag('SecondaryButton', 'button', Button, is_='secondary-button')
SuccessButton = NewTag('SuccessButton', 'button', Button, class_='green', is_='success-button')
InfoButton = NewTag('InfoButton', 'button', Button, class_='black', is_='info-button')
WarningButton = NewTag('WarningButton', 'button', Button, class_='orange', is_='warning-button')
DangerButton = NewTag('DangerButton', 'button', Button, class_='red', is_='danger-button')
ErrorButton = NewTag('ErrorButton', 'button', Button, class_='red', is_='error-button')
LinkButton = NewTag('LinkButton', 'button', Button, is_='link-button')
# Form widgets.
Form = NewTag('Form', 'form', Form, class_='ink-form')
FormGroup = NewTag('FormGroup', 'div', FormGroup, class_='control-group')
Input = NewTag('Input', 'input', Input)
TextInput = NewTag('TextInput', 'input', TextInput)
Textarea = NewTag('Textarea', 'textarea', Textarea)
# NOTE(review): Select/Option are rendered as a ul/li dropdown here rather
# than native <select>/<option> elements -- confirm that is intentional.
Select = NewTag('Select', 'ul', Ul, class_='dropdown-menu')
Option = NewTag('Option', 'li', Li)
Table = NewTag('Table', 'table', Table, class_='ink-table')
# Grid system: Ink uses percentage-based 'all-NN' column classes; the 12
# wdom columns are mapped to the closest Ink percentages.
Container = NewTag('Container', 'div', Container, class_='ink-grid')
Wrapper = NewTag('Wrapper', 'div', Wrapper, class_='ink-grid')
Row = NewTag('Row', 'div', Row, class_='column-group')
Col1 = NewTag('Col1', 'div', Col1, class_='all-10')
Col2 = NewTag('Col2', 'div', Col2, class_='all-15')
Col3 = NewTag('Col3', 'div', Col3, class_='all-25')
Col4 = NewTag('Col4', 'div', Col4, class_='all-33')
Col5 = NewTag('Col5', 'div', Col5, class_='all-40')
Col6 = NewTag('Col6', 'div', Col6, class_='all-50')
Col7 = NewTag('Col7', 'div', Col7, class_='all-60')
Col8 = NewTag('Col8', 'div', Col8, class_='all-66')
Col9 = NewTag('Col9', 'div', Col9, class_='all-75')
Col10 = NewTag('Col10', 'div', Col10, class_='all-85')
Col11 = NewTag('Col11', 'div', Col11, class_='all-90')
Col12 = NewTag('Col12', 'div', Col12, class_='all-100')
# All theme classes exported by this module.
extended_classes = [
    Button,
    DefaultButton,
    PrimaryButton,
    SecondaryButton,
    SuccessButton,
    InfoButton,
    WarningButton,
    DangerButton,
    ErrorButton,
    LinkButton,
    Form,
    FormGroup,
    Input,
    TextInput,
    Textarea,
    Select,
    Option,
    Table,
    Container,
    Wrapper,
    Row,
    Col1,
    Col2,
    Col3,
    Col4,
    Col5,
    Col6,
    Col7,
    Col8,
    Col9,
    Col10,
    Col11,
    Col12,
]
| {
"content_hash": "03cc67f6f6fc0d462a3afef3903cacfc",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 96,
"avg_line_length": 33.38202247191011,
"alnum_prop": 0.6486031639178728,
"repo_name": "miyakogi/wdom",
"id": "8c08109b1692557571e119db01297a4103e0cedf",
"size": "3034",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "wdom/themes/ink.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "540"
},
{
"name": "HTML",
"bytes": "1316"
},
{
"name": "JavaScript",
"bytes": "16122"
},
{
"name": "Makefile",
"bytes": "2938"
},
{
"name": "Python",
"bytes": "512498"
}
],
"symlink_target": ""
} |
"""Test p4lib.py's interface to 'p4 client'."""
import os
import unittest
import testsupport
from p4lib import P4, P4LibError
class ClientTestCase(unittest.TestCase):
    """Functional tests for p4lib's wrapper around 'p4 client'.

    Each test chdir's into a known test user's workspace and restores the
    original cwd in a finally block.

    Improvement: the deprecated ``failUnless``/``failUnlessRaises`` aliases
    (removed in Python 3.12) are replaced with ``assertEqual``/``assertIn``/
    ``assertTrue``/``assertRaises``, which also produce useful failure
    messages.
    """
    #TODO:
    # - test expected error from trying to change a locked client from a
    #   user that is not the owner
    # - have the super user change a locked client with and without force
    def test_get_client(self):
        """A retrieved client spec exposes all the standard fields."""
        top = os.getcwd()
        andrew = testsupport.users['andrew']
        p4 = P4()
        try:
            os.chdir(andrew['home'])
            name = p4.clients()[0]['client']
            client = p4.client(name=name)
            self.assertEqual(client['client'], name)
            self.assertIn('access', client)
            self.assertIn('description', client)
            self.assertIn('lineend', client)
            self.assertIn('options', client)
            self.assertIn('owner', client)
            self.assertIn('root', client)
            self.assertIn('update', client)
            self.assertIn('view', client)
        finally:
            os.chdir(top)

    def test_delete_recreate_client(self):
        """Deleting then re-creating a client round-trips its spec."""
        top = os.getcwd()
        andrew = testsupport.users['andrew']
        p4 = P4()
        try:
            os.chdir(andrew['home'])
            name = testsupport.users['bertha']['client']
            clientBefore = p4.client(name=name)
            # Delete the client.
            result = p4.client(name=name, delete=1)
            self.assertEqual(result['client'], name)
            self.assertEqual(result['action'], "deleted")
            # Re-create the client.
            result = p4.client(client=clientBefore)
            self.assertEqual(result['client'], name)
            self.assertEqual(result['action'], "saved")
            # Check that the client is now still the same.
            clientAfter = p4.client(name=name)
            self.assertEqual(clientBefore['description'],
                             clientAfter['description'])
            self.assertEqual(clientBefore['lineend'],
                             clientAfter['lineend'])
            self.assertEqual(clientBefore['options'],
                             clientAfter['options'])
            self.assertEqual(clientBefore['owner'],
                             clientAfter['owner'])
            self.assertEqual(clientBefore['root'],
                             clientAfter['root'])
            self.assertEqual(clientBefore['view'],
                             clientAfter['view'])
        finally:
            os.chdir(top)

    def test_update_client(self):
        """Updating a client's description reports action 'saved'."""
        top = os.getcwd()
        andrew = testsupport.users['andrew']
        p4 = P4()
        try:
            os.chdir(andrew['home'])
            desc = 'test_update_client'
            name = testsupport.users['bertha']['client']
            clientBefore = p4.client(name=name)
            # Update the client.
            result = p4.client(name=name, client={'description':desc})
            self.assertEqual(result['client'], name)
            self.assertEqual(result['action'], "saved")
            # Cleanup: restore the original spec.
            result = p4.client(client=clientBefore)
        finally:
            os.chdir(top)

    def test_update_client_no_change(self):
        """An empty update reports action 'not changed'."""
        top = os.getcwd()
        andrew = testsupport.users['andrew']
        p4 = P4()
        try:
            os.chdir(andrew['home'])
            desc = 'test_update_client'
            name = testsupport.users['bertha']['client']
            clientBefore = p4.client(name=name)
            # Update the client with no fields: must be a no-op.
            result = p4.client(name=name, client={})
            self.assertEqual(result['client'], name)
            self.assertEqual(result['action'], "not changed")
            # Cleanup: restore the original spec.
            result = p4.client(client=clientBefore)
        finally:
            os.chdir(top)

    @unittest.skip('Cannot reach the limit with 2014.2 p4d version')
    def test_create_client_hit_license_limit(self):
        """Creating one client past the unlicensed limit must fail."""
        top = os.getcwd()
        andrew = testsupport.users['andrew']
        p4 = P4()
        client_base_name = 'test_create_client_hit_license_limit'
        try:
            os.chdir(andrew['home'])
            # Without license, version is limited to 20 clients.
            # We have two already.
            for client_num in range(18):
                client = {
                    'client': client_base_name + str(client_num),
                    'description': 'test_create_client_hit_license_limit',
                }
                p4.client(client=client)
            # So this one is in excess and should fail
            self.assertRaises(P4LibError, p4.client, client=client)
        finally:
            os.chdir(top)

    def test_get_client_use_label_name(self):
        """Fetching a client by a *label* name must raise P4LibError."""
        top = os.getcwd()
        andrew = testsupport.users['andrew']
        p4 = P4()
        try:
            os.chdir(andrew['home'])
            # Create a label.
            name = "test_get_client_use_label_name"
            labelDict = {
                "label": name,
                "description": name,
                "view": "//depot/...",
            }
            p4.label(label=labelDict)
            self.assertRaises(P4LibError, p4.client, name=name)
        finally:
            os.chdir(top)
def suite():
    """Return a unittest.TestSuite to be used by test.py."""
    # unittest.makeSuite() is deprecated (removed in Python 3.13);
    # TestLoader.loadTestsFromTestCase is the supported equivalent.
    return unittest.TestLoader().loadTestsFromTestCase(ClientTestCase)
| {
"content_hash": "d0390e7e5c8f7d48d4560fa9ea9e8a20",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 74,
"avg_line_length": 32.42261904761905,
"alnum_prop": 0.5342390306590784,
"repo_name": "Mokona/python-p4lib",
"id": "0134a04c2406cdb02787eb1dc0747b192d5d0e5c",
"size": "5646",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functionnal/test_p4lib_client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "21622"
},
{
"name": "Python",
"bytes": "359429"
},
{
"name": "Shell",
"bytes": "1306"
}
],
"symlink_target": ""
} |
import sys
import time
from itertools import *
from colorama import *
from fuzzywuzzy import fuzz
from fuzzywuzzy import process
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import csv
from playhouse.csv_loader import *
from bs4 import *
from db import *
# Short aliases for colorama terminal escape codes used in console output.
cg=Fore.GREEN  # green (success)
cr=Fore.RED  # red (errors)
rs=Style.RESET_ALL  # reset all styling
cy=Fore.YELLOW  # yellow
cb=Fore.BLUE  # blue
def informationXchange(generation, list1):
    """Append change markers to `list1` for every field that differs
    between consecutive records of `generation`.

    Each marker is '<record id>:<label>'; the first record only sets the
    baseline and is never reported. Returns `list1` (mutated in place).

    Rewritten from eight copy-pasted compare-and-update blocks into a
    single table-driven loop; labels and their order are unchanged.
    """
    # (attribute on the record, label emitted when it changes) -- the
    # order here is the order markers are appended per record.
    fields = [
        ('labs', 'labs'),
        ('credit_hours', 'credithours'),
        ('lecture_hours', 'lecturehours'),
        ('title', 'title'),
        ('comments', 'comments'),
        ('course', 'courseid'),
        ('other_info', 'other info'),
        ('previous_course', 'previous id'),
    ]
    previous = None
    for record in generation:
        current = [getattr(record, attr) for attr, _ in fields]
        if previous is not None:
            record_id = str(record.id)
            for (attr, label), old, new in zip(fields, previous, current):
                if old != new:
                    list1.append(record_id + ':' + label)
        previous = current
    return list1
def offer(year,code,session,profid,numberofstudents,sectionnumbers):
    """Record an offering of course `code` in `year`/`session` and link
    instructor `profid` to it via an Activity row.

    Returns the Offering id for the first course generation whose
    end_year covers `year`; returns None implicitly when no generation
    matches.

    NOTE(review): the bare `except:` swallows any error from
    get_or_create and simply retries the lookup -- consider narrowing it
    to the specific peewee exception.
    """
    # Walk this course's generations, oldest end_year first, and use the
    # first one still valid for the requested year.
    for x in CourseGeneration.select().join(Course).where(Course.code==code).order_by(CourseGeneration.end_year.asc()):
        if int(x.end_year)>=int(year):
            ses=Semester.select().where(Semester.year==year,Semester.session==session).get()
            try:
                Offering.get_or_create(enrolment=numberofstudents,semester=ses,generation=x.id,sections=sectionnumbers,reviewed=False)
                A=Offering.select().where(Offering.enrolment==numberofstudents,Offering.semester==ses,Offering.generation==x.id,Offering.sections==sectionnumbers).get()
            except:
                A=Offering.select().where(Offering.enrolment==numberofstudents,Offering.semester==ses,Offering.generation==x.id,Offering.sections==sectionnumbers).get()
            # Link the instructor to this offering.
            Activity.get_or_create(subject=profid,offering=A)
            return A.id
def weight_calc(OID):
    """Compute the teaching weight of Offering `OID`.

    Looks up the offering's course generation, derives a tutorial weight
    `wd1` from the generation's free-text `other_info`, and delegates the
    arithmetic to fix().

    NOTE(review): three of the `if` branches test the identical string
    'one tutorial hour per week' -- duplicates, not distinct cases.
    NOTE(review): `CourseGeneration.select().join(off)` joins on an
    Offering *instance*; confirm this query behaves as intended in peewee.
    """
    off=Offering.select().where(Offering.id==OID).get()
    gen=CourseGeneration.select().join(off).where(CourseGeneration.id==off.generation).get()
    b1 = gen.credit_hours
    c1 = gen.labs
    d1= gen.other_info
    # Map known tutorial descriptions to a tutorial-hours weight.
    wd1 = 0
    if d1 == 'up to eight tutorial sessions per semester':
        wd1 = float(.07)
    if d1 == 'tutorial one hour per week':
        wd1 = float(.14)
    if d1 == 'tutorial 1 hour per week':
        wd1 = float(.14)
    if d1 == 'one tutorial hour per week':
        wd1 = float(.14)
    if d1 == 'one tutorial hour per week':
        wd1 = float(.14)
    if d1 == 'one tutorial hour per week':
        wd1 = float(.14)
    if d1 == '1 client meeting per week, 1 tutorial per week':
        wd1 = float(.14)
    weight1=fix(off.enrolment,off.sections,b1,c1,wd1)
    return weight1
def fix(numberofstudents, sectionnumbers, b1, c1, wd1):
    """Combine enrolment, sections, credit hours (b1), lab hours (c1) and
    tutorial weight (wd1) into a single teaching-weight figure.

    Offerings with fewer than 5 students carry no weight. Enrolments over
    75 add an overload term.
    """
    students = float(numberofstudents)
    if students < 5:
        # Under-enrolled offerings do not count.
        return 0
    base = float(b1) / float(3)
    overload = 0
    if students > 75:
        overload = ((float(b1) + (students - float(75))) / float(75)) * .5
    contact = ((float(b1) + float(c1)) / float(36)) * .27 * float(sectionnumbers)
    tutorial = ((float(b1) + float(wd1)) / float(12)) * .14
    # Same summation order as the original to keep float results identical.
    return contact + tutorial + overload + base
def Psuper(BOOLDoyouwanttocreateanewone, Description ,Weight):
    """Seed the ProjectClass table with the standard project categories.

    Idempotent via get_or_create. When the first argument is truthy, an
    additional custom class is created from Description/Weight.
    """
    ProjectClass.get_or_create(description='Undergraduate project course', weight=0.5)
    ProjectClass.get_or_create(description='senior project supervision of a group of 4 students', weight=(float(0.125)/3))
    ProjectClass.get_or_create(description='Case by Case', weight=2)
    if BOOLDoyouwanttocreateanewone:
        ProjectClass.get_or_create(description=Description, weight=Weight)
def superC(BOOLDoyouwanttocreateanewone, Description, Weight):
    """Seed the SupervisionClass table with standard per-term weights.

    Idempotent via get_or_create. When the first argument is truthy, an
    additional custom class is created from Description/Weight.
    """
    SupervisionClass.get_or_create(description='Gradstudent 1 term', weight=0.047)
    SupervisionClass.get_or_create(description='Masters 1 term', weight=0.07)
    SupervisionClass.get_or_create(description='Doctoral 1 term', weight=(float(.32)/3))
    if BOOLDoyouwanttocreateanewone:
        SupervisionClass.get_or_create(description=Description, weight=Weight)
def supera(TermS, profid, Studentid, supervisoncalss, session):
    """Record a student supervision for `profid` in year `TermS`/`session`.

    Creates (idempotently) the Supervision row and an Activity linking the
    professor to it with split=1.
    """
    ses=Semester.select().where(Semester.year==TermS,Semester.session==session).get()
    Supervision.get_or_create(student_id=Studentid,supervision_class_id=supervisoncalss,semester=ses)
    A=Supervision.select().where(Supervision.student_id==Studentid,Supervision.supervision_class_id==supervisoncalss,Supervision.semester==ses).get()
    Activity.get_or_create(subject=profid,supervision=A, split=1)
def Psupera(TermS, profid, team_id, supervisoncalss, session):
    """Record a project-team supervision for `profid` in `TermS`/`session`.

    Creates (idempotently) the ProjectSupervision row and an Activity
    linking the professor to it with split=1.
    """
    ses=Semester.select().where(Semester.year==TermS,Semester.session==session).get()
    ProjectSupervision.get_or_create(team_id=team_id,project_class_id=supervisoncalss,semester=ses)
    A=ProjectSupervision.select().where(ProjectSupervision.team_id==team_id,ProjectSupervision.project_class_id==supervisoncalss,ProjectSupervision.semester==ses).get()
    Activity.get_or_create(subject=profid,project=A, split=1)
def person(name, email, staryear, startsem):
    """Ensure a Semester (staryear/startsem) exists, then create a Person
    starting in it (reviewed=False).

    NOTE(review): both bare try/except blocks swallow *all* errors,
    including genuine database failures -- consider catching only the
    relevant peewee exception.
    """
    try:Semester.get_or_create(year= staryear, session = startsem)
    except:pass
    ses=Semester.select().where(Semester.year==staryear,Semester.session==startsem).get()
    try:Person.get_or_create(name=name,email=email,start=ses.id,reviewed=False)
    except:pass
def student(name, email):
    """Create a Student row (no-op if an identical one already exists)."""
    Student.get_or_create(name=name,email=email)
def team(name, email):
    """Create a ProjectType row (no-op if one already exists).

    NOTE(review): despite its name and parameters, this creates a
    ProjectType and stores the second argument as its *description*;
    the signature looks copy-pasted from student() -- confirm intent.
    """
    ProjectType.get_or_create(name=name,description=email)
def deficit_func(prof_id,year_first,year_second):
    """Sum a professor's accumulated load deficit over the inclusive year
    range [year_first, year_second].

    Closed PersonalLoad records contribute deficit multiplied by the
    number of years they overlap the window; the open-ended record
    (end is NULL) contributes for the years since its applied_start.

    NOTE(review): `PersonalLoad.end.year` inside where() accesses an
    attribute of a peewee Field, not of a date value -- confirm these
    queries filter as intended.
    """
    now = datetime.datetime.now()  # unused; kept as-is
    defic = PersonalLoad.select().join(Person).where(Person.id == prof_id,PersonalLoad.end.year<=year_second)
    totaldef = 0
    for x in defic:
        if x.applied_final<year_first:
            # Ended before the window: contributes nothing.
            pass
        elif x.applied_start<=year_first:
            # Straddles the window start: count only from year_first.
            totaldef+=x.deficit*(x.applied_final-year_first+1)
        else:
            # Entirely inside the window: count its full span.
            totaldef+=x.deficit*(x.applied_final-x.applied_start+1)
    # The single still-open deficit record (no end date).
    defic2=PersonalLoad.select().join(Person).where(Person.id == prof_id, PersonalLoad.end.year==None).get()
    if year_second >= defic2.applied_start:
        totaldef+=defic2.deficit*(year_second-defic2.applied_start)
    return totaldef
def currentsem():
    """Return the current session id from today's month:
    1 for Jul-Dec, 2 for Jan-Apr, 3 for May-Jun.
    """
    month = datetime.datetime.now().month
    if month >= 7:
        return 1
    if month <= 4:
        return 2
    return 3
def import_file(selector):
    """Load '<selector>.csv' into the peewee model named `selector`.

    Unknown selectors are silently ignored, exactly as the original
    if-chain behaved. Rewritten from fourteen stand-alone `if` statements
    into a single name->model dispatch table.
    """
    # Preserve the original's immediate TypeError for non-string input.
    name = selector + '.csv'
    models = {
        'Person': Person,
        'Supervision': Supervision,
        'SupervisionClass': SupervisionClass,
        'Course': Course,
        'CourseGeneration': CourseGeneration,
        'Student': Student,
        'Semester': Semester,
        'Offering': Offering,
        'Role': Role,
        'ProjectClass': ProjectClass,
        'ProjectType': ProjectType,
        'Activity': Activity,
        'ProjectSupervision': ProjectSupervision,
        'Adjustment': Adjustment,
    }
    model = models.get(selector)
    if model is not None:
        load_csv(model, name)
def export_file(selector, name='default'):
    """Dump a table (or an explicit peewee query) to '<name>.csv'.

    `selector` is either the name of a known model (e.g. 'Person'), whose
    whole table is dumped ordered by its usual key column, or any peewee
    query object, which is dumped as-is.

    Bug fix: the original's trailing `else` was bound only to the *last*
    `if` ('Adjustment'), so any other string selector was dumped twice --
    once by its matching branch and once by the `else`, which then passed
    the bare string to dump_csv. Exactly one branch runs now.
    """
    # model name -> (model class, column giving a stable dump order)
    tables = {
        'Person': (Person, Person.id),
        'Supervision': (Supervision, Supervision.id),
        'SupervisionClass': (SupervisionClass, SupervisionClass.id),
        'Course': (Course, Course.code),
        'CourseGeneration': (CourseGeneration, CourseGeneration.id),
        'Student': (Student, Student.id),
        'Semester': (Semester, Semester.id),
        'Offering': (Offering, Offering.id),
        'Role': (Role, Role.id),
        'ProjectClass': (ProjectClass, ProjectClass.id),
        'ProjectType': (ProjectType, ProjectType.id),
        'Activity': (Activity, Activity.subject),
        'ProjectSupervision': (ProjectSupervision, ProjectSupervision.id),
        'Adjustment': (Adjustment, Adjustment.id),
    }
    with open(str(name)+'.csv', 'w') as fh:
        key = str(selector)
        if key in tables:
            model, order_col = tables[key]
            dump_csv(model.select().order_by(order_col), fh)
        else:
            # Assume `selector` is already a query object.
            dump_csv(selector, fh)
def anyplot(semester,name,weights):
    """Render a bar chart of `weights` aggregated per semester and save it
    to '<name>.pdf'.

    `semester` alternates [year, session, year, session, ...]; each pair
    is turned into a '<year>0<session>' label and paired with the
    corresponding entry of `weights`, then aggregated via matchandsort().
    The first word of `name` becomes the chart title; a title starting
    with 'project' widens the y-axis range.

    NOTE(review): plt.bar(left=...) is the pre-2.0 matplotlib signature;
    newer releases removed the `left` keyword -- confirm the pinned
    matplotlib version.
    """
    # rename need test data again
    var = None
    p1 = re.compile(r"\w+")
    p2 = p1.findall(name)
    listany=list()
    counter=-1
    # Fold (year, session) pairs plus the matching weight into one flat
    # [label, weight, label, weight, ...] list for matchandsort().
    for x,y in enumerate(semester):
        if x % 2==0:
            var=y
        else:
            counter+=1
            listany.append(str(var)+'0'+str(y))
            listany.append(weights[counter])
    list3,list4=matchandsort(listany)
    width = 1
    # Project charts use a taller y-axis (0..3) than course charts (0..1.5).
    if p2[0]=='project':
        stack=3
    else:
        stack=1.5
    N = len(list4)
    ind = np.arange(N)
    plt.bar(left=ind, height=list3, width=width, color='#d62728')
    plt.ylabel('Credit Value')
    plt.xlabel('Semester in format (year)(semester id)')
    plt.title(p2[0])
    plt.yticks(np.arange(0, stack, 0.125))
    plt.xticks(ind, list4, rotation='vertical')
    plt.savefig(str(name) + '.pdf', bbox_inches='tight')
    plt.close()
def matchandsort(diction_of_var):
    """Aggregate an alternating [term, value, term, value, ...] list by
    term and return (values, terms), both ordered by ascending term.

    Bug fix: the original appended *every* term to its terms list -- even
    repeats it had already matched -- while only growing the values list
    for new terms. The two lists drifted out of alignment, so zip() later
    paired values with the wrong terms (or silently dropped entries)
    whenever a repeated term was followed by a new one. Aggregating into
    a dict keeps each term/total pair together by construction.
    """
    totals = {}
    # Pair up even-index terms with odd-index values; a trailing unmatched
    # term (odd-length input) is dropped, matching the original's zip
    # truncation.
    for term, value in zip(diction_of_var[0::2], diction_of_var[1::2]):
        totals[term] = totals.get(term, 0) + value
    ordered_terms = sorted(totals)
    return [totals[t] for t in ordered_terms], ordered_terms
def offerplot(dict_temp2,name,scale='default'):
    """Render a per-term bar chart of aggregated weights and save it to
    '<name>.pdf'.

    `dict_temp2` alternates [term, weight, term, weight, ...] and is
    aggregated by matchandsort(). `scale` selects the y-tick range:
    'default' (0..6 step .25) or 'offer' (0..-1000 step -125); anything
    else warns and falls back to the default.
    """
    # workaround1/2/3 are the np.arange(start, stop, step) y-tick args.
    if scale=='default':
        workaround1 = 0
        workaround2 = 6
        workaround3 = 0.25
    elif scale=='offer':
        workaround1 = 0
        workaround2 = -1000
        workaround3 = -125
    else:
        # Unknown scale: warn in red and use the default range.
        print cr+'scale not reconized'
        workaround1 = 0
        workaround2 = 6
        workaround3 = 0.25
    # First word of `name` becomes the chart title.
    p1 = re.compile(r"\w+")
    p2 = p1.findall(name)
    width = 1
    total_weight,year_term=matchandsort(dict_temp2)
    N = len(year_term)
    ind = np.arange(N)
    # NOTE(review): plt.bar(left=...) is the pre-2.0 matplotlib signature.
    plt.bar(left=ind,height=total_weight,width=width,color='#d62728')
    plt.ylabel('Credit Value')
    plt.xlabel('Semester Term')
    plt.title(p2[0])
    plt.yticks(np.arange(workaround1,workaround2,workaround3))
    plt.xticks(ind, year_term, rotation='vertical')
    plt.savefig(str(name)+'.pdf',bbox_inches='tight')
    plt.close()
def set_false():
    """Clear the 'reviewed' flag on every flagged row of the reviewable
    tables (Person, Course, CourseGeneration, Offering).

    Rewritten from four identical copy-pasted loops into one loop over
    the models; per-row behavior (select, test, targeted update) is
    unchanged.
    """
    for model in (Person, Course, CourseGeneration, Offering):
        for row in model.select():
            if row.reviewed:
                model.update(reviewed=False).where(model.id == row.id).execute()
def termselect(year):
    """Return Semester rows up to and including `year`.

    The sentinel string 'true' means "no filter": every semester is
    returned.
    """
    if year != 'true':
        return Semester.select().where(Semester.year <= year)
    return Semester.select()
def percent():
    """Fuzzy-match Person rows against 'Faculty and Staff List.csv' and,
    on a confident first+last initial match, mark the person reviewed and
    fill in their email from the CSV.

    NOTE(review): the CSV handle is never closed and the name `file`
    shadows the builtin; p1/p3 are needlessly recompiled every row.
    NOTE(review): '[A-z]' in the patterns also matches the punctuation
    between 'Z' and 'a' -- likely intended as '[A-Za-z]'.
    """
    list_teach = list()
    file = open('Faculty and Staff List.csv')
    person = Person.select()
    # p2: first initial of a name; p3: any char + capital (last-name
    # initial); p4: leading capital; p5: everything after the first comma
    # (the email column).
    p2 = re.compile(r"[A-Z]\s")
    p3 = re.compile(r".[A-Z]")
    p4 = re.compile(r"^[A-Z]")
    p5 = re.compile(r"[,][A-z-0-9].+")
    emaildict=dict()
    # Build 'Last, First' -> email mapping from the CSV.
    for row in file:
        p1 = re.compile(r"[A-z]+[,][ ][A-z]+")
        p3 = re.compile(r".[A-Z]")
        rowE = str(p5.findall(row)).strip("''[],")
        row1 = str(p1.findall(row)).strip("''[],")
        emaildict[row1]=rowE
        list_teach.append(row1)
    # Fuzzy-match each DB person against the CSV names; require score > 50
    # plus agreeing first/last initials before updating the row.
    for peps in person:
        pep_first=p2.findall(peps.name)
        pep_last = p3.findall(peps.name)
        guess=process.extractOne(peps.name,list_teach)
        if guess[1]>50:
            row3 = p3.findall(guess[0])
            row4= p4.findall(guess[0])
            pep_first=str(pep_first).strip("''[]u ")
            row3=str(row3).strip("''[]u ")
            pep_last=str(pep_last).strip("''[]u ")
            row4=str(row4).strip("''[]u ")
            if row3 == pep_first and row4==pep_last:
                print rs+'I am confident that '+cg+str(peps.name)+rs+' is '+cg+str(guess[0])
                a = Person.update(reviewed=True, email=emaildict.get(guess[0])).where(Person.name == peps.name)
                a.execute()
| {
"content_hash": "f4cbb622368b0eb63512ba18e79f0ac6",
"timestamp": "",
"source": "github",
"line_count": 437,
"max_line_length": 168,
"avg_line_length": 34.27231121281464,
"alnum_prop": 0.5944448153835882,
"repo_name": "memorial-ece/teaching-equivalencies",
"id": "b283e5dfcc5d43c9d6b1d645f6a6e01d5da3732c",
"size": "15588",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Core.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1369"
},
{
"name": "HTML",
"bytes": "39059"
},
{
"name": "JavaScript",
"bytes": "20214"
},
{
"name": "Python",
"bytes": "86507"
}
],
"symlink_target": ""
} |
from tensorforce.environments import Environment
class StateSettableEnvironment(Environment):
"""
An Environment that implements the set_state method to set the current state
to some new state using setter instructions.
"""
def set_state(self, **kwargs):
"""
Sets the current state of the environment manually to some other state and returns a new observation.
Args:
**kwargs: The set instruction(s) to be executed by the environment.
A single set instruction usually set a single property of the
state/observation vector to some new value.
Returns: The observation dictionary of the Environment after(!) setting it to the new state.
"""
raise NotImplementedError
| {
"content_hash": "ef64736cd584ddb89c0f3c6a503c1e7d",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 109,
"avg_line_length": 39.65,
"alnum_prop": 0.669609079445145,
"repo_name": "lefnire/tensorforce",
"id": "dc679c808854e5a6d69e8da671bf6293360914aa",
"size": "1473",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorforce/contrib/state_settable_environment.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "753422"
}
],
"symlink_target": ""
} |
import scrapy
import os
import datetime
from scrapy.selector import Selector
from dmm_joke.items import DVDDetailListItem
from scrapy.spiders import CrawlSpider,Rule
from scrapy.linkextractors import LinkExtractor
from scrapy_redis.spiders import RedisCrawlSpider
class DvdListSpiderSpider(CrawlSpider):
name = "dvd_list_spider"
allowed_domains = ["dmm.com"]
start_urls = ['http://www.dmm.com/rental/ppr/-/list/=/article=category/id=japanese/limit=120/sort=date/page=2']
# start_urls = ["file:///tmp/dvd_genre.html"]
rules = (
Rule(LinkExtractor(allow=('rental/ppr/-/list/=/article=category/id=japanese/limit=120/sort=date/page', ),
deny =('limit=30','limit=60','view=text',)
), callback='parse_item',follow=True),
)
def parse_item(self, response):
sel = Selector(response)
items = []
sel_list = sel.xpath("//*[@id='list']/li")
for m_field in sel_list:
d_item = DVDDetailListItem()
d_item['m_type'] = 'dvd_list'
d_item['link'] = m_field.xpath("div/p[@class='tmb']/a/@href").extract()[0]
d_item['img_url'] = m_field.xpath("div/p[@class='tmb']/a/span[1]/img/@src").extract()[0]
d_item['img_desc'] = m_field.xpath("div/p[@class='tmb']/a/span[1]/img/@alt").extract()[0]
d_item['price'] = m_field.xpath("div/div/p[1]/text()").extract()[0]
items.append(d_item)
return items
| {
"content_hash": "0187b9955b65afefc7f5c0697bd2d1b9",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 115,
"avg_line_length": 42.85294117647059,
"alnum_prop": 0.6170212765957447,
"repo_name": "counsellors/scrapy_dmm_dvd",
"id": "0f8577395bcada1e514327785811621e672d972d",
"size": "1481",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dmm_joke/dmm_joke/spiders/dvd_list_spider.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "81893"
},
{
"name": "Python",
"bytes": "24712"
}
],
"symlink_target": ""
} |
import cherrypy
import uuid
import os
from urllib.parse import parse_qs, urlsplit
from ConfigManager.ConfigArgs import ConfigArg, ConfigArgs
from ConfigManager.ConfigManager import ConfigManager
from rf2db.utils import urlutil
from rf2db.parameterparser.ParmParser import booleanparam
_curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
settings_filename = os.path.join(os.path.dirname(__file__), '..', '..', 'settings.conf')
config_parms = ConfigArgs('authentication',
[ConfigArg('autobypass', abbrev='a', help='True means skip the authentication screen'),
ConfigArg('manualbypass', abbrev='m', help='True means bypass=1 is allowed')
])
settings = ConfigManager(config_parms)
license_html = open(os.path.join(_curdir, '..', 'html', 'license.html')).read()
SESSION_KEY = '_copyright_ack'
CHALLENGE = '_challenge'
FROM_PAGE = '_from_page'
def check_auth(*_, **kwargs):
""" Check whether the user is authorized to use a page that has IHTSDO content. A user can be authorized by:
1. Setting tools.auth.no_auth to True in the class header
2. Carrying the passed session key in the request header
3. Setting autobypass in the settings to "True" (debug mode)
4. Adding a "bypass" parameter to the request header and setting manualbypass in the settings to True
"""
# Don't try if disabled
if booleanparam.v(settings.autobypass, default=False) or kwargs.get('no_auth'):
return
# Check for already authenticated
if cherrypy.session.get(SESSION_KEY) and cherrypy.session.get(SESSION_KEY) == cherrypy.session.get(CHALLENGE):
return
# If the kwargs include a bypass keyword, go on as well
rqst = cherrypy.request.request_line.split()[1]
if booleanparam.v(settings.manualbypass, default=False) and 'bypass' in parse_qs(urlsplit(rqst).query, True):
return
# Not authorized redirect it to the authorization session
cherrypy.session[CHALLENGE] = uuid.uuid4()
cherrypy.session[FROM_PAGE] = rqst
urlutil.redirect('license')
cherrypy.tools.auth = cherrypy.Tool('before_handler', check_auth)
class License(object):
_cp_config = {
'tools.auth.no_auth': True}
@cherrypy.expose
@cherrypy.tools.allow()
def index(self):
return license_html % {'token': cherrypy.session.get(CHALLENGE, 'none')}
@cherrypy.expose
@cherrypy.tools.allow(methods=['POST'])
def submit(self, accept=None, token=None):
if accept and token == str(cherrypy.session.get(CHALLENGE, 'NoN')):
cherrypy.session[SESSION_KEY] = cherrypy.session[CHALLENGE]
raise cherrypy.HTTPRedirect(cherrypy.session.pop(FROM_PAGE))
else:
raise cherrypy.HTTPRedirect("http://ihtsdo.org/license")
| {
"content_hash": "e29a112899c26c6530d8337e7d61837a",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 114,
"avg_line_length": 37.3421052631579,
"alnum_prop": 0.6811134601832276,
"repo_name": "cts2/ihtsdoauth",
"id": "e425cd06d6df9fc34e73e595b9f9819a33424cf0",
"size": "4415",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "auth/ihtsdoauth.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "5645"
},
{
"name": "Python",
"bytes": "6015"
}
],
"symlink_target": ""
} |
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1DaemonSetUpdateStrategy(object):
    """Swagger model for a DaemonSet update strategy.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # Maps attribute name -> swagger type (walked by to_dict).
    swagger_types = {
        'rolling_update': 'V1beta1RollingUpdateDaemonSet',
        'type': 'str'
    }

    # Maps attribute name -> JSON key in the API definition.
    attribute_map = {
        'rolling_update': 'rollingUpdate',
        'type': 'type'
    }

    def __init__(self, rolling_update=None, type=None):
        """V1beta1DaemonSetUpdateStrategy - a model defined in Swagger."""
        self._rolling_update = None
        self._type = None
        self.discriminator = None
        if rolling_update is not None:
            self.rolling_update = rolling_update
        if type is not None:
            self.type = type

    @property
    def rolling_update(self):
        """Rolling update config params. Present only if type = "RollingUpdate".

        :rtype: V1beta1RollingUpdateDaemonSet
        """
        return self._rolling_update

    @rolling_update.setter
    def rolling_update(self, rolling_update):
        """Set the rolling update config params of this strategy."""
        self._rolling_update = rolling_update

    @property
    def type(self):
        """Type of daemon set update. Can be "RollingUpdate" or "OnDelete". Default is OnDelete.

        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Set the daemon set update type of this strategy."""
        self._type = type

    def to_dict(self):
        """Return the model properties as a plain dict."""
        result = {}
        for name, _ in iteritems(self.swagger_types):
            value = getattr(self, name)
            if isinstance(value, list):
                result[name] = [item.to_dict() if hasattr(item, "to_dict") else item
                                for item in value]
            elif hasattr(value, "to_dict"):
                result[name] = value.to_dict()
            elif isinstance(value, dict):
                result[name] = {key: val.to_dict() if hasattr(val, "to_dict") else val
                                for key, val in value.items()}
            else:
                result[name] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two strategies are equal iff both are this model type with equal state."""
        if not isinstance(other, V1beta1DaemonSetUpdateStrategy):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| {
"content_hash": "cd8e0bd48d789d2d681d1c22714b081a",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 105,
"avg_line_length": 28.32894736842105,
"alnum_prop": 0.570599163957269,
"repo_name": "mbohlool/client-python",
"id": "004c46a74aafc1428801c8f1bfdb070272a57933",
"size": "4323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/v1beta1_daemon_set_update_strategy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8417639"
},
{
"name": "Shell",
"bytes": "16830"
}
],
"symlink_target": ""
} |
"""
Simple Training CLI.
"""
import argparse
import json
import os
import pickle
import random
import shutil
import sys
from contextlib import ExitStack
from typing import Optional, Dict
import mxnet as mx
import numpy as np
from sockeye.log import setup_main_logger, log_sockeye_version
from sockeye.utils import acquire_gpus, check_condition, get_num_gpus, expand_requested_device_ids
from . import arguments
from . import attention
from . import constants as C
from . import coverage
from . import data_io
from . import decoder
from . import encoder
from . import initializer
from . import lexicon
from . import loss
from . import lr_scheduler
from . import model
from . import rnn
from . import training
from . import vocab
def none_if_negative(val):
    """Map a negative sentinel value to ``None``; return *val* unchanged otherwise."""
    if val < 0:
        return None
    return val
def _build_or_load_vocab(existing_vocab_path: Optional[str], data_path: str, num_words: int,
                         word_min_count: int) -> Dict:
    """Load the vocabulary from *existing_vocab_path* if given, otherwise build one from *data_path*.

    :param existing_vocab_path: Path to a serialized (JSON) vocabulary, or None.
    :param data_path: Training data to build the vocabulary from when no path is given.
    :param num_words: Maximum vocabulary size when building.
    :param word_min_count: Minimum token frequency to be included when building.
    :return: The vocabulary mapping.
    """
    if existing_vocab_path is not None:
        return vocab.vocab_from_json(existing_vocab_path)
    return vocab.build_from_path(data_path,
                                 num_words=num_words,
                                 min_count=word_min_count)
def _list_to_tuple(v):
"""Convert v to a tuple if it is a list."""
if isinstance(v, list):
return tuple(v)
return v
def _dict_difference(dict1: Dict, dict2: Dict):
diffs = set()
for k, v in dict1.items():
# Note: A list and a tuple with the same values is considered equal
# (this is due to json deserializing former tuples as list).
if k not in dict2 or _list_to_tuple(dict2[k]) != _list_to_tuple(v):
diffs.add(k)
return diffs
def main():
    """Entry point of the training CLI.

    Parses the command line, prepares the output folder (handling overwrite
    and resumption of a partial training), acquires devices, builds or loads
    vocabularies and data iterators, assembles the model/attention/loss
    configurations and finally fits the training model.
    """
    params = argparse.ArgumentParser(description='CLI to train sockeye sequence-to-sequence models.')
    arguments.add_io_args(params)
    arguments.add_model_parameters(params)
    arguments.add_training_args(params)
    arguments.add_device_args(params)
    args = params.parse_args()
    # seed the RNGs
    np.random.seed(args.seed)
    random.seed(args.seed)
    mx.random.seed(args.seed)
    if args.use_fused_rnn:
        check_condition(not args.use_cpu, "GPU required for FusedRNN cells")
    if args.rnn_residual_connections:
        check_condition(args.rnn_num_layers > 2, "Residual connections require at least 3 RNN layers")
    check_condition(args.optimized_metric == C.BLEU or args.optimized_metric in args.metrics,
                    "Must optimize either BLEU or one of tracked metrics (--metrics)")
    # Checking status of output folder, resumption, etc.
    # Create temporary logger to console only
    logger = setup_main_logger(__name__, file_logging=False, console=not args.quiet)
    output_folder = os.path.abspath(args.output)
    resume_training = False
    training_state_dir = os.path.join(output_folder, C.TRAINING_STATE_DIRNAME)
    if os.path.exists(output_folder):
        if args.overwrite_output:
            logger.info("Removing existing output folder %s.", output_folder)
            shutil.rmtree(output_folder)
            os.makedirs(output_folder)
        elif os.path.exists(training_state_dir):
            # A partial training exists: resume only if the arguments match the
            # previously saved ones (modulo args that may safely differ).
            with open(os.path.join(output_folder, C.ARGS_STATE_NAME), "r") as fp:
                old_args = json.load(fp)
            arg_diffs = _dict_difference(vars(args), old_args) | _dict_difference(old_args, vars(args))
            # Remove args that may differ without affecting the training.
            arg_diffs -= set(C.ARGS_MAY_DIFFER)
            # allow different device-ids provided their total count is the same
            if 'device_ids' in arg_diffs and len(old_args['device_ids']) == len(vars(args)['device_ids']):
                arg_diffs.discard('device_ids')
            if not arg_diffs:
                resume_training = True
            else:
                # We do not have the logger yet
                logger.error("Mismatch in arguments for training continuation.")
                logger.error("Differing arguments: %s.", ", ".join(arg_diffs))
                sys.exit(1)
        else:
            logger.error("Refusing to overwrite existing output folder %s.", output_folder)
            sys.exit(1)
    else:
        os.makedirs(output_folder)
    # Re-create the logger with file logging into the (now existing) output folder.
    logger = setup_main_logger(__name__,
                               file_logging=True,
                               console=not args.quiet, path=os.path.join(output_folder, C.LOG_NAME))
    log_sockeye_version(logger)
    logger.info("Command: %s", " ".join(sys.argv))
    logger.info("Arguments: %s", args)
    # Persist the arguments so a later run can check resumption compatibility.
    with open(os.path.join(output_folder, C.ARGS_STATE_NAME), "w") as fp:
        json.dump(vars(args), fp)
    with ExitStack() as exit_stack:
        # context
        if args.use_cpu:
            logger.info("Device: CPU")
            context = [mx.cpu()]
        else:
            num_gpus = get_num_gpus()
            check_condition(num_gpus >= 1,
                            "No GPUs found, consider running on the CPU with --use-cpu "
                            "(note: check depends on nvidia-smi and this could also mean that the nvidia-smi "
                            "binary isn't on the path).")
            if args.disable_device_locking:
                context = expand_requested_device_ids(args.device_ids)
            else:
                # acquire_gpus is a context manager: GPUs are released on exit.
                context = exit_stack.enter_context(acquire_gpus(args.device_ids, lock_dir=args.lock_dir))
            logger.info("Device(s): GPU %s", context)
            context = [mx.gpu(gpu_id) for gpu_id in context]
        # load existing or create vocabs
        if resume_training:
            vocab_source = vocab.vocab_from_json_or_pickle(os.path.join(output_folder, C.VOCAB_SRC_NAME))
            vocab_target = vocab.vocab_from_json_or_pickle(os.path.join(output_folder, C.VOCAB_TRG_NAME))
        else:
            num_words_source = args.num_words if args.num_words_source is None else args.num_words_source
            vocab_source = _build_or_load_vocab(args.source_vocab, args.source, num_words_source, args.word_min_count)
            vocab.vocab_to_json(vocab_source, os.path.join(output_folder, C.VOCAB_SRC_NAME) + C.JSON_SUFFIX)
            num_words_target = args.num_words if args.num_words_target is None else args.num_words_target
            vocab_target = _build_or_load_vocab(args.target_vocab, args.target, num_words_target, args.word_min_count)
            vocab.vocab_to_json(vocab_target, os.path.join(output_folder, C.VOCAB_TRG_NAME) + C.JSON_SUFFIX)
        vocab_source_size = len(vocab_source)
        vocab_target_size = len(vocab_target)
        logger.info("Vocabulary sizes: source=%d target=%d", vocab_source_size, vocab_target_size)
        config_data = data_io.DataConfig(os.path.abspath(args.source),
                                         os.path.abspath(args.target),
                                         os.path.abspath(args.validation_source),
                                         os.path.abspath(args.validation_target),
                                         args.source_vocab,
                                         args.target_vocab)
        # create data iterators
        max_seq_len_source = args.max_seq_len if args.max_seq_len_source is None else args.max_seq_len_source
        max_seq_len_target = args.max_seq_len if args.max_seq_len_target is None else args.max_seq_len_target
        train_iter, eval_iter = data_io.get_training_data_iters(source=config_data.source,
                                                                target=config_data.target,
                                                                validation_source=config_data.validation_source,
                                                                validation_target=config_data.validation_target,
                                                                vocab_source=vocab_source,
                                                                vocab_target=vocab_target,
                                                                batch_size=args.batch_size,
                                                                fill_up=args.fill_up,
                                                                max_seq_len_source=max_seq_len_source,
                                                                max_seq_len_target=max_seq_len_target,
                                                                bucketing=not args.no_bucketing,
                                                                bucket_width=args.bucket_width)
        # learning rate scheduling
        learning_rate_half_life = none_if_negative(args.learning_rate_half_life)
        # TODO: The loading for continuation of the scheduler is done separately from the other parts
        if not resume_training:
            lr_scheduler_instance = lr_scheduler.get_lr_scheduler(args.learning_rate_scheduler_type,
                                                                  args.checkpoint_frequency,
                                                                  learning_rate_half_life,
                                                                  args.learning_rate_reduce_factor,
                                                                  args.learning_rate_reduce_num_not_improved)
        else:
            with open(os.path.join(training_state_dir, C.SCHEDULER_STATE_NAME), "rb") as fp:
                lr_scheduler_instance = pickle.load(fp)
        # model configuration
        num_embed_source = args.num_embed if args.num_embed_source is None else args.num_embed_source
        num_embed_target = args.num_embed if args.num_embed_target is None else args.num_embed_target
        config_rnn = rnn.RNNConfig(cell_type=args.rnn_cell_type,
                                   num_hidden=args.rnn_num_hidden,
                                   num_layers=args.rnn_num_layers,
                                   dropout=args.dropout,
                                   residual=args.rnn_residual_connections,
                                   forget_bias=args.rnn_forget_bias)
        config_conv = None
        if args.encoder == C.RNN_WITH_CONV_EMBED_NAME:
            config_conv = encoder.ConvolutionalEmbeddingConfig(num_embed=num_embed_source,
                                                               max_filter_width=args.conv_embed_max_filter_width,
                                                               num_filters=args.conv_embed_num_filters,
                                                               pool_stride=args.conv_embed_pool_stride,
                                                               num_highway_layers=args.conv_embed_num_highway_layers,
                                                               dropout=args.dropout)
        config_encoder = encoder.RecurrentEncoderConfig(vocab_size=vocab_source_size,
                                                        num_embed=num_embed_source,
                                                        rnn_config=config_rnn,
                                                        conv_config=config_conv)
        config_decoder = decoder.RecurrentDecoderConfig(vocab_size=vocab_target_size,
                                                        num_embed=num_embed_target,
                                                        rnn_config=config_rnn,
                                                        dropout=args.dropout,
                                                        weight_tying=args.weight_tying,
                                                        context_gating=args.context_gating,
                                                        layer_normalization=args.layer_normalization)
        attention_num_hidden = args.rnn_num_hidden if not args.attention_num_hidden else args.attention_num_hidden
        config_coverage = None
        if args.attention_type == "coverage":
            config_coverage = coverage.CoverageConfig(type=args.attention_coverage_type,
                                                      num_hidden=args.attention_coverage_num_hidden,
                                                      layer_normalization=args.layer_normalization)
        config_attention = attention.AttentionConfig(type=args.attention_type,
                                                     num_hidden=attention_num_hidden,
                                                     input_previous_word=args.attention_use_prev_word,
                                                     rnn_num_hidden=config_rnn.num_hidden,
                                                     layer_normalization=args.layer_normalization,
                                                     config_coverage=config_coverage)
        config_loss = loss.LossConfig(type=args.loss,
                                      vocab_size=vocab_target_size,
                                      normalize=args.normalize_loss,
                                      smoothed_cross_entropy_alpha=args.smoothed_cross_entropy_alpha)
        model_config = model.ModelConfig(config_data=config_data,
                                         max_seq_len=max_seq_len_source,
                                         vocab_source_size=vocab_source_size,
                                         vocab_target_size=vocab_target_size,
                                         config_encoder=config_encoder,
                                         config_decoder=config_decoder,
                                         config_attention=config_attention,
                                         config_loss=config_loss,
                                         lexical_bias=args.lexical_bias,
                                         learn_lexical_bias=args.learn_lexical_bias)
        model_config.freeze()
        # create training model
        training_model = training.TrainingModel(config=model_config,
                                                context=context,
                                                train_iter=train_iter,
                                                fused=args.use_fused_rnn,
                                                bucketing=not args.no_bucketing,
                                                lr_scheduler=lr_scheduler_instance)
        # We may consider loading the params in TrainingModule, for consistency
        # with the training state saving
        if resume_training:
            logger.info("Found partial training in directory %s. Resuming from saved state.", training_state_dir)
            training_model.load_params_from_file(os.path.join(training_state_dir, C.TRAINING_STATE_PARAMS_NAME))
        elif args.params:
            logger.info("Training will initialize from parameters loaded from '%s'", args.params)
            training_model.load_params_from_file(args.params)
        lexicon_array = lexicon.initialize_lexicon(args.lexical_bias,
                                                   vocab_source, vocab_target) if args.lexical_bias else None
        weight_initializer = initializer.get_initializer(args.rnn_h2h_init, lexicon=lexicon_array)
        optimizer = args.optimizer
        optimizer_params = {'wd': args.weight_decay,
                            "learning_rate": args.initial_learning_rate}
        if lr_scheduler_instance is not None:
            optimizer_params["lr_scheduler"] = lr_scheduler_instance
        clip_gradient = none_if_negative(args.clip_gradient)
        if clip_gradient is not None:
            optimizer_params["clip_gradient"] = clip_gradient
        if args.momentum is not None:
            optimizer_params["momentum"] = args.momentum
        if args.normalize_loss:
            # When normalize_loss is turned on we normalize by the number of non-PAD symbols in a batch which implicitly
            # already contains the number of sentences and therefore we need to disable rescale_grad.
            optimizer_params["rescale_grad"] = 1.0
        else:
            # Making MXNet module API's default scaling factor explicit
            optimizer_params["rescale_grad"] = 1.0 / args.batch_size
        logger.info("Optimizer: %s", optimizer)
        logger.info("Optimizer Parameters: %s", optimizer_params)
        training_model.fit(train_iter, eval_iter,
                           output_folder=output_folder,
                           max_params_files_to_keep=args.keep_last_params,
                           metrics=args.metrics,
                           initializer=weight_initializer,
                           max_updates=args.max_updates,
                           checkpoint_frequency=args.checkpoint_frequency,
                           optimizer=optimizer, optimizer_params=optimizer_params,
                           optimized_metric=args.optimized_metric,
                           max_num_not_improved=args.max_num_checkpoint_not_improved,
                           min_num_epochs=args.min_num_epochs,
                           monitor_bleu=args.monitor_bleu,
                           use_tensorboard=args.use_tensorboard)
# Script entry point: run the training CLI when executed directly.
if __name__ == "__main__":
    main()
| {
"content_hash": "415588a6f611c677da111598f8d0ee7e",
"timestamp": "",
"source": "github",
"line_count": 324,
"max_line_length": 120,
"avg_line_length": 53.04012345679013,
"alnum_prop": 0.5356415478615071,
"repo_name": "KellenSunderland/sockeye",
"id": "3409e98662f96d07b07c3e5e42e5579cc473a9ab",
"size": "17751",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sockeye/train.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "467469"
},
{
"name": "Shell",
"bytes": "1662"
}
],
"symlink_target": ""
} |
import discord
from discord.ext import commands
import asyncio
import sys,hashlib
import traceback
###############################################################
#
# BOT FOR DISCORD
# music and shet
#
###############################################################
#Compare has value for UserSetting to see if user change it.
# if left -> first run
# if change -> can read info for
###############################################################
#
# this code of getting hash for files can be found on
# StackOverFlow, thanks whoever posted this. Forgot the name and URL!
#
###############################################################
#checking both MD5 and SHA1 hash
# BUF_SIZE is totally arbitrary, change for your app!
BUF_SIZE = 65536 # lets read stuff in 64kb chunks!
# Reference digests of the pristine (unedited) UserSetting.txt: if the file
# still hashes to these values the user never edited it, so first-time setup
# is triggered below.
MD5hash="a91c0066275651b0a42e1d0dfc153d26"
SHA1hash="c8435a4ff311e43857a278eeaf70ce0613e77ecf"
#default hash value for the file
md5 = hashlib.md5()
sha1 = hashlib.sha1()
#getting current hash value for the file
# Stream the file in BUF_SIZE chunks so large files are not loaded whole.
with open("UserSetting.txt", 'rb') as f:
    while True:
        data = f.read(BUF_SIZE)
        if not data:
            break
        md5.update(data)
        sha1.update(data)
#default token, login if wrong username, pwd, token provided
# NOTE(review): defaultToken is empty -- the fallback login path below will
# fail unless a real token is filled in here.
defaultToken=""
#Globals are gross, let's pass information with an object
#
#UserSettings object, manages our settings information
#
class UserSettings(object):
    """Carrier for the bot's login configuration (method, credentials, invite).

    NOTE(review): throughout this module the *class* itself is used as the
    settings object (e.g. ``UserSettings.token``), not an instance.
    ``readUserSetting`` below assigns directly to class attributes, which
    replaces (shadows) the ``property`` descriptors defined here -- the rest
    of the module only works because of that shadowing. Confirm this is
    intentional before refactoring.
    """
    #Gets and sets for the fields
    def _setLoginMethod(self, method):
        self._LoginMethod = method
    def _getLoginMethod(self):
        return self._LoginMethod
    def _setToken(self, token):
        self._Token = token
    def _getToken(self):
        return self._Token
    def _setUsername(self, username):
        self._Username = username
    def _getUsername(self):
        return self._Username
    def _setPassword(self, password):
        self._Password = password
    def _getPassword(self):
        return self._Password
    def _setInvite(self, invite):
        self._Invite = invite
    def _getInvite(self):
        return self._Invite
    #Property definitions
    loginMethod = property(_getLoginMethod, _setLoginMethod)
    token = property(_getToken, _setToken)
    username = property(_getUsername, _setUsername)
    password = property(_getPassword, _setPassword)
    invite = property(_getInvite, _setInvite)
    #Method definitions
    #We can read in the settings a little more dynamically or we can manually set them with properties
    # NOTE(review): no ``self`` parameter -- this only works when called
    # through the class (UserSettings.readUserSetting(path)); calling it on
    # an instance passes the instance as ``settingsFile`` and breaks.
    def readUserSetting(settingsFile): #UserSetting.txt
        # Parse "Key= value" lines from the settings file and store the values
        # as *class* attributes of UserSettings.
        settingFile= open(settingsFile, 'r')
        for text in settingFile.readlines():
            things = text.split()
            if things[0] == 'Login_Method=':
                if things[1].lower() == 'token':
                    UserSettings.loginMethod = "token"
                elif things[1].lower() == 'username':
                    UserSettings.loginMethod = "username"
            elif things[0] == 'Token=' and UserSettings.loginMethod == "token":
                UserSettings.token = things[1]
            elif things[0] == 'Username=' and UserSettings.loginMethod == "username":
                UserSettings.username = things[1]
            elif things[0] == 'Password=' and UserSettings.loginMethod == "username":
                UserSettings.password = things[1]
            elif things[0] == 'Invite=':
                UserSettings.invite = things[1]
        settingFile.close()
###################################################################
#
# Running first time set up, get infor from user. update fields
# write it to UserSetting for future use
#
###################################################################
def _firstTimeSetup(settings):
    """Interactively collect login settings on first run.

    Prompts for the login method (token or username/password) and a server
    invite, stores them on *settings* and persists them to UserSetting.txt
    for future runs. Answering 'n' instead re-reads the existing
    UserSetting.txt.

    NOTE(review): the call site passes the ``UserSettings`` *class*, so the
    attributes assigned here become class attributes -- confirm intended.
    """
    print("First time running the bot, required information. Would you like to run the setup? Y/N")
    userInput = None
    while True:
        userInput = input()
        if userInput.lower() == 'y' or userInput.lower() == 'n':
            break
        else:
            print("Invalid input, please enter Y/N")
    #getting information
    if userInput.lower() == 'y':
        print("Login method: token or username and password?- T/U")
        while True:
            userInput = input()
            if userInput.lower() == 't':
                settings.loginMethod = "token"
                break
            elif userInput.lower() == 'u':
                settings.loginMethod = "username"
                break
            else:
                print("Invalid input, T/U")
        if settings.loginMethod == "token":
            settings.token = input("Enter the token: ")
        elif settings.loginMethod == "username":
            settings.username = input("Enter Username: ")
            settings.password = input("Enter Password: ")
        settings.invite = input("Copy and Paste the Server Invite: ")
        # writing the settings to the user file for future use
        # (with-block guarantees the file is closed even on error)
        with open('UserSetting.txt', 'w') as settingFile:
            if settings.loginMethod == "token":
                settingFile.write('Login_Method= token\n')
                settingFile.write('Token= ' + settings.token + '\n')
                settingFile.write('Username= \n')
                settingFile.write('Password= \n')
            else:
                settingFile.write('Login_Method= username\n')
                settingFile.write('Token= \n')
                settingFile.write('Username= '+ settings.username+'\n')
                settingFile.write('Password= '+ settings.password+'\n')
            settingFile.write('Invite= ' + settings.invite)
    elif userInput.lower() == 'n':
        try:
            # Bug fix: the filename must be a string literal -- the bare name
            # UserSetting.txt raised NameError before anything could be read.
            settings.readUserSetting("UserSetting.txt")
        except Exception:
            print("ERROR ---- make sure to put correct information in UserSetting.txt ")
###################################################################
#
# Join a channel or server by invite
#
# Exception: HTTPException - Accepting the invite failed.
# NotFound - The invite is invalid/expired
# Exit when exception occurs
#
###################################################################
def _join_by_invite(Invite):
    """Accept a Discord invite URL so the bot joins the server/channel.

    Prints a diagnostic and exits the process when the invite is invalid,
    expired, or the HTTP request fails.

    NOTE(review): in discord.py ``Client.accept_invite`` is a coroutine and
    is not awaited here, so the invite is likely never actually accepted and
    the exceptions below can never be raised from this call -- confirm
    against the discord.py version in use.
    """
    try:
        print(Invite)
        bot.accept_invite(Invite)
        # bot.send_message(server, "The bot has joined!")
    except (discord.HTTPException, discord.NotFound):
        print("Failed to accept the invite, please check the invite URL, if the bot can join the channel or not")
        # NOTE(review): exits with status 0 even though this is a failure path.
        sys.exit(0)
###################################################################
#
# Display in chat all the options for the bot
#
###################################################################
def _display_option(message):
    """Send the bot's command reference to the channel *message* came from."""
    option= "Options for the bot: \n!join: join a server or channel url that given by user Ex: !join INVITE_URL\n!help: displaying all options\n!music + VOICE_CHANNEL_INVITE : enable music\n!addsong + SONG_URL : add song to playlist, must be youtube or soundcloud URL\n!skipsong : skip current song\n!pause : pause : pause current song\n!resume : resume current song\n!play : start playing\n!volume + NUMBER : setvolume, number between 0.0 to 2.0. 1.0=100%, 2.0=200%"
    # NOTE(review): bot.send_message is a coroutine in discord.py -- calling
    # it without awaiting likely never sends the message; confirm.
    bot.send_message(message.channel, option)
#check hash value
settings = UserSettings()
if MD5hash == md5.hexdigest() and SHA1hash == sha1.hexdigest():
    # UserSetting.txt still matches the pristine digests: first run, ask the
    # user for credentials. (Fixed: the function is named _firstTimeSetup;
    # the old call to firstTimeSetup raised NameError.)
    _firstTimeSetup(UserSettings)
else:
    print("Reading those settings")
    UserSettings.readUserSetting("UserSetting.txt")
#creating an instance of a bot
bot = commands.Bot(command_prefix='!', description="A wicked bad ass bot")
#logging in
try:
    if UserSettings.loginMethod == "token":
        bot.login(UserSettings.token)
        print(UserSettings.token)
    else:
        bot.login(UserSettings.username, UserSettings.password)
except discord.LoginFailure:
    print("Failed to login -- Wrong Token or Username/Password")
    print("Loging in with default token ....")
    bot.login(defaultToken)
except discord.HTTPException:
    print(" An unknown HTTP related error occurred, usually when it isn’t 200 or the known incorrect credentials passing status code.")
    bot.close()
    # Fixed: sys.quit does not exist; sys.exit terminates the process.
    sys.exit(0)
#joining a channel
try:
    _join_by_invite(UserSettings.invite)  # fixed: old name join_by_invite was undefined
except Exception:
    # Exception (not bare except) so a SystemExit raised inside
    # _join_by_invite propagates instead of being reported as an error here.
    print("[-] Looks like we had some issues with the invite... Exiting")
    traceback.print_exc()
    exit(0)
#####################################################################
#
# Listen to user message to see if user give the bot any command
# Command rule: start with ! and a action
# Command: !join +"INVITE_URL" - tell the bot to join a different channel or server
#          !music + "VOICE_CHANNEL_URL" - tell the bot to join a voice channel and stream music
#          !addsong + "SONG_URL- YOUTUBE/SOUNDCLOUD" - add a song to the playlist
#          !play - tell the bot to play music, if not song in queue, play default playlist
#          !skipsong - skip the current song
#          !pause - pause the current song
#          !resume - resume the current song
#          !volume + "NUMBER: 0.0 - 2.0" - set the volume 1.0=100%, 2.0=200%
#          !help - displaying all the option, commands, and syntax
#
######################################################################
@bot.event
@asyncio.coroutine
def on_ready():
    # Log the bot's identity once the Discord connection is established.
    print('Logged in as')
    print(bot.user.name)
    print(bot.user.id)
    print('------')
# Module-level handle on the Audio helper; created lazily by "!music".
__music = None


@bot.event
@asyncio.coroutine
def on_message(message):
    """Dispatch chat commands (prefixed with '!') to the matching bot action.

    See the banner comment above for the full command syntax.
    """
    # Bug fix: without the global declaration the "!music" branch only bound a
    # function-local __music, so every later command saw None and crashed.
    global __music

    def _arg(prefix):
        # Return the text following the command prefix. The old
        # message.content.strip(prefix) stripped *characters* from both ends
        # and mangled arguments (e.g. URLs ending in 'n' or 'o').
        return message.content[len(prefix):]

    if message.content.startswith("!join"):
        _join_by_invite(_arg("!join "))
    if message.content.startswith("!help"):
        _display_option(message)
    if message.content.startswith("!music"):
        # NOTE(review): Audio is neither defined nor imported in this module
        # -- confirm where the Audio class is meant to come from.
        __music = Audio(_arg("!music "), bot)
        print("Music has been enabled!")
    if message.content.startswith("!addsong"):
        #adding song to playlist if music feature is enable
        __music.add_song_to_playlist(_arg("!addsong "))
    if message.content.startswith("!pause"):
        #pausing song
        __music.pause()
    if message.content.startswith("!resume"):
        #resuming song
        __music.resume()
    if message.content.startswith("!skipsong"):
        #skipping song
        __music.skipsong()
    if message.content.startswith("!volume"):
        #setting volume
        __music.set_volume(_arg("!volume "))
    if message.content.startswith("!play"):
        #play music
        __music.play()

bot.run(UserSettings.token)  # Hack for now
| {
"content_hash": "ddad6cdc9d609c0c8dba82f75c4cbb28",
"timestamp": "",
"source": "github",
"line_count": 271,
"max_line_length": 467,
"avg_line_length": 35.981549815498155,
"alnum_prop": 0.6330632755614809,
"repo_name": "ttran49/DISCORDBOT",
"id": "44838b9589364fb324ad34aa65fcd6fd2874c369",
"size": "9753",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17454"
}
],
"symlink_target": ""
} |
"""
Constraint Layer Array (CLA) Class
==================================
.. autoclass:: CLA
:members:
"""
#####################################################################
#This file is part of RGPA.
#Foobar is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#Foobar is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with Foobar. If not, see <http://www.gnu.org/licenses/>.
#-------------------------------------------------------------------
#authors :
#Nicolas AMIOT : nicolas.amiot@univ-rennes1.fr
#Bernard UGUEN : bernard.uguen@univ-rennes1.fr
#Mohamed LAARAIEDH : mohamed.laaraiedh@univ-rennes1.fr
#####################################################################
from pylayers.util.project import *
import numpy as np
import scipy as sp
import time
from pylayers.location.geometric.util.boxn import *
from pylayers.location.geometric.util import geomview as g
from pylayers.location.geometric.util.scene import *
# Optional dependency: pyinterval. Record availability instead of failing at
# import time so the rest of the module works without it.
try:
    from interval import interval,inf
    pyinterval_installed=True
except:
    pyinterval_installed=False
import os
import sys
#__docformat__ = 'reStructuredText'
class CLA(object):
""" Constraint Layer Array class
The Constraint Layer Array gather all constraints and process them.
Attributes
----------
c : list
contraints contained in CLA
type : list
types of contraints contained in CLA
std : list
standard deviation of constraints
vcw : list
scale factor of constraints
Nc : integer
Layer number of current processing
pe : np.array
Position estimated
dlayer : dictionnary
key : Layer number
value : list of Enclosed (0) and ambiguous (1) boxes.
iter : integer
current iteration of refine process
erronous : list
        filled with the numbers of the constraints which are not compatible.
Methods
-------
info(self) : Give info
compute(pe=True,mergeRSS=False,refineRSS=True, NBOXMAX=50, VOLMIN=0.001,HT=True,forceamb=False):
        compute the CLA to estimate the position.
rescale(self,f_vcw,cid=None) : rescale Constraint Box
annulus_bound(self,cid=None) : rescale Constraint
append(self,c) : Append a Constraint to CLA
setvcw(self,vcw): : Set vcw for all constraint
merge2(self,vcw_init=1.0) : Merge all constraint from the CLA
valid_v(self,lv,N) : Test vertexes with all constraints
refine(self,l,NBOXMAX=100,VOLMIN=0.1) : reduce the validity zone
show3(self,l=-1,amb=False,sc='all') : show3
prob(self,c,d) : Compute DDP for the given vertexes
gapdetect(self,l,dlindx) : Gap detection for bimodal solution
min_dist(self,a,b) : OBSOLETE
estpos2(self,l=-1,amb=False) : Position estimation
"""
# MEMBERS
# Nc : number of constraints
# c : list of constraints 1 x Nc
# std : list of standard deviation of constraints 1 x Nc
# if std = 0 it means the constraint is hard and it force the
# the estimated point to belong to the bounding box of this
# constraint
# w : list of weight of constraints 1 x Nc
# if w = 0 it means the constraint is hard and it force the
# the estimated point to belong to the bounding box of this
# constraint
#
# validity : validity array (N x Nc)
# dlayer : dictionnary containing a list of 2 elements :
# - the list of boxes that are inside the validity area (VA)
# - the list of boxes which at least an edge is inside the validity area(VA)
# dpe : dictionnary containing the estimated points
# :Methods:
# info()
# append(c,std)
# remove(c,k)
# merge2()
# layer(lbox,l=-1)
# grid(l=-1,Msz=1000)
# eval(Msz=1000)
# show3()
# estpos(amb=False)
# List of elementary Constraints
def __init__(self, parmsh={}):
self.c = []
self.type = []
self.std = []
self.w = []
self.vcw = []
self.Nc = 0
self.pe = np.array([])
self.dlayer = {}
self.iter = 0
self.erronous = []
self.id = []
self.origin = []
self.runable = [] # does pe is known ?
self.visible = [] # does link physically exist ? aka 2 nodes are in visiblity ?
self.obsolete = [] # is the ldp has been obtain a long time ago
self.usable=[] # constraints are usable = runable + visible
if len(parmsh) == 0:
self.parmsh = parmsh
self.parmsh['display'] = False # launch geomview K
self.parmsh['scene'] = False # display whole scene
self.parmsh['boxes'] = True # display constraint box
self.parmsh['constr_boxes'] = True # display constraint box
self.parmsh['estimated'] = True # display estimated point
else:
self.parmsh = parmsh
    def __repr__(self):
        """Tabular summary of all constraints followed by the estimated position.

        One row per constraint: node id, peer link, type, wstd, position,
        measured value, std, runable and usable flags. TDOA constraints span
        two rows (one per anchor position).
        """
        # NOTE: mutates numpy's global print options as a side effect.
        np.set_printoptions(precision=3)
        s = '{0:4} | {1:6} |{2:4} | {3:4} | {4:15}| {5:9}| {6:5}| {7:7}| {8:6}|'.format('node','peer','type', 'wstd', 'p', 'value', 'std', 'runable' , 'usable' )
        for c in self.c:
            node = c.origin['id']
            peer = c.origin['link']
            # wstd may be absent from the origin dict of some constraints.
            try:
                wstd = c.origin['wstd']
            except:
                wstd = '---'
            if c.type != 'TDOA':
                s = s + '\n' + '{0:4} | {1:6} |{2:4} | {3:4} | {4:15}| {5:9}| {6:5}| {7:7}| {8:6}|'.format(node,peer,c.type,wstd, c.p, c.value, c.std, c.runable, c.usable)
            else:
                # TDOA has two anchor points: print p[0] on the row, p[1] below.
                s = s + '\n' + '{0:4} | {1:6} |{2:4} | {3:4} | {4:15}| {5:9}| {6:5}| {7:7}| {8:6}|'.format(node,peer,c.type,wstd, c.p[0], c.value, c.std, c.runable, c.usable)
                s = s + '\n' + '                                            '+str(c.p[1])
        # s = s + '\n' + '{0:4} | {1:15}| {2:9}| {3:5}| {4:7}| {5:6}| {6:8}| {7:9}'.format(c.type, c.p[0], c.value, c.std, c.runable, c.usable , c.obsolete , c.evaluated)
        # s = s + '\n' + '                  '+str(c.p[1])
        s = s + '\n\n' + 'position evaluated by the CLA\n' + str(self.pe)
        return s
def info(self):
""" get information about constraint
"""
for c in self.c:
c.info()
def update(self):
"""update
update all constraints of the CLA
"""
[c.update() for c in self.c if c.runable]
self.runable=[c.runable for c in self.c]
self.obsolete=[c.obsolete for c in self.c]
self.visible=[c.visible for c in self.c]
self.usable=[c.usable for c in self.c]
def compute(self,pe=True,mergeRSS=False,refineRSS=True, NBOXMAX=50, VOLMIN=0.001,HT=True,forceamb=False):
"""
Compute the cla to estimate the postion
Parameters
----------
pe : boolean
set to True to compute the position estimation store into self.pe
mergeRSS : boolean
True if there is RSS in cla, they are used to find the smallest merge
False (default) even if there is RSS in cla, they are neglected during the merge process
refineRSS :boolean
True (default) if there is RSS in cla, they are used to decide if boxes are enclosed of ambiguous
during the refine process
False if there is RSS in cla, they are ignore during the refine process
NBOXMAX : integer
Choose the maximum boxes generated during the refine process (escape value of the while and recursive function)
NVOLMIN : float
Choose the minimum volume of the boxes obtained during the refine process (escape value of the while and recursive function)
HT : boolean
True if a cluster ppears (2 sets of distinct boxes ) an hypthesis testuing method is applied
in estpos2 method
False no HT method is applied
Notes
-----
Description of the hypothesis testing (HT) method in:
.. [APLU2012] N. Amiot, T. Pedersen, M. Laaraiedh, B. Uguen.
A Hybrid Positioning Method Based on Hypothesis Testing
,Wireless Communications Letters, IEEE, vol.1, no.4, pp.348-351, August 2012 http://ieeexplore.ieee.org.passerelle.univ-rennes1.fr/stamp/stamp.jsp?tp=&arnumber=6205594
Returns
-------
return : boolean
True if the position estimation has been performed.
update a self.pe which contain the estimated position
"""
self.merge2(RSS=mergeRSS)
self.refine(l=self.Nc,NBOXMAX=NBOXMAX, VOLMIN=VOLMIN,RSS=refineRSS)
self.update()
if (sum(self.usable) >= 3) and (pe == True):
self.estpos2(HT=HT)
self.Nc=len(np.where(self.usable)[0])
return True
elif forceamb:
self.estpos2(HT=HT)
return False
else:
self.Nc=len(np.where(self.usable)[0])
return False
# def compute_amb(self,pe=True,HT=True):
# self.merge2(RSS=False)
# self.refine(self.Nc,RSS=False)
# self.estpos2(HT=HT)
# self.Nc=len(np.where(self.usable)[0])
# return True
def rescale(self, f_vcw, cid=None):
"""idem setvcw but update current vcw with a multiplier factor
change vcw for all constraints of the CLA
Parameters
----------
f_vcw : a scale factor of the current vcw of the constraint.
cid : a list of constraints for which the self.vcw will be applied. If cid=None, all constraints are updates. default=None
Returns
-------
Nothing but update vcw either for each constraints from cid list either for all contraints in the CLA list self.c
"""
#print "rescale",vcw
if cid is None:
[c.rescale(f_vcw * c.vcw) for c in self.c]
else:
[c.rescale(f_vcw * c.vcw) for c in self.c if c.Id in cid]
def annulus_bound(self, cid=None):
""" adapt cmin and cmax of constraints
Update cmin and cmax of constraints for a given self.vcw
:Parameters:
cid : a list of constraints for which the self.vcw will be applied. If cid=None, all constraints are updates. default=None
:Returns:
Nothing but update boxe size either for each constraints from cid list either for all contraints in the CLA list self.c.
"""
#print "rescale",vcw
if cid is None:
[c.annulus_bound() for c in self.c]
else:
[c.annulus_bound() for c in self.c if c.Id in cid]
def append(self, c):
"""add a constraint into the CLA
add a constraint into the CLA
Parameters
----------
c : any constraint wichi heritates from Constraint object
Returns
-------
Nothing but fills self.c list of constraints
"""
self.c.append(c)
self.id.append(c.id)
self.origin.append(c.origin)
self.type.append(c.type)
self.runable.append(c.runable)
self.visible.append(c.runable)
self.obsolete.append(c.obsolete)
# by default, if a constraint is runable, it will be used
self.usable.append(c.runable and c.visible and not c.obsolete)
self.std.append(c.std)
self.Nc = self.Nc + 1
self.vcw.append(c.vcw)
#
# Reevaluate weights
#
u = np.nonzero(np.array(self.std) > 0) # std >0
sumstd = np.sum(np.array(self.std)[u], axis=0).astype('float')
self.w = np.array(self.std) / sumstd
self.ndim = c.lbox.ndim
def remove(self, k):
"""OBSOLETE/ TO BE DEVELOPPED
remove(k) : remove a constraint to cla
"""
self.c.remove(self.c[k])
self.std.remove(c.std[k])
sumstd = np.sum(np.array(self.std)[u], axis=0).astype('float')
self.Nc = self.Nc - 1
#
# Reevaluate weights
#
u = np.nonzero(np.array(self.std) > 0) # std >0
sumstd = np.sum(np.array(self.std)[u], axis=0)
self.w = np.array(self.std) / sumstd
def setvcw(self, vcw):
"""update scale factor of all constraint
rescale all the constraints's boxes according to the given vcw
Parameters
-----------
vcw : a vcw value
RSS : boolean
True : RSS are considered in merging
False : RSS are excluded from merging
Returns
-------
Nothing but update all constraint from the CLA
"""
for c in self.c:
c.rescale(vcw)
    def merge2(self, vcw_init=1.0, RSS=False):
        """Merge all constraints of the CLA into the smallest common box.

        Dichotomous search on the scale factor vcw applied to every
        constraint box:

        - while the intersection of all boxes is empty, vcw is increased
          (a physical intersection MUST exist),
        - once an intersection exists, vcw is decreased; the process stops
          when the step gets small enough, keeping the smallest box that
          satisfies every constraint.

        The result initializes ``self.dlayer``:
        ``self.dlayer[Nc] = [enclosed boxes (empty), ambiguous boxes]``
        where Nc is the number of usable constraints.

        Parameters
        ----------
        vcw_init : float
            initial scale factor value (stored in self.vcw_init). default 1.0
        RSS : boolean
            True : RSS constraints participate in the merging.
            False : RSS constraints are excluded from the merging (unless
            the CLA contains nothing but RSS constraints).

        Returns
        -------
        Nothing but fills self.dlayer[Nc][0] (void list) and
        self.dlayer[Nc][1] (the initial restricted box).
        """
        # number of usable constraints
        Nc = len(np.where(self.usable)[0])
        self.Nc = Nc
        vcwmin = 1.0
        step = 1.0
        vcw1 = vcwmin + step
        # onlyRSS is True when the CLA contains nothing but RSS constraints:
        # in that case RSS cannot be excluded from the merging
        onlyRSS = False
        if 'RSS' in self.type:
            if 'TOA' not in self.type:
                if 'TDOA' not in self.type:
                    onlyRSS = True
            elif 'TDOA' not in self.type:
                if 'TOA' not in self.type:
                    onlyRSS = True
        # dichotomous search on vcw1
        while (step > 0.05) | (vcw1 == vcwmin):
            self.setvcw(vcw1)
            # constraints vcw set to current value; reset the running
            # intersection before the pass
            try:
                del tlb
            except:
                pass
            # find the intersection of all constraint boxes for current vcw
            for c in self.c:
                if (c.type != 'Exclude'):
                    if (c.type != 'RSS') or onlyRSS or RSS:
                        if c.usable:
                            lb = c.lbox
                            try:
                                tlb = tlb.intersect(lb)
                            except:
                                # first box: nothing to intersect with yet
                                tlb = lb
                        else:
                            pass
                else:
                    # keep the exclusion constraint for the final clipping
                    ex = c
            # clip the running intersection with the exclusion zone (if any)
            try:
                tlb = tlb.intersect(ex.lbox)
            except:
                pass
            if len(tlb.box) == 0:  # empty intersection: vcw1 is increased
                vcw1 = vcw1 + step
                step = step * 1.2
            else:  # intersection exists: vcw1 is decreased
                vcw1 = max(vcw1 - step / 2., vcwmin)  # vcw > vcwmin
                step = step / 4.
            # degenerate (zero-width) box: enlarge once more
            try:
                if (np.diff(tlb.box[0].bd, axis=0)[0][0] == 0) | (np.diff(tlb.box[0].bd, axis=0)[0][1] == 0):
                    self.setvcw(vcw1 + 1.0)
            except:
                pass
        self.vcw_init = vcw_init
        # layer Nc: [enclosed boxes (none yet), ambiguous boxes]
        self.dlayer[Nc] = [LBoxN([]), tlb]
        self.dlayer[Nc][1].volume()
def valid_v(self, lv, N, RSS=True):
"""test a vertex list with constraints
Each vertexes from boxes pass into the list are tested to determine if the box is out (OB), ambiguous (AB) or enclosed (EB)
Parameters
----------
lv : a vertex list from BOXN.octants
N : number of constraints aka layer number
RSS : boolean
True : RSS constraints are kept as any other constraints for boxes evaluation (ambigous /enclosed)
False : RSS constraints are ignored in boxes evaluation (ambigous /enclosed)
Returns
-------
AB : a list with the numerous of Ambiguous Boxes
EB : a list with the numerous of Enclosed Boxes
"""
assert N <= self.Nc, " N > Number of Constraints "
Nmiss = self.Nc - N
miss_cpt = 0
f_flag = 0
o_flag = 0
pndim = pow(2, self.ndim)
sDDB = np.ones((4, len(lv)), dtype='bool')
sT = np.ones((4, len(lv) / pndim), dtype='bool')
sTAB = np.ones((len(lv) / pndim), dtype='bool')
TT = []
Ds = []
if RSS:
loop_condition="(c.type != 'Exclude') & (c.usable)"
else :
loop_condition="(c.type != 'RSS') & (c.type != 'Exclude') & (c.usable)"
for c in self.c: # for each constraints
if eval(loop_condition):
DDB, TB = c.valid_v(
lv) # .reshape(2,len(lv)/4,pow(2,self.ndim))
TT.append(TB)
if not (DDB[0].any()) | (DDB[1].any()): # if all boxes are out
self.erro[c.Id] = self.erro[c.Id] + 1
sDDB = DDB * sDDB
# ERROR CHECKER
AA = TB[0, :]
BB = TB[1, :]
CC = TB[2, :]
DD = TB[3, :]
TAB = (((~AA) * (~BB) * (DD)) + (BB * (~CC)
* (~DD)) + (AA * BB * (~CC)))
sTAB = (sTAB * TAB)
if self.ndim == 3:
B = (sDDB[0] * sDDB[1]).reshape(len(lv) / 8, 8)
sB = np.sum(B, axis=1)
EB = np.nonzero((sB) > 7)[0]
AB = np.nonzero((sB > 0) & (sB < 8))[0]
# error checker
ABt = np.nonzero(sTAB)[0]
AB = np.unique(np.hstack((AB, ABt)))
return (EB, AB)
if self.ndim == 2:
B = (sDDB[0] * sDDB[1]).reshape(len(lv) / 4, 4)
sB = np.sum(B, axis=1)
EB = np.nonzero((sB) > 3)[0]
AB = np.nonzero((sB > 0) & (sB < 4))[0]
# error checker
ABt = np.nonzero(sTAB)[0]
AB = np.unique(np.hstack((AB,ABt)))
return (EB, AB)
    def refine(self, l, NBOXMAX=50, VOLMIN=0.001,RSS=True):
        """Refine layer ``l`` of the CLA (recursive octree/quadtree).

        Once the CLA has been merged, this method encloses the solution:
        every ambiguous box of the layer is divided into octants and each
        octant is classified by self.valid_v. The recursion stops when the
        maximum number of boxes (NBOXMAX) or the minimal box volume
        (VOLMIN) is reached.

        self.dlayer[l][0] : LBoxN of boxes fully inside the validity area
        self.dlayer[l][1] : LBoxN of ambiguous boxes (partially inside)

        Parameters
        ----------
        l : int
            layer number.
        NBOXMAX : int
            maximum number of boxes (recursion escape value).
        VOLMIN : float
            minimum achievable box volume (recursion escape value).
        RSS : boolean
            forwarded to valid_v — include RSS constraints in the
            enclosed/ambiguous evaluation.

        Returns
        -------
        Nothing but fills self.dlayer[l][0] and self.dlayer[l][1] with
        respectively the enclosed and the ambiguous boxes.
        """
        self.iter = self.iter + 1
        Nc = self.Nc
        if self.iter == 1:
            # first call of the recursion: reset state
            self.FINISHED = 0
            self.erro = np.zeros(self.Nc)
            a = []
        # subdivide every ambiguous box into octants and classify them
        B = self.dlayer[l][1].octant()
        lv = B.bd2coord()
        EB, AB = self.valid_v(lv, l,RSS=RSS)
        del lv
        self.erronous.append(self.erro)
        nbox = len(EB)
        nboxamb = len(AB)
        # case 1: every octant is out of the validity area and no enclosed
        # box was found so far
        if ((nboxamb == 0) & (nbox == 0)) and len(self.dlayer[l][0].box) == 0:
            if self.iter < 25:
                # enlarge the failing constraints (or all of them) and retry
                pb = np.nonzero(self.erro != 0)[0]
                if len(pb) != 0:
                    # specific size up of the constraints in error
                    self.rescale(1.2, pb)
                    self.annulus_bound(pb)
                else:
                    # all constraints size up
                    self.rescale(1.2)
                    self.annulus_bound()
                self.refine(l,NBOXMAX, VOLMIN,RSS)
            else:
                # too many retries: unstack to a lower layer
                self.iter = 0
                self.dlayer[l - 1] = self.dlayer[l]
                l = l - 1
                assert l >= 0, pdb.set_trace()
                self.refine(l,NBOXMAX, VOLMIN,RSS)
        # case 2: at least one box is enclosed or ambiguous
        else:
            if (nbox != 0 and nboxamb == 0):
                # only enclosed boxes remain: refinement is complete
                self.FINISHED = 1
            # update the enclosed boxes
            if len(EB) != 0:
                self.dlayer[l][0].append_l(LBoxN(B.box[EB], ndim=self.ndim))
            # update the ambiguous boxes
            self.dlayer[l][1] = LBoxN(B.box[AB], ndim=self.ndim)
            # lv selects which list drives the volume test below
            if nboxamb != 0:
                lv = 1
            else:
                lv = 0
            # recurse while neither NBOXMAX nor VOLMIN is reached
            if (((nboxamb + nbox) < NBOXMAX) and (self.dlayer[l][lv].box[-1].vol > VOLMIN)) and self.FINISHED == 0:
                self.refine(l,NBOXMAX, VOLMIN,RSS)
            else:
                self.iter = 0
                self.Nc = l
def _show3(self, l=-1, amb=False, sc='all'):
"""
Parameters
----------
l : layer number to observe. If -1 estimation is made on the highest available layer. default = -1
amb : display ambiguous boxes. default = false
sc : display all constraint or give a list with the constrinat number to observe ex: [0,1,3]. default 'all'
Returns
-------
Nothing but calls a geomview instance
"""
Nc = self.Nc
par = self.parmsh
if l == -1:
if sc == 'all':
for c in self.c:
if c.runable:
c.parmsh['display'] = False
c.parmsh['scene'] = False
# if constrinat boxes has to be displayed
if par['constr_boxes']:
c.parmsh['boxes'] = False
else :
c.parmsh['boxes'] = True
c._show3()
else:
try:
for vsc in sc:
if self.c[vsc].runable:
self.c[vsc].parmsh['display'] = False
self.c[vsc].parmsh['scene'] = False
if par['constr_boxes']:
self.c[vsc].parmsh['boxes'] = False
else :
self.c[vsc].parmsh['boxes'] = True
fname = self.c[vsc]._show3()
except:
if self.c[sc].runable:
self.c[sc].parmsh['display'] = False
self.c[sc].parmsh['scene'] = False
if par['constr_boxes']:
self.c[sc].parmsh['boxes'] = False
else :
self.c[sc].parmsh['boxes'] = True
fname = self.c[sc]._show3()
else:
if c[l].runable:
self.c[l].parmsh['dispay'] = False
self.c[l].parmsh['scene'] = False
fname = self.c[l]._show3()
col = ['r', 'b', 'g', 'm', 'y', 'b', 'r']
if par['scene']:
an = np.zeros(len(self.bn))
for c in self.c:
if c.runable:
an = np.vstack((an, c.p))
# S = Scene(an=an, bn=self.bn)
# sce = S.generate()
if par['estimated']:
try:
mlab.point3d(self.pe[0],self.pe[1],self.pe[2])
except:
pass
if par['boxes']:
for l in self.dlayer.keys():
self.dlayer[l][0].parmsh['display'] = False
self.dlayer[l][1].parmsh['display'] = False
try:
self.dlayer[l][0]._show3(col='b', Id=l)
except:
pass
if amb:
fname = self.dlayer[l][1]._show3(col='r', Id=l + 1)
#
def show3(self, l=-1, amb=False, sc='all'):
""" Display constraints and theirs boxes through geomview.
geomview parameters are the following
self.parmsh['display']=False # launch geomview interactively
self.parmsh['scene']=True # display whole scene
self.parmsh['boxes']=True # display constraint box
self.parmsh['constr_boxes']=False # display constraint box
self.parmsh['estimated']=True # display estimated point
Parameters
----------
l : layer number to observe. If -1 estimation is made on the highest available layer. default = -1
amb : display ambiguous boxes. default = false
sc : display all constraint or give a list with the constrinat number to observe ex: [0,1,3]. default 'all'
Returns
-------
Nothing but calls a geomview instance
"""
Nc = self.Nc
filename = basename + "/geom/cla.list"
fd = open(filename, "w")
fd.write("LIST\n")
par = self.parmsh
if l == -1:
if sc == 'all':
for c in self.c:
if c.runable:
c.parmsh['display'] = False
c.parmsh['scene'] = False
# if constrinat boxes has to be displayed
if par['constr_boxes']:
c.parmsh['boxes'] = False
else :
c.parmsh['boxes'] = True
fname = c.show3()
fd.write("{<" + fname + ".list}\n")
else:
try:
for vsc in sc:
if self.c[vsc].runable:
self.c[vsc].parmsh['display'] = False
self.c[vsc].parmsh['scene'] = False
if par['constr_boxes']:
self.c[vsc].parmsh['boxes'] = False
else :
self.c[vsc].parmsh['boxes'] = True
fname = self.c[vsc].show3()
fd.write("{<" + fname + ".list}\n")
except:
if self.c[sc].runable:
self.c[sc].parmsh['display'] = False
self.c[sc].parmsh['scene'] = False
if par['constr_boxes']:
self.c[sc].parmsh['boxes'] = False
else :
self.c[sc].parmsh['boxes'] = True
fname = self.c[sc].show3()
fd.write("{<" + fname + ".list}\n")
else:
if c[l].runable:
self.c[l].parmsh['dispay'] = False
self.c[l].parmsh['scene'] = False
fname = self.c[l].show3()
fd.write("{<" + fname + ".list}\n")
col = ['r', 'b', 'g', 'm', 'y', 'b', 'r']
if par['scene']:
an = np.zeros(len(self.bn))
for c in self.c:
if c.runable:
an = np.vstack((an, c.p))
S = Scene(an=an, bn=self.bn)
sce = S.generate()
if par['estimated']:
try:
sce = g.cloud(self.pe, display=False, name='scene',
color='k', dice=6, access='append')
fd.write("{<" + sce + "}\n")
except:
pass
if par['boxes']:
for l in self.dlayer.keys():
self.dlayer[l][0].parmsh['display'] = False
self.dlayer[l][1].parmsh['display'] = False
try:
fname = self.dlayer[l][0].show3(col=col[Nc - l + 1], Id=l)
fd.write("{<" + fname + "}\n")
except:
pass
if amb:
coco = ['r', 'v', 'b', 'y']
fname = self.dlayer[l][1].show3(col=col[Nc - l], Id=l + 1)
# fname = self.dlayer[l][1].show3(col=coco,Id=l+1)
fd.write("{<" + fname + "}\n")
fd.close()
chaine = "geomview -nopanel -b 1 1 1 " + filename + " 2>/dev/null &"
os.system(chaine)
def prob(self, c, d):
""" determine probability of list of vertex
Return the probability of each vertex from an array in regard of the constraint origin, standard deviation and vcw
Parameters
----------
c : contraint number in the self.c list
d : an array of vertex
Returns
-------
v : probability of each vertex
"""
if self.c[c].type == 'TDOA':
v = (1 / ((self.c[c].sstd * self.c[c].vcw) * np.sqrt(2 * np.pi))) * np.exp(-(d - self.c[c].value * 0.3) ** 2 / (2 * (self.c[c].sstd) * self.c[c].vcw) ** 2)
elif self.c[c].type == 'TOA':
v = (1 / (((self.c[c].sstd) * self.c[c].vcw) * np.sqrt(2 * np.pi))) * np.exp(-(d - self.c[c].value * 0.3) ** 2 / (2 * (self.c[c].sstd) * self.c[c].vcw) ** 2)
elif self.c[c].type == 'RSS':
#
# v = (1/(((self.c[c].sstd)*self.c[c].vcw)*np.sqrt(2*np.pi)))*np.exp(-(d-self.c[c].value*0.3)**2/(2*(self.c[c].sstd)*self.c[c].vcw)**2)
# v=v[0]
S = (-self.c[c].sstd * np.log(10)) / (-10 * self.c[c].model.rssnp)
M = ((self.c[c].model.PL0 - self.c[c].value) *
np.log(10)) / (10 * self.c[c].model.rssnp)
v = 1 / (d * S * np.sqrt(2 * np.pi)) * np.exp(
-(((np.log(d) - M) ** 2) / (2. * (S ** 2))))
# std = self.c[c].sstd#10**(self.c[c].model['RSSnp']/20.)
## mean = self.c[c].range
# mean = np.log(self.c[c].range)+std**2
# v = 1/(d*np.sqrt(2*np.pi))*np.exp(-(np.log(d)-mean)**2/(2*std**2))
return(v)
# def gapdetect(self, l, dlindx):
# """basic gap detection
# Detects if separated clusters of boxes are observables. his situation is usual in under determined estimation.
# This only test on each axis if all boxes are contiguous. If not, a gap is declared and clusters are created.
#
# Parameters
# ----------
# l : layer numbero
# dlindx : select the boxes type ( from self.dlayer) for gap detection 0=enclose or 1=ambigous boxes
# Return
# ------
# clust : a list of array. each array contains boxes from the same cluster
# axis : axis/axes where gap has/have been detectes
# """
# gcoord = []
# axis = np.zeros(self.ndim, dtype='int8')
# clust = []
## c2={}
# for i in range(self.ndim):
# uni, inv, idd = np.unique(self.dlayer[l][dlindx]
# .bd[:, i], return_inverse=True, return_index=True)
## uni,inv,idd =np.unique(self.dlayer[l][dlindx].ctr[:,i],return_inverse=True,return_index=True)
# slope = np.diff(np.diff(uni))
## if len(slope) != 0:
# if len(slope) >1:
# if abs(np.min(slope)) > 1e-9:
## c2[i]=[]
# gidx = np.nonzero(np.min(slope) == slope)[0]
## print 'GAP DETECTED in AXIS',i
# axis[i] = 1
# try:
# # divis par 2 pour pouvoir aveir les index de cluster comme les centre des box
# clust.append(np.nonzero(uni[gidx[0]] < self.dlayer[l]
# [dlindx].bd[:, i])[0] / 2)
# clust.append(np.nonzero(uni[gidx[0]] > self.dlayer[l]
# [dlindx].bd[:, i])[0] / 2)
## c2[i].append(np.nonzero(uni[gidx[0]] < self.dlayer[l]
## [dlindx].bd[:, i])[0] / 2)
## c2[i].append(np.nonzero(uni[gidx[0]] < self.dlayer[l]
## [dlindx].bd[:, i])[0] / 2)
# except:
# pdb.set_trace()
# else:
# clust = []
# if clust !=[]:
# pdb.set_trace()
# return clust, axis
def gapdetect(self, l, dlindx):
"""basic gap detection
Detects if separated clusters of boxes are observables. his situation is usual in under determined estimation.
This only test on each axis if all boxes are contiguous. If not, a gap is declared and clusters are created.
Parameters
----------
l : layer number
dlindx : select the boxes type ( from self.dlayer) for gap detection 0=enclose or 1=ambigous boxes
Returns
------
clust : a list of array. each array contains boxes from the same cluster
axis : axis/axes where gap has/have been detectes
Example
-------
>>> from pylayers.location.geometric.constraints.cla import *
>>> from pylayers.location.geometric.constraints.toa import *
>>> from pylayers.location.geometric.constraints.exclude import *
>>> import matplotlib.pyplot as plt
>>> import numpy as np
>>> a=np.array(([1,0,0]))
>>> b=np.array(([10,0,0]))
>>> nodes=np.array(([-10,10],[-10,10],[-1,1]))
>>> n= np.array((5,5,0))
>>> d1=np.sqrt(np.sum((a-n)**2))
>>> d2=np.sqrt(np.sum((b-n)**2))
>>> T1=TOA(id=1,value=d1/0.3,std=0.5,p=a)
>>> T2=TOA(id=2,value=d2/0.3,std=0.5,p=b)
>>> E=Exclude(nodes.T)
>>> T1.runable=True
>>> T2.runable=True
>>> C=CLA()
>>> C.append(T1)
>>> C.append(T2)
>>> C.append(E)
>>> C.merge2()
>>> C.refine(C.Nc)
>>> C.gapdetect(C.Nc,1)
"""
gcoord = []
axis = np.zeros(self.ndim, dtype='int8')
clust = []
c2={}
axis=np.zeros(self.ndim, dtype='int8')
for i in range(self.ndim):
# find all begining point on axis i
uA,iuA=np.unique(self.dlayer[l][dlindx].bd[::2,i],return_index=True)
# find all ending point on axis i
uB,iuB=np.unique(self.dlayer[l][dlindx].bd[1::2,i],return_index=True)
# remove 1st point in uA
uAA = uA[1:]
iuAA = iuA[1:]
# remove last point in uA
uBB = uB[:-1]
iuBB = iuB[:-1]
# u=[]
# # find center of all these segment
# [u.append((uA[k]+uA[k+1])/2) for k in range(len(uA)-1) ]
# # get all center of the boxes
# C=self.dlayer[l][dlindx].ctr[:,i]
# v=np.unique(C)
# if no gap, all begining point must also be ending point, otherwise,
# a gap exists
igap=[]
# [igap.append(ik) for ik,k in enumerate(u) if k not in v]
[igap.append(ik) for ik,k in enumerate(uAA) if k not in uBB]
if len(igap) > 1:
igap=[igap[0]]
# if a segment has a center which is not a box center , there is a gap
# indexes are split into 2 set
if not len(igap) ==0:
# in a futur version it will be more convenient to stock each
# detected cluster in a given axis with a dictionary as the given
# axis as a key.
# c2[i].append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]<=cm[igap]))
# c2[i].append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]>cm[igap]))
# clust.append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]<=gap)[0]/2)
# clust.append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]>gap)[0]/2)
clust.append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]<=uA[igap])[0]/2)
clust.append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]>uA[igap])[0]/2)
axis[i]=1
# else :
# clust = []
return clust,axis
def gapdetect2(self, l, dlindx):
"""basic gap detection
Detects if separated clusters of boxes are observables. his situation is usual in under determined estimation.
This only test on each axis if all boxes are contiguous. If not, a gap is declared and clusters are created.
requiere pyinterval class
Parameters
----------
l : layer number
dlindx : select the boxes type ( from self.dlayer) for gap detection 0=enclose or 1=ambigous boxes
Return
------
clust : a list of array. each array contains boxes from the same cluster
axis : axis/axes where gap has/have been detectes
Example
-------
>>> from pylayers.location.geometric.constraints.cla import *
>>> from pylayers.location.geometric.constraints.toa import *
>>> from pylayers.location.geometric.constraints.exclude import *
>>> import matplotlib.pyplot as plt
>>> import numpy as np
>>> a=np.array(([1,0,0]))
>>> b=np.array(([10,0,0]))
>>> nodes=np.array(([-10,10],[-10,10],[-1,1]))
>>> n= np.array((5,5,0))
>>> d1=np.sqrt(np.sum((a-n)**2))
>>> d2=np.sqrt(np.sum((b-n)**2))
>>> T1=TOA(id=1,value=d1/0.3,std=np.array((0.5)),p=a)
>>> T2=TOA(id=2,value=d2/0.3,std=np.array((0.5)),p=b)
>>> E=Exclude(nodes.T)
>>> T1.runable=True
>>> T2.runable=True
>>> C=CLA()
>>> C.append(T1)
>>> C.append(T2)
>>> C.append(E)
>>> C.merge2()
>>> C.refine(C.Nc)
>>> C.gapdetect2(C.Nc,1)
"""
gcoord = []
axis = np.zeros(self.ndim, dtype='int8')
clust = []
c2={}
axis=np.zeros(self.ndim, dtype='int8')
for i in range(self.ndim):
# reshape boxes to be compliant with interval
Z=self.dlayer[l][dlindx].bd[:,i]
Zr=Z.reshape(len(Z)/2,2)
# create intervals
I=[interval(Zr[k]) for k in range(len(Zr))]
ii=interval()
# gather interval
for j in I:
ii=ii|j
# if a gap appears (more than a unique interval)
if len(ii)>1:
# in a futur version it will be more convenient to stock each
# detected cluster in a given axis with a dictionary as the given
# axis as a key.
# c2[i].append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]<=cm[igap]))
# c2[i].append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]>cm[igap]))
# clust.append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]<=gap)[0]/2)
# clust.append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]>gap)[0]/2)
clust.append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]<=ii[0][1])[0]/2)
clust.append(np.nonzero(self.dlayer[l][dlindx].bd[:,i]>=ii[1][0])[0]/2)
axis[i]=1
return clust,axis
def min_dist(self, a, b):
"""
OBSOLETE
"""
print('min dist')
pdb.set_trace()
# recherche distance entre barycentre et les centres des boites distance2barycentre(d2b)
d2b = np.sqrt(np.sum((a - b) ** 2, axis=1))
# on retourne pe comme etant le centre de la boite ayant le plus faible distrance avec barycentre
indx = np.nonzero(d2b == min(d2b))[0]
return(indx[0])
def estpos(self, l=-1, amb=False, test=False):
"""
DEPRECATED !
estpos(l,amb=True) : estimate position
l : layer number
amb : if True include ambigous boxes of VA in gravity center computation
"""
if l == -1:
l = np.max(self.dlayer.keys())
PP = []
self.saveP = []
for dlindx in range(2):
for i in range(len(self.dlayer[l][dlindx].box)):
poids = []
for j in range(len(self.c)):
if self.c[j].type != 'Exclude':
d = np.sqrt(np.sum((self.dlayer[l][dlindx].box[i].ctr - self.c[j].p) * (self.dlayer[l][dlindx].box[i].ctr - self.c[j].p)))
poids.append(self.prob(j, d))
P = sum(np.array(poids) * np.array(poids)) / (len(poids))
self.saveP.append(P)
PP.append(P * self.dlayer[l][dlindx].box[i].ctr)
self.pe = np.sum(PP, axis=0) / np.sum(self.saveP)
def estpos2(self, l=-1, amb=False,HT=False):
""" Position estimation
estimate position from the enclosed or/and ambibuous boxes
Parameters
----------
l : Layer of the estimation. If -1 estimation is made on the highest available layer
amb : Use ambiguous boxes (if available) to perform the position estimation. default = False
HT : boolean
True : if a cluster ppears (2 sets of distinct boxes ) an hypthesis testuing method is applied
in estpos2 method
False : no HT methos is applied
Hybrid positioning based on hypothesis thesting
N. Amiot, T. Pedersen, M. Laaraiedh, B. Uguen.
A Hybrid Positioning Method Based on Hypothesis Testing
,Wireless Communications Letters, IEEE, vol.1, no.4, pp.348-351, August 2012
Returns
-------
Nothing but fills self.pe with an array
"""
if l == -1:
l = np.max(self.dlayer.keys())
PP = []
poids = []
if len(self.dlayer[l][0].box) != 0: # si enclosed box exists
dlindx = 0
# print 'Enclosed pos estim'
else:
dlindx = 1
# print 'Amiguous pos estim'
self.saveP = np.zeros((len(self.dlayer[l][dlindx].box)))
if pyinterval_installed:
clust, axis = self.gapdetect2(l, dlindx)
else:
clust, axis = self.gapdetect(l, dlindx)
box_center = self.dlayer[l][dlindx].ctr
uc = np.where(self.usable)[0]
# proba computation for all center of each boxes
for j in uc:#range(len(self.c)):
#if self.c[j].type != 'Exclude':
if (self.c[j].type != 'Exclude') & (self.c[j].usable):
# compute distance between contraint center and all vertexes
if self.c[j].type == 'TOA' or self.c[j].type == 'RSS':
d = np.sqrt(np.sum((box_center - self.c[j].p * np.ones((len(box_center), 1))) ** 2, axis=1))
elif self.c[j].type == 'TDOA':
F1v = np.sqrt(np.sum((self.c[j].p[0] - box_center) * (self.c[j].p[0] - box_center), axis=1))
F2v = np.sqrt(np.sum((self.c[j].p[1] - box_center) * (self.c[j].p[1] - box_center), axis=1))
d = (F1v - F2v)
try:
poids = (poids * (self.prob(j, d)))
poids = (poids * poids.T) / len(poids)
except:
poids = (self.prob(j, d))
poids = (poids * poids.T) / len(poids)
# poids.append(self.prob(j,d))
# pdb.set_trace()
# P=sum(np.array(poids)*np.array(poids))/(len(poids))
# self.saveP[i]=P
self.saveP = poids
# PP.append(P*self.dlayer[l][dlindx].box[i].ctr)
##########################################
self.pecluster=[]
if clust != []:
# print 'cluster'
lclust = []
dd = []
mps = -1.0
saxis = sum(axis)
p = 1
for i in range(len(axis)):
if axis[i] != 0:
try:
count = np.vstack((count, np.repeat(range(2 * (p - 1), (2 * (p - 1)) + 2) * (pow(2, saxis - p)), p)))
except:
count = np.repeat(range(2 * (p - 1), (2 * (p - 1)) + 2)
* (pow(2, saxis - p)), p)
p = p + 1
count = count.T
lpc=[]
for i in range(len(clust)):
if len(clust) < 3:
clusters = clust[i]
else:
if len(np.shape(count)) > 1:
clusters = np.intersect1d(clust[count[i,0]], clust[count[i, 1]])
else:
clusters = np.intersect1d(clust[count[0]], clust[count[1]])
clust_vol = np.sum(np.array(self.dlayer[l][
dlindx].vol)[np.unique(clusters)])
if len(clusters) != 0:
mp = np.max(self.saveP[clusters])
if mps < mp:
mps = mp
estclu = clusters
itoas=np.where(np.array(self.type)=='TOA')[0]
if clust_vol != 0 and len(itoas) == 2:
lclust.append(clusters)
pc = np.sum(np.array(self.dlayer[l][dlindx].ctr)[np.unique(clusters)], axis=0) / len(np.unique(clusters))
lpc.append(pc)
# verifier que les contraintes utilises sont les bonne ( ce n'est pas le cas)
# ne marche que si 2 constriantes genere le cluster ( a robustifier)
pu = np.where(self.usable)[0]
# try:
# dd.append(np.sqrt(np.sum((pc - self.c[itoas[0]].p) ** 2)))
# except:
# dd.append(np.sqrt(np.sum((pc - self.c[itoas[1]].p) ** 2)))
# print pc
try:
dd.append(np.sqrt(np.sum((pc - self.c[itoas[0]].p) ** 2)))
except:
dd.append(np.sqrt(np.sum((pc - self.c[itoas[1]].p) ** 2)))
print(pc)
# try:
# vmax=[]
# for i in range(len(lclust)):
# vmax.append(np.max(poids[np.unique(lclust[i])]))
# peindx = np.nonzero(poids==max(vmax))[0][0]
# self.pe = self.dlayer[l][dlindx].ctr[peindx]
if HT:
#print "enter in HT processing"
try:
# for now, it is supposed that all RSS share the same model
rssvalues=[]
icr=np.where(np.array(self.type)=='RSS')[0]
for irss in range(len(icr)):
d0=np.sqrt(np.sum((self.c[icr[irss]].p-lpc[0])**2))
d1=np.sqrt(np.sum((self.c[icr[irss]].p-lpc[1])**2))
rssvalues.append(self.c[icr[irss]].value)
try:
drss= np.vstack((drss,np.array((d0,d1))))
except:
drss= np.array((d0,d1))
if len(np.shape(drss))==1:
drss=drss.reshape(1,2)
M = (((-self.c[icr[0]].model.PL0 - self.c[icr[0]].value) * np.log(10) ) / (10. * self.c[icr[0]].model.rssnp))
PL0= -self.c[icr[0]].model.PL0
NP = self.c[icr[0]].model.rssnp
mu1=PL0-10*NP*np.log10(drss[:,0])
mu2=PL0-10*NP*np.log10(drss[:,1])
sig=self.c[icr[0]].model.sigrss
values=np.array((rssvalues))
LT=np.sum(1/(2.*sig**2)*(mu2**2-mu1**2))
RT=np.sum((1/(1.*sig))*values*(mu1-mu2))
# LL = np.log(dd[1] / dd[0]) * (1 + np.log(dd[0] * dd[1]) - 2 * M)
# if LL > 0:
if LT>RT:
# vmax = np.max(poids[np.unique(lclust[0])])
# peindx=np.nonzero(poids[vmax]==poids)[0][0]
# self.pe = self.dlayer[l][dlindx].ctr[np.unique(lclust[0])[peindx]]
#if LL>0 cluster 0 is selctionned and tits centroids is chosen as position estimation
self.pe = np.mean(self.dlayer[l][dlindx].ctr[
np.unique(lclust[0])], axis=0)
# print("HT processing done")
pestdmax = np.max(self.dlayer[l][
dlindx].ctr[np.unique(lclust[0])])
pestdmin = np.min(self.dlayer[l][
dlindx].ctr[np.unique(lclust[0])])
self.pestd = pestdmax - pestdmin
else:
#if LL<0 cluster 1 is selctionned and tits centroids is chosen as position estimation
self.pe = np.mean(self.dlayer[l][dlindx].ctr[
np.unique(lclust[1])], axis=0)
pestdmax = np.max(self.dlayer[l][
dlindx].ctr[np.unique(lclust[1])])
pestdmin = np.min(self.dlayer[l][
dlindx].ctr[np.unique(lclust[1])])
self.pestd = pestdmax - pestdmin
# if HT fail for some reasons , a classical position estimation is performed
except:
# print "!!!!! HT FAIL !!!!!!!"
# print "2 first constraint of CLA have to be TOA and others RSS in order to use HT"
if np.sum(poids) > 0.:
self.pe = np.sum(poids * self.dlayer[l][dlindx]
.ctr.T, axis=1) / np.sum(poids)
else:
self.pe = np.sum(self.dlayer[l][dlindx].ctr, axis=0) / \
len(self.dlayer[l][dlindx].ctr)
pestdmax = np.max(self.dlayer[l][dlindx].bd, axis=0)
pestdmin = np.min(self.dlayer[l][dlindx].bd, axis=0)
self.pestd = pestdmax - pestdmin
# if no HT
else:
if np.sum(poids) > 0.:
self.pe = np.sum(poids * self.dlayer[l][dlindx]
.ctr.T, axis=1) / np.sum(poids)
else:
self.pe = np.sum(self.dlayer[l][dlindx].ctr, axis=0) / \
len(self.dlayer[l][dlindx].ctr)
pestdmax = np.max(self.dlayer[l][dlindx].bd, axis=0)
pestdmin = np.min(self.dlayer[l][dlindx].bd, axis=0)
self.pestd = pestdmax - pestdmin
# store the centroid of clusters into self.peclsuter
for cl in lclust:
self.pecluster.append(np.mean(self.dlayer[l][dlindx].ctr[
np.unique(cl)], axis=0))
# if not cluster
else:
if np.sum(poids) > 0.:
self.pe = np.sum(poids * self.dlayer[l][
dlindx].ctr.T, axis=1) / np.sum(poids)
else:
self.pe = np.sum(self.dlayer[l][dlindx].ctr,
axis=0) / len(self.dlayer[l][dlindx].ctr)
pestdmax = np.max(self.dlayer[l][dlindx].bd, axis=0)
pestdmin = np.min(self.dlayer[l][dlindx].bd, axis=0)
self.pestd = pestdmax - pestdmin
self.pecluster=[self.pe]
| {
"content_hash": "c19d9d9c9132598d45f4b1880c55918a",
"timestamp": "",
"source": "github",
"line_count": 1525,
"max_line_length": 259,
"avg_line_length": 36.18229508196721,
"alnum_prop": 0.4783428177896988,
"repo_name": "pylayers/pylayers",
"id": "b3546a25574289efd36dd594d17d7bbea1fb23fa",
"size": "55200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pylayers/location/geometric/constraints/cla.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "584"
},
{
"name": "Jupyter Notebook",
"bytes": "52724429"
},
{
"name": "Python",
"bytes": "3907177"
},
{
"name": "Shell",
"bytes": "1512"
}
],
"symlink_target": ""
} |
import logging
from telemetry import decorators
from telemetry.core import exceptions
from telemetry.core import util
from telemetry.core.backends.chrome import cros_test_case
class CrOSCryptohomeTest(cros_test_case.CrOSTestCase):
  """Tests cryptohome mount behavior while logged in and after logout."""

  @decorators.Enabled('chromeos')
  def testCryptohome(self):
    """Verifies cryptohome mount status for regular and guest user and when
    logged out"""
    with self._CreateBrowser() as b:
      # A fresh browser session starts with exactly one open tab.
      self.assertEqual(1, len(b.tabs))
      self.assertTrue(b.tabs[0].url)
      # While a session is active the cryptohome must be mounted.
      self.assertTrue(self._IsCryptohomeMounted())
      # TODO(achuith): Remove dependency on /home/chronos/user.
      chronos_fs = self._cri.FilesystemMountedAt('/home/chronos/user')
      self.assertTrue(chronos_fs)
      if self._is_guest:
        # Guest sessions report 'guestfs' as the mounted filesystem.
        self.assertEqual(chronos_fs, 'guestfs')
      else:
        # For a regular user, /home/chronos/user and the user's cryptohome
        # path must resolve to the same mounted filesystem.
        crypto_fs = self._cri.FilesystemMountedAt(
            self._cri.CryptohomePath(self._username))
        self.assertEqual(crypto_fs, chronos_fs)
    # After the browser (and thus the session) is torn down, the cryptohome
    # is unmounted and the mount point reverts to the encrypted stateful
    # partition.
    self.assertFalse(self._IsCryptohomeMounted())
    self.assertEqual(self._cri.FilesystemMountedAt('/home/chronos/user'),
                     '/dev/mapper/encstateful')
class CrOSLoginTest(cros_test_case.CrOSTestCase):
  """Tests login status reporting, logout, and GAIA login."""

  @decorators.Enabled('chromeos')
  def testLoginStatus(self):
    """Tests autotestPrivate.loginStatus"""
    if self._is_guest:
      return
    with self._CreateBrowser(autotest_ext=True) as b:
      login_status = self._GetLoginStatus(b)
      self.assertEqual(type(login_status), dict)
      # A non-guest session must be reported as a regular user and vice
      # versa; the two flags are mutually exclusive here.
      self.assertEqual(not self._is_guest, login_status['isRegularUser'])
      self.assertEqual(self._is_guest, login_status['isGuest'])
      self.assertEqual(login_status['email'], self._username)
      self.assertFalse(login_status['isScreenLocked'])

  @decorators.Enabled('chromeos')
  def testLogout(self):
    """Tests autotestPrivate.logout"""
    if self._is_guest:
      return
    with self._CreateBrowser(autotest_ext=True) as b:
      extension = self._GetAutotestExtension(b)
      try:
        # Logging out tears down the session under us, so the extension
        # call may fail mid-flight; that is expected and ignored.
        extension.ExecuteJavaScript('chrome.autotestPrivate.logout();')
      except exceptions.Error:
        pass
      # The cryptohome is unmounted once logout completes.
      util.WaitFor(lambda: not self._IsCryptohomeMounted(), 20)

  @decorators.Enabled('chromeos')
  def testGaiaLogin(self):
    """Tests gaia login. Credentials are expected to be found in a
    credentials.txt file, with a single line of format username:password."""
    if self._is_guest:
      return
    (username, password) = self._Credentials('credentials.txt')
    # Silently skip when no credentials file is available.
    if username and password:
      with self._CreateBrowser(gaia_login=True,
                               username=username,
                               password=password):
        self.assertTrue(util.WaitFor(self._IsCryptohomeMounted, 10))
class CrOSScreenLockerTest(cros_test_case.CrOSTestCase):
  """Exercises screen locking and unlocking through autotestPrivate."""
  def _IsScreenLocked(self, browser):
    """Returns True if the login status reports the screen as locked."""
    return self._GetLoginStatus(browser)['isScreenLocked']
  def _LockScreen(self, browser):
    """Locks the screen via autotestPrivate and waits for the lock UI."""
    self.assertFalse(self._IsScreenLocked(browser))
    extension = self._GetAutotestExtension(browser)
    # Verify the lockScreen API is actually exposed before invoking it.
    self.assertTrue(extension.EvaluateJavaScript(
        "typeof chrome.autotestPrivate.lockScreen == 'function'"))
    logging.info('Locking screen')
    extension.ExecuteJavaScript('chrome.autotestPrivate.lockScreen();')
    logging.info('Waiting for the lock screen')
    def ScreenLocked():
      # The lock screen is considered up once the oobe page exists and the
      # Oobe object plus its test-only authentication hook are defined.
      return (browser.oobe_exists and
          browser.oobe.EvaluateJavaScript("typeof Oobe == 'function'") and
          browser.oobe.EvaluateJavaScript(
              "typeof Oobe.authenticateForTesting == 'function'"))
    util.WaitFor(ScreenLocked, 10)
    self.assertTrue(self._IsScreenLocked(browser))
  def _AttemptUnlockBadPassword(self, browser):
    """Attempts an unlock with a wrong password; expects an error bubble."""
    logging.info('Trying a bad password')
    def ErrorBubbleVisible():
      return not browser.oobe.EvaluateJavaScript('''
          document.getElementById('bubble').hidden
        ''')
    # No error should be shown before the attempt.
    self.assertFalse(ErrorBubbleVisible())
    browser.oobe.ExecuteJavaScript('''
        Oobe.authenticateForTesting('%s', 'bad');
      ''' % self._username)
    util.WaitFor(ErrorBubbleVisible, 10)
    # A failed attempt must leave the screen locked.
    self.assertTrue(self._IsScreenLocked(browser))
  def _UnlockScreen(self, browser):
    """Unlocks with the correct password and waits for the lock UI to go."""
    logging.info('Unlocking')
    browser.oobe.ExecuteJavaScript('''
        Oobe.authenticateForTesting('%s', '%s');
      ''' % (self._username, self._password))
    util.WaitFor(lambda: not browser.oobe_exists, 10)
    self.assertFalse(self._IsScreenLocked(browser))
  @decorators.Disabled
  def testScreenLock(self):
    """Tests autotestPrivate.screenLock"""
    if self._is_guest:
      return
    with self._CreateBrowser(autotest_ext=True) as browser:
      self._LockScreen(browser)
      self._AttemptUnlockBadPassword(browser)
      self._UnlockScreen(browser)
| {
"content_hash": "2b5b70f4cb57a7c66af5edb45be7371c",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 76,
"avg_line_length": 37.13385826771653,
"alnum_prop": 0.6923240033927057,
"repo_name": "sgraham/nope",
"id": "93b20f33bc9062f33eb5749b919a67515ac34d78",
"size": "4879",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/telemetry/telemetry/core/backends/chrome/cros_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "39967"
},
{
"name": "C",
"bytes": "4061434"
},
{
"name": "C++",
"bytes": "279546186"
},
{
"name": "CMake",
"bytes": "27212"
},
{
"name": "CSS",
"bytes": "919339"
},
{
"name": "Emacs Lisp",
"bytes": "988"
},
{
"name": "Go",
"bytes": "13628"
},
{
"name": "Groff",
"bytes": "5283"
},
{
"name": "HTML",
"bytes": "15989749"
},
{
"name": "Java",
"bytes": "7541683"
},
{
"name": "JavaScript",
"bytes": "32372588"
},
{
"name": "Lua",
"bytes": "16189"
},
{
"name": "Makefile",
"bytes": "40513"
},
{
"name": "Objective-C",
"bytes": "1584184"
},
{
"name": "Objective-C++",
"bytes": "8249988"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "169060"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "427339"
},
{
"name": "Python",
"bytes": "8346306"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "844553"
},
{
"name": "Standard ML",
"bytes": "4965"
},
{
"name": "VimL",
"bytes": "4075"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
} |
"""Helper functions"""
from datetime import date, datetime, timedelta
__all__ = [
"date_from_iso_week",
"find_slots",
"PicklableSlotMixin",
]
def date_from_iso_week(year, week, day_of_week=None):
    """Return the :class:`datetime.date` for the given ISO year/week/weekday.

    :param year: ISO year.
    :param week: ISO week number. ISO week 1 is the first week with 4 or
        more days in January.
    :param day_of_week: ISO weekday in range 1 (Monday) through 7 (Sunday).
        Defaults to 1 (Monday).
    :raises ValueError: If ``day_of_week`` is outside the range 1 through 7.
    """
    if day_of_week is None:
        day_of_week = 1

    if not 1 <= day_of_week <= 7:
        raise ValueError(
            f"Day of week is not in range 1 through 7, got {day_of_week!r}"
        )

    # strptime's %w directive is Sunday based (0 through 6, 0 being Sunday),
    # while ISO weekdays are Monday based (1 through 7, 7 being Sunday), so
    # ISO day 7 must be mapped to 0 or strptime rejects it. The mapped value
    # still falls in the correct %W week because %W weeks start on Monday.
    day = datetime.strptime(
        f"{year:04d}-{week:02d}-{day_of_week % 7:d}", "%Y-%W-%w"
    )

    # ISO week 1 is defined as the first week to have 4 or more days in January.
    # Python's built-in date parsing considers the week that contain the first
    # Monday of the year to be the first week.
    if date(year, 1, 4).isoweekday() > 4:
        day -= timedelta(days=7)

    return day.date()
def find_slots(cls):
    """Collect every slot name declared by *cls* or any class in its MRO."""
    collected = set()
    for klass in cls.__mro__:
        declared = getattr(klass, "__slots__", tuple())
        if not declared:
            continue
        if isinstance(declared, str):
            # A bare string declares a single slot, not an iterable of them.
            declared = (declared,)
        collected.update(declared)
    return collected
class PicklableSlotMixin(object):
    """Mixin providing pickle state handling for classes using ``__slots__``.

    State is captured as a dict mapping each slot name (gathered across the
    whole MRO) to its current value, and restored with ``setattr``.
    """

    __slots__ = ()

    def __getstate__(self):
        state = {}
        for name in find_slots(self.__class__):
            state[name] = getattr(self, name)
        return state

    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)
class PartialOrderingMixin(object):
    """Derive ``<=``, ``>``, ``>=`` and ``!=`` from ``__lt__`` and ``__eq__``.

    Each derived operator propagates ``NotImplemented`` when the underlying
    comparisons cannot handle the other operand, letting Python fall back to
    the reflected operation on the other object.
    """

    __slots__ = ()

    def __le__(self, other):
        less = self.__lt__(other)
        equal = self.__eq__(other)
        if less is NotImplemented and equal is NotImplemented:
            return NotImplemented
        return less is True or equal is True

    def __gt__(self, other):
        at_most = self.__le__(other)
        if at_most is NotImplemented:
            return NotImplemented
        return not at_most

    def __ge__(self, other):
        greater = self.__gt__(other)
        equal = self.__eq__(other)
        if greater is NotImplemented and equal is NotImplemented:
            return NotImplemented
        return greater is True or equal is True

    def __ne__(self, other):
        equal = self.__eq__(other)
        if equal is NotImplemented:
            return NotImplemented
        return not equal
| {
"content_hash": "f99f4eafcda244e683efcf4330220bfe",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 81,
"avg_line_length": 25.213483146067414,
"alnum_prop": 0.570855614973262,
"repo_name": "runfalk/spans",
"id": "af0b9c1a5ce9b7a4c39c173650aaae080c7f0602",
"size": "2244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spans/_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "116948"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.