text stringlengths 4 1.02M | meta dict |
|---|---|
# Load the day's instructions as one long string of direction characters.
with open("input.txt", "r") as input_file:
    puzzle_input = input_file.read().replace("\n", "")
# Coordinates: (x, y)
class Santa:
    """Tracks Santa's position on an (x, y) grid, starting at the origin."""

    # Direction characters mapped to (dx, dy) offsets.
    _OFFSETS = {"^": (0, 1), ">": (1, 0), "<": (-1, 0), "v": (0, -1)}

    def __init__(self):
        self.x = 0
        self.y = 0

    def move(self, direction):
        """Step one cell in the given direction; unknown characters are ignored."""
        dx, dy = Santa._OFFSETS.get(direction, (0, 0))
        self.x += dx
        self.y += dy

    def get_pos(self):
        """Return the current coordinates as an (x, y) tuple."""
        return (self.x, self.y)
class House:
    """A house on the grid; every instance is recorded in ``House.houses``."""

    houses = []  # class-level registry of all House objects ever created

    def __init__(self, x, y):
        self.x = x
        self.y = y
        self.presents = 0
        House.houses.append(self)

    def deliver(self):
        """Drop one present at this house."""
        self.presents += 1

    def get_pos(self):
        """Return this house's coordinates as an (x, y) tuple."""
        return (self.x, self.y)

    @classmethod
    def get_house(cls, house_coords):
        """Return the House at ``house_coords``, or None if none exists yet."""
        matches = (h for h in cls.houses if (h.x, h.y) == house_coords)
        return next(matches, None)

    @classmethod
    def total_houses(cls):
        """Number of distinct houses created so far."""
        return len(cls.houses)

    @classmethod
    def total_presents(cls):
        """Sum of presents delivered across every house."""
        return sum(h.presents for h in cls.houses)

    @classmethod
    def total_with_presents(cls):
        """Number of houses that received at least one present."""
        return sum(1 for h in cls.houses if h.presents > 0)
santa = Santa()


def _deliver_at(pos):
    """Deliver one present at pos, creating the house on first visit.

    Fixes two bugs in the original script: it called
    ``House.get_house((0, 0)).deliver()`` while the registry was still
    empty (AttributeError on None), and houses created inside the loop
    never received their present, undercounting ``total_presents``.
    """
    house = House.get_house(pos)
    if house is None:
        house = House(*pos)
    house.deliver()


# Santa delivers to the starting house before following any instructions.
_deliver_at(santa.get_pos())
for direction in puzzle_input:
    santa.move(direction)
    _deliver_at(santa.get_pos())

print(House.total_houses())
print(House.total_presents())
print(House.total_with_presents())
| {
"content_hash": "eae60158cf1b34d3e50308b3c3a77a43",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 50,
"avg_line_length": 21.308641975308642,
"alnum_prop": 0.5283893395133256,
"repo_name": "foxscotch/advent-of-code",
"id": "325c48dcbf68d3dc5b932280cfdad56acf831087",
"size": "1726",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "2015/03/p1-old.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "15714"
},
{
"name": "Python",
"bytes": "67825"
}
],
"symlink_target": ""
} |
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.expected_conditions import *
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import TimeoutException
class Page(object):
    """Base page object wrapping a WebDriver with explicit-wait helpers.

    All checks share a single 20-second :class:`WebDriverWait`.
    """

    def __init__(self, driver, base_url):
        self.driver = driver
        self.base_url = base_url
        self.wait = WebDriverWait(driver, 20)

    def is_element_visible(self, locator):
        """Return True if ``locator`` becomes visible within the timeout.

        :param locator: a (By, value) tuple.
        """
        try:
            self.wait.until(EC.visibility_of_element_located(locator))
            return True
        except TimeoutException:
            # wait.until raises TimeoutException on failure; the original
            # caught WebDriverException, a name never imported here.
            return False

    def is_element_present(self, *locator):
        """Return True if ``locator`` is present in the DOM within the timeout."""
        try:
            self.wait.until(EC.presence_of_element_located(*locator))
            return True
        except TimeoutException:
            # The original caught NoSuchElementException (which wait.until
            # never raises) and then re-ran the same wait in a ``finally``
            # block, masking the return value and doubling the timeout on
            # failure. Both defects are removed here.
            return False
"content_hash": "0460e07c27e0a0746ca5d0c025951182",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 69,
"avg_line_length": 36.333333333333336,
"alnum_prop": 0.6829765545361876,
"repo_name": "mkpythonanywhereblog/selenium-python-gmail",
"id": "2836a912751f720e82ae93a50e6e6ffc604b5648",
"size": "1005",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gmail/pages/page.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24067"
}
],
"symlink_target": ""
} |
import os
import pytest
from headlessvim.process import Process
@pytest.fixture
def default_args(request):
    """Minimal vim flags: nocompatible, no viminfo, no swap, no vimrc."""
    return " ".join(["-N", "-i", "NONE", "-n", "-u", "NONE"])
@pytest.fixture
def env(request):
    """Copy of the current environment with the locale forced to C."""
    merged = dict(os.environ)
    merged["LANG"] = "C"
    return merged
@pytest.fixture
def unterminated_process(request, default_args, env):
    """Freshly started vim process; the test is responsible for cleanup."""
    proc = Process("vim", default_args, env)
    return proc
@pytest.fixture
def process(request, unterminated_process):
    """Yield a running vim process and terminate it during teardown.

    ``pytest.yield_fixture`` is deprecated (removed in pytest 6); plain
    ``@pytest.fixture`` has supported yield fixtures since pytest 3.
    """
    proc = unterminated_process
    yield proc
    proc.terminate()
def test_terminate(unterminated_process):
    """terminate() should take the process from alive to dead."""
    proc = unterminated_process
    assert proc.is_alive()
    proc.terminate()
    assert not proc.is_alive()
def test_kill(unterminated_process):
    """kill() should take the process from alive to dead."""
    proc = unterminated_process
    assert proc.is_alive()
    proc.kill()
    assert not proc.is_alive()
def test_executable(process):
    """The resolved executable is an absolute path containing 'vim'."""
    exe = process.executable
    assert "vim" in exe
    assert os.path.isabs(exe)
def test_args(process, default_args):
    """Process should preserve the exact argument string it was given."""
    assert default_args == process.args
def test_stdin(process):
    """The wrapper's ``stdin`` exposes a ``read`` method.

    NOTE(review): asserting ``read`` on stdin (and ``write`` on stdout)
    looks inverted relative to subprocess conventions -- presumably the
    Process wrapper names the streams from vim's point of view; confirm
    against headlessvim.process.Process.
    """
    assert hasattr(process.stdin, "read")
def test_stdout(process):
    """The wrapper's ``stdout`` exposes a ``write`` method (see test_stdin)."""
    assert hasattr(process.stdout, "write")
| {
"content_hash": "23d79ca4322bfab9a71a5f1abb05904a",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 53,
"avg_line_length": 19.70689655172414,
"alnum_prop": 0.7200349956255468,
"repo_name": "manicmaniac/headlessvim",
"id": "6522f09a44f3bb1ca6257b2b10ddc00ec8d1d449",
"size": "1189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_process.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "26862"
}
],
"symlink_target": ""
} |
from nose.tools import * # flake8: noqa
from api.base.settings.defaults import API_BASE
from tests.base import ApiWikiTestCase, ApiTestCase
from tests.factories import AuthUserFactory, ProjectFactory, NodeWikiFactory, RegistrationFactory
class TestNodeWikiList(ApiWikiTestCase):
    """Tests for the node and registration wiki-list API endpoints.

    The ``_set_up_*`` helpers lazily build the project/registration each
    test needs; they are deliberately not called from ``setUp`` so each
    test only pays for the fixtures it actually uses.
    """

    def _set_up_public_project_with_wiki_page(self):
        # Public project owned by self.user with one wiki page attached.
        self.public_project = ProjectFactory(is_public=True, creator=self.user)
        self.public_wiki = self._add_project_wiki_page(self.public_project, self.user)
        self.public_url = '/{}nodes/{}/wikis/'.format(API_BASE, self.public_project._id)

    def _set_up_private_project_with_wiki_page(self):
        # Private project owned by self.user with one wiki page attached.
        self.private_project = ProjectFactory(creator=self.user)
        self.private_wiki = self._add_project_wiki_page(self.private_project, self.user)
        self.private_url = '/{}nodes/{}/wikis/'.format(API_BASE, self.private_project._id)

    def _set_up_public_registration_with_wiki_page(self):
        # Registration of the public project; points the registration's
        # current wiki at the registered copy of the 'home' page.
        self._set_up_public_project_with_wiki_page()
        self.public_registration = RegistrationFactory(project=self.public_project, user=self.user, is_public=True)
        self.public_registration_wiki_id = self.public_registration.wiki_pages_versions['home'][0]
        self.public_registration.wiki_pages_current = {'home': self.public_registration_wiki_id}
        self.public_registration.save()
        self.public_registration_url = '/{}registrations/{}/wikis/'.format(API_BASE, self.public_registration._id)

    def _set_up_registration_with_wiki_page(self):
        # Same as above but for a private project/registration.
        self._set_up_private_project_with_wiki_page()
        self.registration = RegistrationFactory(project=self.private_project, user=self.user)
        self.registration_wiki_id = self.registration.wiki_pages_versions['home'][0]
        self.registration.wiki_pages_current = {'home': self.registration_wiki_id}
        self.registration.save()
        self.registration_url = '/{}registrations/{}/wikis/'.format(API_BASE, self.registration._id)

    def test_return_public_node_wikis_logged_out_user(self):
        """Anonymous users can list wikis of a public node."""
        self._set_up_public_project_with_wiki_page()
        res = self.app.get(self.public_url)
        assert_equal(res.status_code, 200)
        wiki_ids = [wiki['id'] for wiki in res.json['data']]
        assert_in(self.public_wiki._id, wiki_ids)

    def test_return_public_node_wikis_logged_in_non_contributor(self):
        """Non-contributors can list wikis of a public node."""
        self._set_up_public_project_with_wiki_page()
        res = self.app.get(self.public_url, auth=self.non_contributor.auth)
        assert_equal(res.status_code, 200)
        wiki_ids = [wiki['id'] for wiki in res.json['data']]
        assert_in(self.public_wiki._id, wiki_ids)

    def test_return_public_node_wikis_logged_in_contributor(self):
        """Contributors can list wikis of a public node."""
        self._set_up_public_project_with_wiki_page()
        res = self.app.get(self.public_url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        wiki_ids = [wiki['id'] for wiki in res.json['data']]
        assert_in(self.public_wiki._id, wiki_ids)

    def test_return_private_node_wikis_logged_out_user(self):
        """Anonymous users get 401 on a private node's wiki list."""
        self._set_up_private_project_with_wiki_page()
        res = self.app.get(self.private_url, expect_errors=True)
        assert_equal(res.status_code, 401)
        assert_equal(res.json['errors'][0]['detail'], 'Authentication credentials were not provided.')

    def test_return_private_node_wikis_logged_in_non_contributor(self):
        """Non-contributors get 403 on a private node's wiki list."""
        self._set_up_private_project_with_wiki_page()
        res = self.app.get(self.private_url, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)
        assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')

    def test_return_private_node_wikis_logged_in_contributor(self):
        """Contributors can list wikis of a private node."""
        self._set_up_private_project_with_wiki_page()
        res = self.app.get(self.private_url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        wiki_ids = [wiki['id'] for wiki in res.json['data']]
        assert_in(self.private_wiki._id, wiki_ids)

    def test_return_registration_wikis_logged_out_user(self):
        """Anonymous users get 401 on a private registration's wiki list."""
        self._set_up_registration_with_wiki_page()
        res = self.app.get(self.registration_url, expect_errors=True)
        assert_equal(res.status_code, 401)
        assert_equal(res.json['errors'][0]['detail'], 'Authentication credentials were not provided.')

    def test_return_registration_wikis_logged_in_non_contributor(self):
        """Non-contributors get 403 on a private registration's wiki list."""
        self._set_up_registration_with_wiki_page()
        res = self.app.get(self.registration_url, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)
        assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')

    def test_return_registration_wikis_logged_in_contributor(self):
        """Contributors can list wikis of their registration."""
        self._set_up_registration_with_wiki_page()
        res = self.app.get(self.registration_url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        wiki_ids = [wiki['id'] for wiki in res.json['data']]
        assert_in(self.registration_wiki_id, wiki_ids)

    def test_wikis_not_returned_for_withdrawn_registration(self):
        """Withdrawn registrations hide their wikis even from contributors."""
        self._set_up_registration_with_wiki_page()
        self.registration.is_public = True
        withdrawal = self.registration.retract_registration(user=self.user, save=True)
        # NOTE(review): ``dict.values()[0]`` is Python 2 only; under
        # Python 3 this would need ``list(...)[0]``.
        token = withdrawal.approval_state.values()[0]['approval_token']
        withdrawal.approve_retraction(self.user, token)
        withdrawal.save()
        res = self.app.get(self.registration_url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 403)
        assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')

    def test_public_node_wikis_relationship_links(self):
        """Wiki resources link back to their node and its comments."""
        self._set_up_public_project_with_wiki_page()
        res = self.app.get(self.public_url)
        expected_nodes_relationship_url = '{}nodes/{}/'.format(API_BASE, self.public_project._id)
        expected_comments_relationship_url = '{}nodes/{}/comments/'.format(API_BASE, self.public_project._id)
        assert_in(expected_nodes_relationship_url, res.json['data'][0]['relationships']['node']['links']['related']['href'])
        assert_in(expected_comments_relationship_url, res.json['data'][0]['relationships']['comments']['links']['related']['href'])

    def test_private_node_wikis_relationship_links(self):
        """Same relationship-link shape for private nodes."""
        self._set_up_private_project_with_wiki_page()
        res = self.app.get(self.private_url, auth=self.user.auth)
        expected_nodes_relationship_url = '{}nodes/{}/'.format(API_BASE, self.private_project._id)
        expected_comments_relationship_url = '{}nodes/{}/comments/'.format(API_BASE, self.private_project._id)
        assert_in(expected_nodes_relationship_url, res.json['data'][0]['relationships']['node']['links']['related']['href'])
        assert_in(expected_comments_relationship_url, res.json['data'][0]['relationships']['comments']['links']['related']['href'])

    def test_public_registration_wikis_relationship_links(self):
        """Registration wikis link to registrations/, not nodes/."""
        self._set_up_public_registration_with_wiki_page()
        res = self.app.get(self.public_registration_url)
        expected_nodes_relationship_url = '{}registrations/{}/'.format(API_BASE, self.public_registration._id)
        expected_comments_relationship_url = '{}registrations/{}/comments/'.format(API_BASE, self.public_registration._id)
        assert_in(expected_nodes_relationship_url, res.json['data'][0]['relationships']['node']['links']['related']['href'])
        assert_in(expected_comments_relationship_url, res.json['data'][0]['relationships']['comments']['links']['related']['href'])

    def test_private_registration_wikis_relationship_links(self):
        """Same registration-link shape for private registrations."""
        self._set_up_registration_with_wiki_page()
        res = self.app.get(self.registration_url, auth=self.user.auth)
        expected_nodes_relationship_url = '{}registrations/{}/'.format(API_BASE, self.registration._id)
        expected_comments_relationship_url = '{}registrations/{}/comments/'.format(API_BASE, self.registration._id)
        assert_in(expected_nodes_relationship_url, res.json['data'][0]['relationships']['node']['links']['related']['href'])
        assert_in(expected_comments_relationship_url, res.json['data'][0]['relationships']['comments']['links']['related']['href'])

    def test_registration_wikis_not_returned_from_nodes_endpoint(self):
        """The nodes endpoint lists only the project's own wiki."""
        self._set_up_public_project_with_wiki_page()
        self._set_up_public_registration_with_wiki_page()
        res = self.app.get(self.public_url)
        node_relationships = [
            node_wiki['relationships']['node']['links']['related']['href']
            for node_wiki in res.json['data']
        ]
        assert_equal(res.status_code, 200)
        assert_equal(len(node_relationships), 1)
        assert_in(self.public_project._id, node_relationships[0])

    def test_node_wikis_not_returned_from_registrations_endpoint(self):
        """The registrations endpoint lists only the registration's wiki."""
        self._set_up_public_project_with_wiki_page()
        self._set_up_public_registration_with_wiki_page()
        res = self.app.get(self.public_registration_url)
        node_relationships = [
            node_wiki['relationships']['node']['links']['related']['href']
            for node_wiki in res.json['data']
        ]
        assert_equal(res.status_code, 200)
        assert_equal(len(node_relationships), 1)
        assert_in(self.public_registration._id, node_relationships[0])
class TestFilterNodeWikiList(ApiTestCase):
    """Filtering behaviour of the node wiki-list endpoint."""

    def setUp(self):
        super(TestFilterNodeWikiList, self).setUp()
        self.user = AuthUserFactory()
        self.project = ProjectFactory(creator=self.user)
        self.base_url = '/{}nodes/{}/wikis/'.format(API_BASE, self.project._id)
        self.wiki = NodeWikiFactory(node=self.project, user=self.user)
        # ISO-ish timestamp used by the date_modified filter tests below.
        self.date = self.wiki.date.strftime('%Y-%m-%dT%H:%M:%S.%f')

    def test_node_wikis_with_no_filter_returns_all(self):
        res = self.app.get(self.base_url, auth=self.user.auth)
        returned_ids = [each['id'] for each in res.json['data']]
        assert_in(self.wiki._id, returned_ids)

    def test_filter_wikis_by_page_name(self):
        filtered_url = '{}?filter[name]=home'.format(self.base_url)
        res = self.app.get(filtered_url, auth=self.user.auth)
        assert_equal(len(res.json['data']), 1)
        assert_equal(res.json['data'][0]['attributes']['name'], 'home')

    def test_filter_wikis_modified_on_date(self):
        filtered_url = '{}?filter[date_modified][eq]={}'.format(self.base_url, self.date)
        res = self.app.get(filtered_url, auth=self.user.auth)
        assert_equal(len(res.json['data']), 1)

    def test_filter_wikis_modified_before_date(self):
        filtered_url = '{}?filter[date_modified][lt]={}'.format(self.base_url, self.date)
        res = self.app.get(filtered_url, auth=self.user.auth)
        assert_equal(len(res.json['data']), 0)

    def test_filter_wikis_modified_after_date(self):
        filtered_url = '{}?filter[date_modified][gt]={}'.format(self.base_url, self.date)
        res = self.app.get(filtered_url, auth=self.user.auth)
        assert_equal(len(res.json['data']), 0)
| {
"content_hash": "e05b27786f9b8cd9e28ec6838ac2e9d0",
"timestamp": "",
"source": "github",
"line_count": 198,
"max_line_length": 131,
"avg_line_length": 55.96969696969697,
"alnum_prop": 0.6681104493773687,
"repo_name": "rdhyee/osf.io",
"id": "91dd7c27e3a03d087f4d29867c5b4e1e04bb22d2",
"size": "11106",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "api_tests/nodes/views/test_node_wiki_list.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "174764"
},
{
"name": "HTML",
"bytes": "131860"
},
{
"name": "JavaScript",
"bytes": "1663707"
},
{
"name": "Mako",
"bytes": "679787"
},
{
"name": "Perl",
"bytes": "13885"
},
{
"name": "Python",
"bytes": "6720626"
}
],
"symlink_target": ""
} |
from django import template
register = template.Library()
@register.filter
def reportdict(input, property):
    """Template filter: return ``input[property]`` if present and truthy, else 0.

    NOTE: the parameter names shadow the ``input``/``property`` builtins;
    they are kept unchanged for backward compatibility with existing
    template usage.
    """
    # Single EAFP lookup replaces the original's membership test followed
    # by two separate subscript lookups of the same key.
    try:
        value = input[property]
    except KeyError:
        return 0
    return value or 0
| {
"content_hash": "1bb21d11b68949d38384079a09800340",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 34,
"avg_line_length": 17.833333333333332,
"alnum_prop": 0.677570093457944,
"repo_name": "unicefuganda/edtrac",
"id": "dec2746a479e023c7089c4ff81854598f0654585",
"size": "214",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "edtrac_project/rapidsms_generic/generic/templatetags/reportdict.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "277434"
},
{
"name": "JavaScript",
"bytes": "190346"
},
{
"name": "Python",
"bytes": "2621572"
},
{
"name": "Shell",
"bytes": "4755"
}
],
"symlink_target": ""
} |
import tornado.websocket
from tornado import gen
import json
import config
@gen.coroutine
def test_ws():
    """Interactively send chat messages over a websocket until 'quit'."""
    client = yield tornado.websocket.websocket_connect(config.WebSocket.url)
    msg = ""
    while True:
        msg = input("-> ")
        if msg:
            # Shape mimics a (minimal) Twitter status payload.
            tweet = {
                "user": {
                    "screen_name": config.Client.nick,
                    "name": config.Client.name,
                },
                "entities": "",
                "channel": input("channel "),
                "text": msg,
            }
            client.write_message(json.dumps(tweet))
        if msg == "quit":
            client.close()
        message = yield client.read_message()
        print(message)
if __name__ == "__main__":
    # Drive the coroutine to completion on the singleton IOLoop.
    # NOTE(review): ``tornado.ioloop`` is never imported directly here;
    # this presumably works because importing ``tornado.websocket`` pulls
    # it in -- confirm, or add an explicit ``import tornado.ioloop``.
    tornado.ioloop.IOLoop.instance().run_sync(test_ws)
| {
"content_hash": "6ab9cec0b98062abfa45efc0f962bd26",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 76,
"avg_line_length": 27.03125,
"alnum_prop": 0.4936416184971098,
"repo_name": "r3ek0/infowall",
"id": "4f760608b58b30cc40e2f8ca36d0af7e542a5f46",
"size": "888",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tornado_client.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1420"
},
{
"name": "HTML",
"bytes": "2477"
},
{
"name": "JavaScript",
"bytes": "59"
},
{
"name": "Python",
"bytes": "7407"
}
],
"symlink_target": ""
} |
from decimal import Decimal
import requests
import logging
from ... import utils
import config
def receive_btc():
"""generates a Bitcoin receiving address"""
# generate new blockchain.info wallet addr
r = requests.get("https://blockchain.info/merchant/%s/new_address?password=%s&second_password=%s"
% (config.guid, config.main_password, config.second_password))
r.raise_for_status()
new_addr = r.json()['address']
print "generated new address %s" % new_addr
return new_addr
def send_btc(to_addr, amount):
amount = Decimal(amount)
print "sending %s btc to %s" % (amount, to_addr)
if utils.yesornoquestion("are you sure this is correct?", default=True):
# send btc from wallet to user
r = requests.get("https://blockchain.info/merchant/%s/payment?password=%s&second_password=%s&to=%s&amount=%s&shared=%s"
% (config.guid, config.main_password, config.second_password, to_addr, int(Decimal(100000000) * amount), config.shared))
r.raise_for_status()
print r.json()
print "sent %s btc from wallet to %s" % (amount, to_addr)
else:
print "retrying"
main.send_coins(amount)
| {
"content_hash": "4c5765492027553baec8cb051892eb07",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 128,
"avg_line_length": 33.411764705882355,
"alnum_prop": 0.6945422535211268,
"repo_name": "ahdinosaur/localbitcoins",
"id": "4ef5ce845b675e7db782c767a3fa36fe2f04bfaf",
"size": "1136",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/localbitcoins/wallets/blockchain/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21460"
}
],
"symlink_target": ""
} |
"""add sample and identifier history
Revision ID: b4f6eb55d503
Revises: 4aa79d5ac86e
Create Date: 2018-08-31 13:26:22.482944
"""
import model.utils
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "b4f6eb55d503"        # this migration's id
down_revision = "4aa79d5ac86e"   # immediate parent in the migration graph
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade function, e.g. upgrade_rdr()."""
    globals()["upgrade_" + engine_name]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade function, e.g. downgrade_rdr()."""
    globals()["downgrade_" + engine_name]()
def upgrade_rdr():
    """Create history tables for biobank order identifiers and ordered samples.

    Released Alembic migration: the schema below must not be edited after
    it has been applied to any environment.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "biobank_order_identifier_history",
        sa.Column("system", sa.String(length=80), nullable=False),
        sa.Column("value", sa.String(length=80), nullable=False),
        sa.Column("version", sa.Integer(), nullable=False),
        sa.Column("biobank_order_id", sa.String(length=80), nullable=False),
        sa.ForeignKeyConstraint(["biobank_order_id"], ["biobank_order.biobank_order_id"]),
        sa.PrimaryKeyConstraint("system", "value", "version"),
    )
    op.create_table(
        "biobank_ordered_sample_history",
        sa.Column("test", sa.String(length=80), nullable=False),
        sa.Column("description", sa.UnicodeText(), nullable=False),
        sa.Column("processing_required", sa.Boolean(), nullable=False),
        sa.Column("collected", model.utils.UTCDateTime(), nullable=True),
        sa.Column("processed", model.utils.UTCDateTime(), nullable=True),
        sa.Column("finalized", model.utils.UTCDateTime(), nullable=True),
        sa.Column("version", sa.Integer(), nullable=False),
        sa.Column("order_id", sa.String(length=80), nullable=False),
        sa.ForeignKeyConstraint(["order_id"], ["biobank_order.biobank_order_id"]),
        sa.PrimaryKeyConstraint("order_id", "test", "version"),
    )
    # ### end Alembic commands ###
def downgrade_rdr():
    """Drop the two history tables created by upgrade_rdr()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("biobank_ordered_sample_history")
    op.drop_table("biobank_order_identifier_history")
    # ### end Alembic commands ###
def upgrade_metrics():
    """No metrics-database schema changes in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
def downgrade_metrics():
    """No metrics-database schema changes to revert in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| {
"content_hash": "2eb65ac438c79b6c836b2a742b320847",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 90,
"avg_line_length": 33.91428571428571,
"alnum_prop": 0.6575400168491997,
"repo_name": "all-of-us/raw-data-repository",
"id": "39a54e5eff9703ed770e731017c0dbd54e9f4b54",
"size": "2374",
"binary": false,
"copies": "1",
"ref": "refs/heads/devel",
"path": "rdr_service/alembic/versions/b4f6eb55d503_add_sample_and_identifier_history.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1866"
},
{
"name": "Mako",
"bytes": "1715"
},
{
"name": "Python",
"bytes": "17040924"
},
{
"name": "R",
"bytes": "2212"
},
{
"name": "Shell",
"bytes": "92213"
}
],
"symlink_target": ""
} |
import datetime
import operator
import sickbeard
from sickbeard import db
from sickbeard import helpers, logger, show_name_helpers
from sickbeard import providers
from sickbeard import search
from sickbeard import history
from sickbeard.common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, Quality
from lib.tvdb_api import tvdb_api, tvdb_exceptions
from name_parser.parser import NameParser, InvalidNameException
class ProperFinder():
    """Finds and snatches PROPER re-releases of already-downloaded episodes.

    Run periodically; does real work only in the hour after 1 AM. Searches
    every active provider for recent PROPER releases, matches them to shows
    in ``sickbeard.showList``, keeps only those whose episode was already
    snatched/downloaded at the same quality, and re-snatches them.
    """

    def __init__(self):
        # Scheduler cadence; run() only acts within one interval of 1 AM.
        self.updateInterval = datetime.timedelta(hours=1)

    def run(self):
        """Entry point for the scheduler: gate on config and time-of-day."""
        if not sickbeard.DOWNLOAD_PROPERS:
            return
        # look for propers every night at 1 AM
        updateTime = datetime.time(hour=1)
        logger.log(u"Checking proper time", logger.DEBUG)
        hourDiff = datetime.datetime.today().time().hour - updateTime.hour
        # if it's less than an interval after the update time then do an update
        if hourDiff >= 0 and hourDiff < self.updateInterval.seconds/3600:
            logger.log(u"Beginning the search for new propers")
        else:
            return
        propers = self._getProperList()
        self._downloadPropers(propers)

    def _getProperList(self):
        """Collect, de-duplicate and filter PROPER candidates.

        Returns a list of proper result objects annotated with
        ``tvdbid``/``season``/``episode``/``quality``.
        """
        propers = {}
        # for each provider get a list of the propers
        for curProvider in providers.sortedProviderList():
            if not curProvider.isActive():
                continue
            # only look at releases from the last two days
            date = datetime.datetime.today() - datetime.timedelta(days=2)
            logger.log(u"Searching for any new PROPER releases from "+curProvider.name)
            curPropers = curProvider.findPropers(date)
            # if they haven't been added by a different provider than add the proper to the list
            for x in curPropers:
                name = self._genericName(x.name)
                if not name in propers:
                    logger.log(u"Found new proper: "+x.name, logger.DEBUG)
                    x.provider = curProvider
                    propers[name] = x
        # take the list of unique propers and get it sorted by
        # date (newest first)
        sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
        finalPropers = []
        for curProper in sortedPropers:
            # parse the file name
            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(curProper.name)
            except InvalidNameException:
                logger.log(u"Unable to parse the filename "+curProper.name+" into a valid episode", logger.DEBUG)
                continue
            if not parse_result.episode_numbers:
                logger.log(u"Ignoring "+curProper.name+" because it's for a full season rather than specific episode", logger.DEBUG)
                continue
            # populate our Proper instance
            if parse_result.air_by_date:
                # sentinel season -1 marks "air-by-date, resolve via TVDB below"
                curProper.season = -1
                curProper.episode = parse_result.air_date
            else:
                curProper.season = parse_result.season_number if parse_result.season_number != None else 1
                curProper.episode = parse_result.episode_numbers[0]
            curProper.quality = Quality.nameQuality(curProper.name)
            # for each show in our list
            for curShow in sickbeard.showList:
                if not parse_result.series_name:
                    continue
                genericName = self._genericName(parse_result.series_name)
                # get the scene name masks
                sceneNames = set(show_name_helpers.makeSceneShowSearchStrings(curShow))
                # for each scene name mask
                for curSceneName in sceneNames:
                    # if it matches
                    if genericName == self._genericName(curSceneName):
                        logger.log(u"Successful match! Result "+parse_result.series_name+" matched to show "+curShow.name, logger.DEBUG)
                        # set the tvdbid in the db to the show's tvdbid
                        curProper.tvdbid = curShow.tvdbid
                        # since we found it, break out
                        break
                # if we found something in the inner for loop break out of this one
                if curProper.tvdbid != -1:
                    break
            if curProper.tvdbid == -1:
                continue
            if not show_name_helpers.filterBadReleases(curProper.name):
                logger.log(u"Proper "+curProper.name+" isn't a valid scene release that we want, igoring it", logger.DEBUG)
                continue
            # if we have an air-by-date show then get the real season/episode numbers
            if curProper.season == -1 and curProper.tvdbid:
                showObj = helpers.findCertainShow(sickbeard.showList, curProper.tvdbid)
                if not showObj:
                    logger.log(u"This should never have happened, post a bug about this!", logger.ERROR)
                    raise Exception("BAD STUFF HAPPENED")
                tvdb_lang = showObj.lang
                # There's gotta be a better way of doing this but we don't wanna
                # change the language value elsewhere
                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
                if tvdb_lang and not tvdb_lang == 'en':
                    ltvdb_api_parms['language'] = tvdb_lang
                try:
                    t = tvdb_api.Tvdb(**ltvdb_api_parms)
                    epObj = t[curProper.tvdbid].airedOn(curProper.episode)[0]
                    curProper.season = int(epObj["seasonnumber"])
                    curProper.episodes = [int(epObj["episodenumber"])]
                except tvdb_exceptions.tvdb_episodenotfound:
                    logger.log(u"Unable to find episode with date "+str(curProper.episode)+" for show "+parse_result.series_name+", skipping", logger.WARNING)
                    continue
            # check if we actually want this proper (if it's the right quality)
            sqlResults = db.DBConnection().select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.tvdbid, curProper.season, curProper.episode])
            if not sqlResults:
                continue
            oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))
            # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
            if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
                continue
            # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
            if curProper.tvdbid != -1 and (curProper.tvdbid, curProper.season, curProper.episode) not in map(operator.attrgetter('tvdbid', 'season', 'episode'), finalPropers):
                logger.log(u"Found a proper that we need: "+str(curProper.name))
                finalPropers.append(curProper)
        return finalPropers

    def _downloadPropers(self, properList):
        """Snatch propers whose originals appear in recent download history."""
        for curProper in properList:
            # only consider history from the last 30 days
            historyLimit = datetime.datetime.today() - datetime.timedelta(days=30)
            # make sure the episode has been downloaded before
            myDB = db.DBConnection()
            historyResults = myDB.select(
                "SELECT resource FROM history "
                "WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? "
                "AND action IN (" + ",".join([str(x) for x in Quality.SNATCHED]) + ")",
                [curProper.tvdbid, curProper.season, curProper.episode, curProper.quality, historyLimit.strftime(history.dateFormat)])
            # if we didn't download this episode in the first place we don't know what quality to use for the proper so we can't do it
            if len(historyResults) == 0:
                logger.log(u"Unable to find an original history entry for proper "+curProper.name+" so I'm not downloading it.")
                continue
            else:
                # make sure that none of the existing history downloads are the same proper we're trying to download
                isSame = False
                for curResult in historyResults:
                    # if the result exists in history already we need to skip it
                    if self._genericName(curResult["resource"]) == self._genericName(curProper.name):
                        isSame = True
                        break
                if isSame:
                    logger.log(u"This proper is already in history, skipping it", logger.DEBUG)
                    continue
            # get the episode object
            showObj = helpers.findCertainShow(sickbeard.showList, curProper.tvdbid)
            if showObj == None:
                logger.log(u"Unable to find the show with tvdbid "+str(curProper.tvdbid)+" so unable to download the proper", logger.ERROR)
                continue
            epObj = showObj.getEpisode(curProper.season, curProper.episode)
            # make the result object
            result = curProper.provider.getResult([epObj])
            result.url = curProper.url
            result.name = curProper.name
            result.quality = curProper.quality
            # snatch it
            downloadResult = search.snatchEpisode(result, SNATCHED_PROPER)
            # NOTE(review): returning inside the loop stops after the first
            # snatched proper -- confirm whether the return was meant to sit
            # outside the loop so all propers are processed.
            return downloadResult

    def _genericName(self, name):
        """Normalize a release name for comparison: separators to spaces, lowercased."""
        return name.replace(".", " ").replace("-"," ").replace("_"," ").lower()
| {
"content_hash": "815a1cce9d6bc234857b8eaf6ef97698",
"timestamp": "",
"source": "github",
"line_count": 225,
"max_line_length": 188,
"avg_line_length": 43.937777777777775,
"alnum_prop": 0.5762694719805787,
"repo_name": "Branlala/docker-sickbeardfr",
"id": "05e147212d841b34571422af8bdf6fb2dd3be2b9",
"size": "10651",
"binary": false,
"copies": "35",
"ref": "refs/heads/master",
"path": "sickbeard/sickbeard/properFinder.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "83278"
},
{
"name": "CSS",
"bytes": "155616"
},
{
"name": "JavaScript",
"bytes": "248414"
},
{
"name": "Python",
"bytes": "8146521"
},
{
"name": "Ruby",
"bytes": "2461"
},
{
"name": "Shell",
"bytes": "8791"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_target_canonical_facts
deprecated:
removed_in: '2.13'
why: Deprecated in favour of C(_info) module.
alternative: Use M(vmware_target_canonical_info) instead.
short_description: Return canonical (NAA) from an ESXi host system
description:
- This module can be used to gather facts about canonical (NAA) from an ESXi host based on SCSI target ID.
version_added: "2.0"
author:
- Joseph Callen (@jcpowermac)
- Abhijeet Kasurde (@Akasurde)
notes:
requirements:
- Tested on vSphere 5.5 and 6.5
- PyVmomi installed
options:
target_id:
description:
- The target id based on order of scsi device.
- version 2.6 onwards, this parameter is optional.
required: False
type: int
cluster_name:
description:
- Name of the cluster.
- Facts about all SCSI devices for all host system in the given cluster is returned.
- This parameter is required, if C(esxi_hostname) is not provided.
version_added: 2.6
type: str
esxi_hostname:
description:
- Name of the ESXi host system.
- Facts about all SCSI devices for the given ESXi host system is returned.
- This parameter is required, if C(cluster_name) is not provided.
version_added: 2.6
type: str
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Get Canonical name of particular target on particular ESXi host system
vmware_target_canonical_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
target_id: 7
esxi_hostname: esxi_hostname
delegate_to: localhost
- name: Get Canonical name of all target on particular ESXi host system
vmware_target_canonical_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
delegate_to: localhost
- name: Get Canonical name of all ESXi hostname on particular Cluster
vmware_target_canonical_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
cluster_name: '{{ cluster_name }}'
delegate_to: localhost
'''
RETURN = r"""
canonical:
description: metadata about SCSI Target device
returned: if host system and target id is given
type: str
sample: "mpx.vmhba0:C0:T0:L0"
scsi_tgt_facts:
description: metadata about all SCSI Target devices
returned: if host system or cluster is given
type: dict
sample: {
"DC0_C0_H0": {
"scsilun_canonical": {
"key-vim.host.ScsiDisk-0000000000766d686261303a303a30": "mpx.vmhba0:C0:T0:L0",
"key-vim.host.ScsiLun-0005000000766d686261313a303a30": "mpx.vmhba1:C0:T0:L0"
},
"target_lun_uuid": {
"0": "key-vim.host.ScsiDisk-0000000000766d686261303a303a30"
}
},
"DC0_C0_H1": {
"scsilun_canonical": {
"key-vim.host.ScsiDisk-0000000000766d686261303a303a30": "mpx.vmhba0:C0:T0:L0",
"key-vim.host.ScsiLun-0005000000766d686261313a303a30": "mpx.vmhba1:C0:T0:L0"
},
"target_lun_uuid": {
"0": "key-vim.host.ScsiDisk-0000000000766d686261303a303a30"
}
},
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec
class ScsiTargetFactsManager(PyVmomi):
    """Gathers SCSI target facts for one ESXi host or all hosts in a cluster."""

    def __init__(self, module):
        super(ScsiTargetFactsManager, self).__init__(module)
        cluster_name = self.module.params.get('cluster_name')
        self.esxi_hostname = self.module.params.get('esxi_hostname')
        # Resolve the host list once: all hosts of the cluster, or the single
        # named host, depending on which parameter the user supplied.
        self.hosts = self.get_all_host_objs(cluster_name=cluster_name, esxi_host_name=self.esxi_hostname)

    def gather_scsi_device_facts(self):
        """
        Function to gather facts about SCSI target devices.

        Exits the module with either a single ``canonical`` name (when both
        ``target_id`` and ``esxi_hostname`` were given) or the full
        ``scsi_tgt_facts`` dict keyed by host name.
        """
        scsi_tgt_facts = {}
        target_id = self.module.params['target_id']
        for host in self.hosts:
            # BUGFIX: build fresh maps per host. Previously these dicts were
            # created once outside the loop, so one host's LUN data leaked
            # into every other host's facts.
            target_lun_uuid = {}
            scsilun_canonical = {}
            # Associate the scsiLun key with the canonicalName (NAA)
            for scsilun in host.config.storageDevice.scsiLun:
                scsilun_canonical[scsilun.key] = scsilun.canonicalName
            # Associate target number with LUN uuid
            for target in host.config.storageDevice.scsiTopology.adapter[0].target:
                for lun in target.lun:
                    target_lun_uuid[target.target] = lun.scsiLun
            scsi_tgt_facts[host.name] = dict(scsilun_canonical=scsilun_canonical,
                                             target_lun_uuid=target_lun_uuid)

        if target_id is not None and self.esxi_hostname is not None:
            canonical = ''
            # BUGFIX: check membership before indexing. The old code read
            # scsi_tgt_facts[self.esxi_hostname] first and only then tested
            # membership, raising KeyError when the host was not found.
            if self.esxi_hostname in scsi_tgt_facts:
                temp_lun_data = scsi_tgt_facts[self.esxi_hostname]['target_lun_uuid']
                if target_id in temp_lun_data:
                    temp_scsi_data = scsi_tgt_facts[self.esxi_hostname]['scsilun_canonical']
                    canonical = temp_scsi_data[temp_lun_data[target_id]]
            self.module.exit_json(changed=False, canonical=canonical)

        self.module.exit_json(changed=False, scsi_tgt_facts=scsi_tgt_facts)
def main():
    """Entry point: build the argument spec, construct the module, gather facts."""
    spec = vmware_argument_spec()
    # One of cluster_name / esxi_hostname must be supplied; both are optional
    # individually, which AnsibleModule enforces via required_one_of.
    spec.update(
        target_id=dict(required=False, type='int'),
        cluster_name=dict(type='str', required=False),
        esxi_hostname=dict(type='str', required=False),
    )
    module = AnsibleModule(
        argument_spec=spec,
        required_one_of=[['cluster_name', 'esxi_hostname']],
        supports_check_mode=True,
    )
    ScsiTargetFactsManager(module).gather_scsi_device_facts()
# Run only when executed directly (Ansible runs modules as scripts).
if __name__ == '__main__':
    main()
| {
"content_hash": "3a11ab463b8e8d9ff92941c6d91dc053",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 110,
"avg_line_length": 33.85245901639344,
"alnum_prop": 0.6358353510895883,
"repo_name": "thaim/ansible",
"id": "629c8e82eead49be2ae784e9a568b06e9b678a52",
"size": "6428",
"binary": false,
"copies": "20",
"ref": "refs/heads/fix-broken-link",
"path": "lib/ansible/modules/cloud/vmware/_vmware_target_canonical_facts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "246"
}
],
"symlink_target": ""
} |
import os
import World, Globals
import RoomInit, MobInit, Objects
import cInteractions
# checks for a previous set of server data, and if it is present, load it.
def clientDataLoad(client, CLIENT_LIST, CLIENT_DATA, TIMERS, kind):
    """
    Loads the information in the client file to CLIENT_DATA

    Reads the save file at data/client/<name>/<name>, parses its
    'key=value' lines, rebuilds the player's avatar (stats, inventory,
    equipment and, if saved, battle room), and installs everything on
    CLIENT_DATA[clientDataID].
    NOTE(review): TIMERS and kind are accepted but not used in this body.
    """
    clientDataID = str(client.addrport())
    name = CLIENT_DATA[clientDataID].name
    fileData = []
    path = 'data/client/' + name + '/' + name
    try:
        with open(path, 'r') as CD:
            fileData = CD.readlines()
    except:
        # Missing/unreadable save file: fall through with empty fileData so
        # the defaults below are used.
        print "!! " + path + " not found, " + name + " failed to load."
    # Defaults for every field the save file may provide.
    clientName = ''
    op = False
    prompt = ''
    clientID = None
    title = ''
    description = ''
    currentRoomString = ''
    hp = None
    maxHp = None
    pp = None
    maxPp = None
    level = None
    exp = None
    money = None
    offense = None
    defense = None
    speed = None
    guts = None
    luck = None
    vitality = None
    IQ = None
    inventorySize = None
    inventoryItems = []
    gameState = None
    currentRoomRoom = None
    battleRoom = None
    battleRoomAttachedTo = None
    battleCommands = None
    rewardExp = 0
    rewardMoney = 0
    expToLevel = 0
    equipItems = ''
    newItemsList = []
    # Each line is "key=value\n"; the slice strips the key prefix and the
    # trailing newline.
    for data in fileData:
        if data.startswith('name='):
            clientName = data[5:-1]
        if data.startswith('op='):
            # NOTE(review): op is read back as the string 'True'/'False',
            # not a bool.
            op = data[3:-1]
        if data.startswith("prompt="):
            prompt = data[7:-1]
        if data.startswith('gameState='):
            gameState = data[10:-1]
        if data.startswith("battleRoom="):
            battleRoom = data[11:-1]
        if data.startswith("battleRoom.attachedTo="):
            battleRoomAttachedTo = data[22:-1]
            battleRoomAttachedTo = battleRoomAttachedTo.split(":")
        if data.startswith("clientID="):
            clientID= data[9:-1]
        if data.startswith("title="):
            title = data[6:-1]
        if data.startswith("description="):
            description = data[12:-1]
        if data.startswith("currentRoom="):
            currentRoomString = data[12:-1]
            # Fall back to the global starting room when no room was saved.
            if currentRoomString == '':
                currentRoomString = Globals.startingRoom.region + ":" + Globals.startingRoom.name
            currentRoomCoord = currentRoomString.split(":")
            # Resolve the "region:room" string to the actual room object.
            for region in Globals.regionListDict:
                for room in Globals.regionListDict[region]:
                    if room == currentRoomCoord[1]:
                        currentRoomRoom = Globals.regionListDict[currentRoomCoord[0]][currentRoomCoord[1]]
        if data.startswith("hp="):
            hp = int(data[3:-1])
        if data.startswith("maxHp="):
            maxHp = int(data[6:-1])
        if data.startswith("pp="):
            pp = int(data[3:-1])
        if data.startswith("maxPp="):
            maxPp = int(data[6:-1])
        if data.startswith("level="):
            level = int(data[6:-1])
        if data.startswith("exp="):
            exp = int(data[4:-1])
        if data.startswith("expToLevel="):
            expToLevel = int(data[11:-1])
        if data.startswith("money="):
            money = int(data[6:-1])
        if data.startswith("rewardExp="):
            rewardExp = int(data[10:-1])
        if data.startswith("rewardMoney="):
            rewardMoney = int(data[12:-1])
        if data.startswith("offense="):
            offense = int(data[8:-1])
        if data.startswith("defense="):
            defense = int(data[8:-1])
        if data.startswith("speed="):
            speed = int(data[6:-1])
        if data.startswith("guts="):
            guts = int(data[5:-1])
        if data.startswith("luck="):
            luck = int(data[5:-1])
        if data.startswith("vitality="):
            vitality = int(data[9:-1])
        if data.startswith("IQ="):
            IQ = int(data[3:-1])
        if data.startswith("battleCommands="):
            battleCommands = data[15:-1]
            battleCommands = battleCommands.split(', ')
        if data.startswith("inventorySize="):
            inventorySize = int(data[14:-1])
        if data.startswith("inventory="):
            inventory = data[10:-1]
            inventory = inventory.split(", ")
            # Load any per-player equipment files first so saved gear can be
            # matched by ID below.
            if os.path.exists('data/client/' + name + '/inv_equip/'):
                fileList = os.listdir('data/client/' + name + '/inv_equip/')
                for equip in fileList:
                    Objects.buildEquipmentFromFile(equip, 'data/client/' + name + '/inv_equip/')
            for item in inventory:
                if item != '':
                    found = False
                    # Plain objects are matched by name...
                    for obj in Globals.fromFileList:
                        if item == obj.name and found == False:
                            newItem = cmdSpawnObject(obj.name, currentRoomRoom, alert=False, whereFrom='playerinv')
                            inventoryItems.append(newItem)
                            newItemsList.append(newItem)
                            found = True
                    # ...equipment is matched by its saved ID.
                    for obj in Globals.equipmentFromFile:
                        if item == str(obj.ID) and found == False:
                            newItem = cmdSpawnObject(obj.ID, currentRoomRoom, alert=False, whereFrom='playerinv')
                            inventoryItems.append(newItem)
                            newItemsList.append(newItem)
                            found = True
                else:
                    inventory.remove(item)
        if data.startswith("equipment="):
            equipItems = data[10:-1]
            equipItems = equipItems.split(", ")
    # Rebuild the avatar and its stat container from the parsed values.
    newAvatar = World.Player(description=description, currentRoom=currentRoomRoom, name=clientName, client=client, clientDataID=clientDataID, title=title, rewardExp=rewardExp, rewardMoney=rewardMoney)
    newMortal = World.mortal(hp, maxHp, pp, maxPp, level, exp, money, offense, defense, speed, guts, luck, vitality, IQ, [])
    newAvatar.expToLevel = expToLevel
    newMortal.inventory = []
    # Install the rebuilt avatar on the client's CLIENT_DATA entry.
    CLIENT_DATA[clientDataID].name = clientName
    CLIENT_DATA[clientDataID].op = op
    CLIENT_DATA[clientDataID].prompt = prompt
    CLIENT_DATA[clientDataID].clientID = clientID
    CLIENT_DATA[clientDataID].avatar = newAvatar
    CLIENT_DATA[clientDataID].avatar.kind = newMortal
    CLIENT_DATA[clientDataID].gameState = gameState
    CLIENT_DATA[clientDataID].avatar.battleCommands = battleCommands
    # Recreate a saved battle room and re-attach it to its world room.
    if battleRoom != 'None' and battleRoom != '' and battleRoom != None:
        newBattleRoom = RoomInit.loadBattleRoom('battles/'+str(battleRoom))
        newBattleRoom.attachedTo = Globals.regionListDict[battleRoomAttachedTo[0]][battleRoomAttachedTo[1]]
        Globals.battleRooms.append(newBattleRoom)
        CLIENT_DATA[clientDataID].battleRoom = newBattleRoom
        currentRoomRoom = newBattleRoom
    # If the saved room name matches an existing battle room, resume there.
    for room in Globals.battleRooms:
        if room.name == currentRoomCoord[1]:
            currentRoomRoom = room
    for item in inventoryItems:
        CLIENT_DATA[clientDataID].avatar.kind.inventory.append(item)
    CLIENT_DATA[clientDataID].avatar.kind.hp = hp
    CLIENT_DATA[clientDataID].avatar.kind.exp = exp
    CLIENT_DATA[clientDataID].avatar.kind.inventorySize = inventorySize
    CLIENT_DATA[clientDataID].avatar.currentRoom = currentRoomRoom
    CLIENT_DATA[clientDataID].avatar.currentRoom.players.append(CLIENT_DATA[clientDataID].avatar)
    # Re-equip saved gear: entries are "slot:gear" where gear is a name or an
    # equipment ID; equipping is delegated to the normal equip command.
    if equipItems != ['']:
        for item in equipItems:
            item = item.split(":")
            slot = item[0]
            gear = item[1]
            eq = None
            for item in currentRoomRoom.objects:
                if item.ID == gear:
                    eq = item
                elif item.name == gear:
                    eq = item
            args = [eq.name]
            cInteractions.equip(CLIENT_DATA[clientDataID].client, args, CLIENT_LIST, clientDataID, CLIENT_DATA)
    # cmdSpawnObject placed the spawned inventory items in the room; remove
    # them from the room now that they live in the player's inventory.
    for item in newItemsList:
        for obj in currentRoomRoom.objects:
            if obj == item:
                currentRoomRoom.objects.remove(obj)
def clientDataSave(client, CLIENT_LIST, CLIENT_DATA, TIMERS):
    """
    Saves a single client's state to data/client/<name>/<name>.

    Writes identity, stats, battle commands, inventory and equipment as
    'key=value' lines (first line is the password), saves equipment objects
    to data/client/<name>/inv_equip/, and persists any active battle room
    plus its mobs.
    NOTE(review): TIMERS is accepted but not used in this body.
    """
    clientDataID = str(client.addrport())
    player = CLIENT_DATA[clientDataID]
    CLIENT = clientDataID
    # Make sure the on-disk layout exists before writing.
    if not os.path.exists('data'):
        os.mkdir('data')
    if not os.path.exists('data/world'):
        os.mkdir('data/world')
    if not os.path.exists('data/world/battles'):
        os.mkdir('data/world/battles')
    if not os.path.exists('data/client/' + player.name + '/'):
        os.mkdir('data/client/' + player.name + '/')
    fileList = []
    # Clear previously saved equipment files; they are rewritten below.
    if not os.path.exists('data/client/' + str(player.name) + '/inv_equip/'):
        os.mkdir('data/client/' + str(player.name) + '/inv_equip/')
    fileList = os.listdir('data/client/' + str(player.name) + '/inv_equip/')
    if fileList != []:
        for file in fileList:
            os.remove('data/client/' + str(player.name) + '/inv_equip/' + file)
    try:
        name = player.name
        op = player.op
        prompt = player.prompt
        gameState = player.gameState
        if player.battleRoom is not None:
            battleRoom = str(player.battleRoom.name)
        else:
            battleRoom = ''
        if player.battleRoom is not None:
            attachedTo = player.battleRoom.attachedTo
        else:
            attachedTo = ''
        #client = str(player.client) # should be recreated on reload, not saved
        clientID = str(player.clientID)
        avatar = player.avatar # should be recreated on reload, not saved
        avatarName = avatar.name
        title = avatar.title
        description = player.avatar.description
        currentRoom = str(player.avatar.currentRoom.region+":"+player.avatar.currentRoom.name)
        kind = avatar.kind
        # Base (unmodified) stats are what get persisted, not buffed values.
        hp = str(kind.base_hp)
        maxHp = str(kind.base_maxHp)
        pp = str(kind.base_pp)
        maxPp = str(kind.base_maxPp)
        level = str(kind.level)
        exp = str(kind.exp)
        money = str(kind.money)
        offense = str(kind.base_offense)
        defense = str(kind.base_defense)
        speed = str(kind.base_speed)
        guts = str(kind.base_guts)
        luck = str(kind.base_luck)
        vitality = str(kind.base_vitality)
        IQ = str(kind.base_IQ)
        battleCommands = avatar.battleCommands
        inventory = kind.inventory
        inventorySize = str(kind.inventorySize)
        equipment = kind.equipment
        rewardExp = str(avatar.rewardExp)
        rewardMoney = str(avatar.rewardMoney)
        expToLevel = str(avatar.expToLevel)
        path = 'data/client/' + name +'/' + name
        with open(path, 'w') as f:
            # First line is the password; everything after is 'key=value'.
            f.write(str(player.password) + "\n")
            f.write("name=" + name + "\n")
            f.write("op=" + str(op) + "\n")
            f.write("prompt=" + prompt + "\n")
            f.write("gameState=" + gameState + "\n")
            f.write("battleRoom=" + battleRoom + "\n")
            if battleRoom != '':
                f.write("battleRoom.attachedTo=" + attachedTo.region + ":" + attachedTo.name + "\n")
            else:
                f.write("battleRoom.attachedTo=\n")
            f.write("clientID=" + clientID + "\n\n")
            f.write("title=" + title + "\n")
            f.write("description=" + description + "\n")
            f.write("currentRoom=" + currentRoom + "\n\n")
            f.write("hp=" + hp + "\n")
            f.write("maxHp=" + maxHp + "\n")
            f.write("pp=" + pp + "\n")
            f.write("maxPp=" + maxPp + "\n")
            f.write("level=" + level + "\n")
            f.write("exp=" + exp + "\n")
            f.write("expToLevel=" + expToLevel + "\n")
            f.write("money=" + money + "\n")
            f.write("rewardExp=" + rewardExp + "\n")
            f.write("rewardMoney=" + rewardMoney + "\n")
            f.write("offense=" + offense + "\n")
            f.write("defense=" + defense + "\n")
            f.write("speed=" + speed + "\n")
            f.write("guts=" + guts + "\n")
            f.write("luck=" + luck + "\n")
            f.write("vitality=" + vitality + "\n")
            f.write("IQ=" + IQ + "\n")
            # Battle commands are stored comma-separated on one line.
            commandList = ''
            for command in battleCommands:
                commandList += (str(command) + ", ")
            if str(commandList).endswith(", "):
                commandList = commandList[:-2]
            f.write("battleCommands=" + str(commandList) + "\n")
            f.write("inventorySize=" + inventorySize + "\n")
            f.write("\ninventory=")
            fileString = ''
            for item in inventory:
                if hasattr(item.kind, 'equipment'):
                    # Equipment is written to its own file and referenced by
                    # str(item); plain items are referenced by name.
                    Objects.saveEqToFile(item, 'data/client/' + str(name) + '/inv_equip/')
                    fileString = fileString + (str(item) + ", ")
                else:
                    fileString = fileString + (item.name + ", ")
            if fileString.endswith(', '):
                fileString = fileString[:-2]
            f.write(fileString)
            f.write("\n")
            f.write("\nequipment=")
            if player.avatar.kind.equipment != {}:
                equipType = ''
                fileString = ''
                # Entries are "slot:gear" pairs, comma-separated.
                for key, value in player.avatar.kind.equipment.items():
                    fileString += str(key) + ":" + str(value) + ", "
                if fileString.endswith(", "):
                    fileString = fileString[:-2]
                f.write(fileString)
            f.write("\n")
        # Persist an active battle room and its mobs alongside the client.
        if battleRoom != '':
            RoomInit.saveRoom(player.battleRoom)
            if not os.path.exists('data/world/battles/mobs'):
                os.mkdir('data/world/battles/mobs')
            if not os.path.exists('data/world/battles/mobs/'+player.battleRoom.name+'/'):
                os.mkdir('data/world/battles/mobs/'+player.battleRoom.name+'/')
            for mob in player.battleRoom.mobs:
                MobInit.saveMobToFile(mob, 'data/world/battles/mobs/'+player.battleRoom.name+'/')
    except:
        print "!! Failed to save CLIENT " + Globals.CLIENT_DATA[clientDataID].name
        raise
def dataSave(CLIENT_LIST, CLIENT_DATA, TIMERS):
    """
    Saves the CLIENT_DATA and TIMERS lists to data/server/

    Delegates per-client saving to clientDataSave, then writes every timer
    in Globals.TIMERS to data/server/TIMERS as numbered 'key=value' records.
    NOTE(review): the TIMERS parameter is shadowed by Globals.TIMERS below.
    """
    try:
        # Save every connected client via the single-client save routine.
        for client in CLIENT_LIST:
            clientDataID = str(client.addrport())
            clientDataSave(client, CLIENT_LIST, CLIENT_DATA, Globals.TIMERS)
    except:
        # NOTE(review): clientDataID is unbound here if CLIENT_LIST is empty
        # or addrport() itself raised on the first client.
        print "!! Failed to save CLIENT " + CLIENT_DATA[clientDataID].name
        raise
    # Persist all active timers; each record is prefixed with its index.
    try:
        timerID = 0
        with open('data/server/TIMERS', 'w') as TI:
            for TIMER in Globals.TIMERS:
                TI.write(str(timerID) + " time=" + str(TIMER.time) + "\n")
                TI.write(str(timerID) + " actionFunction=" + str(TIMER.actionFunction) + "\n")
                TI.write(str(timerID) + " actionArgs=" + str(TIMER.actionArgs) + "\n")
                # Record what the timer is attached to, preferring the
                # owner-of-owner's name when one exists.
                if hasattr(TIMER, 'attachedTo'):
                    if TIMER.attachedTo != None:
                        if hasattr(TIMER.attachedTo, 'owner'):
                            if TIMER.attachedTo.owner != None:
                                if hasattr(TIMER.attachedTo.owner, 'owner'):
                                    TI.write(str(timerID) + " attachedTo=" + TIMER.attachedTo.owner.owner.name + "\n")
                                elif hasattr(TIMER.attachedTo, 'owner'):
                                    TI.write(str(timerID) + " attachedTo=" + TIMER.attachedTo.owner.name + "\n")
                TI.write(str(timerID) + " respawns=" + str(TIMER.respawns) + "\n")
                TI.write(str(timerID) + " currentTime=" + str(TIMER.currentTime) + "\n")
                TI.write(" \n")
                timerID += 1
    except:
        raise
    # NOTE: large blocks of dead, commented-out shelve-based persistence code
    # (CLIENT_DATA / TIMERS / CLIENT_LIST) previously lived here; removed as
    # noise — recover from version control if ever needed.
def cmdSpawnObject(refobj, spawnLocation, alert=True, active=False, whereFrom='cmd', spawnContainer=None):
    # creates a new object based on the attributes of the object fed to the function
    """
    Spawn a new World.Object in spawnLocation, cloned from the template
    matching refobj.

    refobj is matched against Globals.fromFileList by name, then against
    Globals.equipmentFromFile by ID.  whereFrom only selects the console
    log symbol ('cmd' -> 's', 'objSpawner' -> '$', 'inv' -> 'i', else '+').
    Players in the room are notified when alert is True.
    Returns the new object, or None if no template matched.
    """
    obj = None
    # Locate the template: plain objects by name, equipment by ID.
    for thing in Globals.fromFileList:
        if thing.name == str(refobj):
            obj = thing
    for thing in Globals.equipmentFromFile:
        if thing.ID == str(refobj):
            obj = thing
    if obj == None:
        print ("%s not found." %refobj)
        return
    # Copy the template's basic attributes onto a fresh object.
    newObject = World.Object(obj.name, obj.description)
    newObject.ID = obj.ID
    newObject.currentRoom = spawnLocation
    newObject.isVisible = obj.isVisible
    if obj.spawnContainer:
        newObject.spawnContainer = obj.spawnContainer
    else:
        newObject.spawnContainer = spawnContainer
    newObject.longDescription = obj.longDescription
    newObject.kind = obj.kind
    if newObject.kind:
        newObject.kind.owner = newObject
    newObject.TIMERS = obj.TIMERS
    if newObject.kind is not None:
        # Replace the shared template kind with a fresh instance so state is
        # not shared between spawned copies.
        if isinstance(newObject.kind, World.item):
            newObject.kind = World.item()
            newObject.kind.owner = newObject
            newObject.kind.isCarryable = obj.kind.isCarryable
            newObject.kind.respawns = obj.kind.respawns
            newObject.kind.itemGrabHandler = obj.kind.itemGrabHandler
            if newObject.kind.itemGrabHandler:
                newObject.kind.itemGrabHandler.owner = newObject.kind
            newObject.kind.objectSpawner = obj.kind.objectSpawner
            if newObject.kind.objectSpawner:
                newObject.kind.objectSpawner.owner = newObject.kind
            newObject.kind.equipment = obj.kind.equipment
            newObject.kind.onUse = obj.kind.onUse
        if isinstance(newObject.kind, World.container):
            newObject.kind = World.container()
            newObject.kind.owner = newObject
            newObject.kind.inventory = []
            newObject.kind.isLocked = obj.kind.isLocked
            newObject.kind.isCarryable = obj.kind.isCarryable
            newObject.kind.respawns = obj.kind.respawns
            newObject.kind.respawnContents = obj.kind.respawnContents
            newObject.kind.itemGrabHandler = obj.kind.itemGrabHandler
            if newObject.kind.itemGrabHandler:
                newObject.kind.itemGrabHandler.owner = newObject.kind
            newObject.kind.objectSpawner = obj.kind.objectSpawner
            if newObject.kind.objectSpawner:
                newObject.kind.objectSpawner.owner = newObject.kind
            if newObject.kind.itemGrabHandler:
                newObject.kind.itemGrabHandler.notDroppable = obj.kind.itemGrabHandler.notDroppable
            if newObject.kind.objectSpawner:
                # Containers get a brand-new spawner and timer wired to it.
                newObject.kind.objectSpawner = World.objectSpawner(newObject.kind)
                newObject.kind.objectSpawner.TIMERS = obj.kind.objectSpawner.TIMERS
                newObject.kind.objectSpawner.time = obj.kind.objectSpawner.time
                newObject.kind.objectSpawner.obj = obj.kind.objectSpawner.obj
                newObject.kind.objectSpawner.oddsList = obj.kind.objectSpawner.oddsList
                newObject.kind.objectSpawner.container = obj.kind.objectSpawner.container
                newObject.kind.objectSpawner.cycles = obj.kind.objectSpawner.cycles
                newObject.kind.objectSpawner.repeat = obj.kind.objectSpawner.repeat
                newObject.kind.objectSpawner.timer = World.Timer(newObject.kind.objectSpawner.TIMERS, newObject.kind.objectSpawner.time, newObject.kind.objectSpawner.spawn, [], newObject.kind.objectSpawner, newObject.kind.respawns)
                # Trailing comma is deliberate: startingLocation is stored as
                # a 1-tuple and read elsewhere as startingLocation[0].
                newObject.kind.objectSpawner.startingLocation = spawnLocation,
        if newObject.kind.equipment:
            # Copy every equipment stat field from the template.
            newObject.kind.equipment.owner = obj.kind.equipment.owner
            newObject.kind.equipment.weapon = obj.kind.equipment.weapon
            newObject.kind.equipment.armor = obj.kind.equipment.armor
            newObject.kind.equipment.slot = obj.kind.equipment.slot
            newObject.kind.equipment.durability = obj.kind.equipment.durability
            newObject.kind.equipment.maxDurability = obj.kind.equipment.maxDurability
            newObject.kind.equipment.worth = obj.kind.equipment.worth
            newObject.kind.equipment.hp = obj.kind.equipment.hp
            newObject.kind.equipment.pp = obj.kind.equipment.pp
            newObject.kind.equipment.offense = obj.kind.equipment.offense
            newObject.kind.equipment.defense = obj.kind.equipment.defense
            newObject.kind.equipment.speed = obj.kind.equipment.speed
            newObject.kind.equipment.guts = obj.kind.equipment.guts
            newObject.kind.equipment.luck = obj.kind.equipment.luck
            newObject.kind.equipment.vitality = obj.kind.equipment.vitality
            newObject.kind.equipment.IQ = obj.kind.equipment.IQ
            newObject.kind.equipment.statusEffect = obj.kind.equipment.statusEffect
            newObject.kind.equipment.battleCommands = obj.kind.equipment.battleCommands
            newObject.kind.equipment.onUse = obj.kind.equipment.onUse
    if newObject.kind:
        if newObject.kind.objectSpawner:
            newObject.kind.objectSpawner.active = active # set the spawned object to active
    spawnLocation.objects.append(newObject)
    # Console log symbol encodes where the spawn request came from.
    symbol = '+'
    if whereFrom == 'cmd':
        symbol = 's'
    elif whereFrom == 'objSpawner':
        symbol = '$'
    elif whereFrom == 'inv':
        symbol = 'i'
    if newObject.kind:
        if newObject.kind.objectSpawner:
            print symbol +"o " + str(newObject) +": " + newObject.name + " @ [" + str(newObject.currentRoom.region) + ":" + str(newObject.currentRoom.name) + "] (active=" + str(newObject.kind.objectSpawner.active) +")"
        else:
            print symbol +"o " + str(newObject) +": " + newObject.name + " @ [" + str(newObject.currentRoom.region) + ":" + str(newObject.currentRoom.name) + "]"
    else:
        print symbol +"o " + str(newObject) +": " + newObject.name + " @ [" + str(newObject.currentRoom.region) + ":" + str(newObject.currentRoom.name) + "]"
    for client in Globals.CLIENT_LIST:
        if Globals.CLIENT_DATA[str(client.addrport())].avatar is not None:
            if Globals.CLIENT_DATA[str(client.addrport())].avatar.currentRoom == newObject.currentRoom: # if a client is in the room object just appeared in, let it know
                if alert==True:
                    client.send_cc("^BA %s appeared.^~\n" %newObject.name)
    return newObject
# makes sure required directories exist, and if not, it creates them
path = [str("data/server/"), "data/client/", "data/log/auth/"]
for pathname in path:
try:
os.makedirs(pathname)
except OSError:
if not os.path.isdir(pathname):
raise | {
"content_hash": "4112495c905a588a2d83feff9509450a",
"timestamp": "",
"source": "github",
"line_count": 943,
"max_line_length": 227,
"avg_line_length": 36.6882290562036,
"alnum_prop": 0.6545943289880626,
"repo_name": "buckets1337/MotherMUD",
"id": "d3883d57659b400d2da12c5014a351971f1c1560",
"size": "34658",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SysInit.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1568859"
}
],
"symlink_target": ""
} |
"""
Exponentiate quickly, using the binary exponentiation algorithm.
Also known as exponentiation by squaring, or square-and-multiply.
Author:
Christos Nitsas
(nitsas)
(chrisnitsas)
Language:
Python 3(.4)
Date:
October, 2014
"""
__all__ = ['binary_exponentiation', 'power']
def binary_exponentiation(num, power):
    """
    Calculate num**power quickly (via binary exponentiation).

    num -- a number
    power -- a non-negative integer

    Also known as exponentiation by squaring, or square-and-multiply.
    Raises ValueError if power is not an integer or is negative.
    """
    # check for valid input:
    if not isinstance(power, int):
        raise ValueError("power must be an integer")
    if power < 0:
        raise ValueError("power must be non-negative")
    # Handle power == 0 before num == 0 so that 0**0 == 1, matching
    # Python's ** operator. (The previous ordering returned 0 for 0**0.)
    if power == 0:
        return 1
    if num == 0:
        return 0
    # Iterative square-and-multiply: consume the bits of power from the
    # least significant end, squaring the base at each step.
    result = 1
    base = num
    remaining = power
    while remaining:
        if remaining & 1:
            result *= base
        remaining >>= 1
        if remaining:
            base *= base
    return result
power = binary_exponentiation
def _recurse_binary_exponentiation(num, power):
"""
Recursively calculate num**power quickly (via binary exponentiation).
Helper function. We did parameter checks before so that we don't have to
do them inside every recursive call.
"""
if power == 1:
return num
num_squared = num * num
if power % 2 == 0:
# power was even
return _recurse_binary_exponentiation(num_squared, power // 2)
else:
# power was odd
return num * _recurse_binary_exponentiation(num_squared, power // 2)
| {
"content_hash": "e1d28c550d528e22f82514ab6b4e5b02",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 77,
"avg_line_length": 24.70769230769231,
"alnum_prop": 0.6419676214196762,
"repo_name": "nitsas/py3algs",
"id": "f868e454d49f6ded77bbbb5752d3bd6f21d8c6ea",
"size": "1606",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py3algs/algorithms/binary_exponentiation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "84089"
}
],
"symlink_target": ""
} |
import re
from nova import exception
# Define the minimum and maximum version of the API across all of the
# REST API. The format of the version is:
# X.Y where:
#
# - X will only be changed if a significant backwards incompatible API
# change is made which affects the API as whole. That is, something
# that is only very very rarely incremented.
#
# - Y when you make any change to the API. Note that this includes
# semantic changes which may not affect the input or output formats or
# even originate in the API code layer. We are not distinguishing
# between backwards compatible and backwards incompatible changes in
# the versioning system. It must be made clear in the documentation as
# to what is a backwards compatible change and what is a backwards
# incompatible one.
#
# You must update the API version history string below with a one or
# two line description as well as update rest_api_version_history.rst
REST_API_VERSION_HISTORY = """REST API Version History:
* 2.1 - Initial version. Equivalent to v2.0 code
* 2.2 - Adds (keypair) type parameter for os-keypairs plugin
Fixes success status code for create/delete a keypair method
* 2.3 - Exposes additional os-extended-server-attributes
Exposes delete_on_termination for os-extended-volumes
* 2.4 - Exposes reserved field in os-fixed-ips.
* 2.5 - Allow server search option ip6 for non-admin
* 2.6 - Consolidate the APIs for getting remote consoles
* 2.7 - Check flavor type before add tenant access.
* 2.8 - Add new protocol for VM console (mks)
"""
# The minimum and maximum versions of the API supported
# The default api version request is definied to be the
# the minimum version of the API supported.
# Note(cyeoh): This only applies for the v2.1 API once microversions
# support is fully merged. It does not affect the V2 API.
_MIN_API_VERSION = "2.1"
_MAX_API_VERSION = "2.8"
DEFAULT_API_VERSION = _MIN_API_VERSION
# NOTE(cyeoh): min and max versions declared as functions so we can
# mock them for unittests. Do not use the constants directly anywhere
# else.
def min_api_version():
    """Return the minimum supported API version as an APIVersionRequest."""
    lowest = APIVersionRequest(_MIN_API_VERSION)
    return lowest
def max_api_version():
    """Return the maximum supported API version as an APIVersionRequest."""
    highest = APIVersionRequest(_MAX_API_VERSION)
    return highest
class APIVersionRequest(object):
    """Represents a microversion API request.

    Wraps a "major.minor" version string and provides the comparison and
    range-matching helpers needed by the microversion machinery.
    """

    def __init__(self, version_string=None):
        """Create an API version request object.

        version_string -- an "X.Y" string, or None for a "null" request.
        Raises InvalidAPIVersionString for a malformed string.
        """
        self.ver_major = None
        self.ver_minor = None
        if version_string is None:
            return
        match = re.match(r"^([1-9]\d*)\.([1-9]\d*|0)$", version_string)
        if match is None:
            raise exception.InvalidAPIVersionString(version=version_string)
        self.ver_major = int(match.group(1))
        self.ver_minor = int(match.group(2))

    def __str__(self):
        """Debug/Logging representation of object."""
        return ("API Version Request Major: %s, Minor: %s"
                % (self.ver_major, self.ver_minor))

    def is_null(self):
        """Return True when this request carries no version at all."""
        return (self.ver_major, self.ver_minor) == (None, None)

    def __cmp__(self, other):
        # Python 2 comparison hook: order requests by (major, minor).
        if not isinstance(other, APIVersionRequest):
            raise TypeError
        mine = (self.ver_major, self.ver_minor)
        theirs = (other.ver_major, other.ver_minor)
        return cmp(mine, theirs)

    def matches(self, min_version, max_version):
        """Return True when this version lies within [min_version, max_version].

        A null min_version means "no lower bound"; a null max_version means
        "no upper bound". Raises ValueError when self is null.
        """
        if self.is_null():
            raise ValueError
        lower_ok = min_version.is_null() or min_version <= self
        upper_ok = max_version.is_null() or self <= max_version
        return lower_ok and upper_ok

    def get_string(self):
        """Return the canonical "X.Y" string for this request.

        Raises ValueError when the request is null.
        """
        if self.is_null():
            raise ValueError
        return "%s.%s" % (self.ver_major, self.ver_minor)
| {
"content_hash": "8a2f33d8f98a5d83f014618e94649041",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 79,
"avg_line_length": 37.95161290322581,
"alnum_prop": 0.6638334041648959,
"repo_name": "zaina/nova",
"id": "ccbaa185f6a819799292c0658c5c8824bab9b2f7",
"size": "5308",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/api/openstack/api_version_request.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16279089"
},
{
"name": "Shell",
"bytes": "20716"
},
{
"name": "Smarty",
"bytes": "277799"
}
],
"symlink_target": ""
} |
"""
cpsdirector.user
================
ConPaaS director: users and authentication handling
:copyright: (C) 2013 by Contrail Consortium.
"""
from flask import Blueprint
from flask import jsonify, helpers, request, make_response, g
import os
import hashlib
import zipfile
import simplejson
from datetime import datetime
from functools import wraps
from StringIO import StringIO
from OpenSSL import crypto
from cpsdirector import db, x509cert
from cpsdirector.common import log, log_error, config_parser, build_response
from cpsdirector.common import error_response
from conpaas.core import https
default_max_credit = 50
user_page = Blueprint('user_page', __name__)
class cert_required(object):
    """Decorator enforcing SSL client-certificate authentication.

    Wraps a view function and authenticates the caller from the fields of
    its SSL client certificate (or, when the DIRECTOR_TESTING environment
    variable is set, from plain request parameters). On success g.cert and
    g.user are populated; for role='manager', g.application is set too.
    """

    def __init__(self, role):
        # role is 'user' or 'manager'; 'manager' additionally checks that
        # the certificate's serviceLocator matches an application owned by
        # the certificate's UID.
        self.role = role

    def __call__(self, fn):
        @wraps(fn)
        def decorated(*args, **kwargs):
            g.cert = {}
            if os.environ.get('DIRECTOR_TESTING'):
                # No SSL certificate check if we are testing. Trust what the
                # client is sending.
                g.cert['UID'] = request.values.get('uid')
                g.cert['role'] = request.values.get('role')
                # g.cert['serviceLocator'] = request.values.get('sid')
                g.cert['serviceLocator'] = request.values.get('aid')
            else:
                # Pull the identity fields out of the client certificate DN.
                cert = request.environ['SSL_CLIENT_CERT']
                for key in 'serviceLocator', 'UID', 'role':
                    g.cert[key] = https.x509.get_x509_dn_field(cert, key)

            try:
                uid = int(g.cert['UID'])
            except (AttributeError, ValueError, TypeError):
                error_msg = 'cert_required: client certificate does NOT provide UID'
                log(error_msg)
                return make_response(error_msg, 401)
            # Getting user data from DB
            g.user = User.query.get(uid)
            if not g.user:
                # authentication failed
                return build_response(simplejson.dumps(False))
            if self.role == 'manager':
                # manager cert required
                try:
                    service_locator = int(g.cert['serviceLocator'])
                except (AttributeError, ValueError):
                    error_msg = 'cert_required: client certificate does NOT provide serviceLocator'
                    log(error_msg)
                    # Return HTTP_UNAUTHORIZED
                    return make_response(error_msg, 401)
                # check if the application is actually owned by the user
                # from cpsdirector.service import get_service
                # g.service = get_service(uid, service_locator)
                # if not g.service:
                #     return build_response(simplejson.dumps(False))
                # import here to avoid a circular module dependency
                from cpsdirector.application import get_app_by_id
                g.application = get_app_by_id(uid, service_locator)
                if not g.application:
                    return build_response(simplejson.dumps(False))
                log('cert_required: valid certificate (user %s, application %s)' % (uid, service_locator))
            return fn(*args, **kwargs)
        return decorated
class User(db.Model):
    """ORM model for a ConPaaS director user account."""

    uid = db.Column(db.Integer, primary_key=True, autoincrement=True)
    username = db.Column(db.String(80), unique=True, nullable=False)
    fname = db.Column(db.String(256))
    lname = db.Column(db.String(256))
    email = db.Column(db.String(256), unique=True)
    affiliation = db.Column(db.String(256))
    # stores an MD5 hex digest of the password, never the clear text
    password = db.Column(db.String(256))
    created = db.Column(db.DateTime)
    credit = db.Column(db.Integer)
    uuid = db.Column(db.String(80))
    openid = db.Column(db.String(200))

    def __init__(self, **kwargs):
        # Default values
        self.credit = 0
        self.created = datetime.now()
        # Any keyword argument overrides the defaults above.
        for key, val in kwargs.items():
            setattr(self, key, val)

    def to_dict(self):
        """Serialize this user to a plain dict (datetime as ISO string)."""
        return {
            'uid': self.uid, 'username': self.username,
            'fname': self.fname, 'lname': self.lname,
            'email': self.email, 'affiliation': self.affiliation,
            'password': self.password, 'credit': self.credit,
            'created': self.created.isoformat(),
            'uuid': self.uuid,
            'openid': self.openid,
        }
def get_user(username, password):
    """Return the User matching (username, password), or None.

    The stored password is an MD5 hex digest, so the candidate password is
    hashed the same way before the lookup.
    """
    hashed_password = hashlib.md5(password).hexdigest()
    return User.query.filter_by(username=username,
                                password=hashed_password).first()
def get_user_by_uuid(uuid):
    """Return the User object with the given uuid, or None if unknown."""
    log("uuid login attempt with uuid '%s'" % uuid)
    matching = User.query.filter_by(uuid=uuid)
    return matching.first()
def get_user_by_openid(openid):
    """Return the User object with the given openid, or None if unknown."""
    # Fixed log format: the original string was missing the opening quote
    # around the openid value ("... with openid %s'").
    log("openid login attempt with openid '%s'" % openid)
    return User.query.filter_by(openid=openid).first()
# from cpsdirector.application import Application
def list_users():
    """Return every registered User object."""
    all_users = User.query.all()
    return all_users
def create_user(username, fname, lname, email, affiliation, password, credit, uuid = None, openid = None):
    """Create a new user with the given attributes. Return the new User
    object on success; on failure roll back and re-raise the database
    error."""
    # The password is stored as an MD5 hex digest, never in clear text.
    new_user = User(username=username,
                    fname=fname,
                    lname=lname,
                    email=email,
                    affiliation=affiliation,
                    password=hashlib.md5(password).hexdigest(),
                    credit=credit,
                    uuid=uuid,
                    openid=openid)

    # imported here to avoid a circular module dependency
    from cpsdirector.application import Application
    # from cpsdirector.application import store_app_controller

    # Every new user gets a default (empty) application.
    app = Application(user=new_user)

    db.session.add(new_user)
    db.session.add(app)
    # flush so that new_user.uid is assigned before the commit below
    db.session.flush()

    # store_app_controller(new_user.uid, app.aid)

    try:
        db.session.commit()
        return new_user
    except Exception, err:
        # Roll back the partial insert and propagate the original error.
        db.session.rollback()
        raise err
def login_required(fn):
    """Decorator: authenticate the caller before invoking the view *fn*.

    Three credential sources are tried in order -- username/password, uuid,
    openid -- all taken from the request parameters. On success g.user is
    set and fn is called; on failure a JSON error response is returned.
    """
    @wraps(fn)
    def decorated_view(*args, **kwargs):
        username = request.values.get('username', '')
        password = request.values.get('password', '')
        uuid = request.values.get('uuid', '')
        # '<none>' is the client-side placeholder for "not provided"
        if uuid == '<none>':
            uuid = ''
        openid = request.values.get('openid', '')
        if openid == '<none>':
            openid = ''

        # Getting user data from DB through username and password
        if len(username.strip()):
            log("User '%s' is attempting to authenticate with "
                "username and password" % username)
            g.user = get_user(username, password)
            if g.user:
                # user authenticated through username and password
                return fn(*args, **kwargs)
            # authentication failed, try uuid

        # Getting user data from DB through uuid
        if len(uuid.strip()):
            log("User is attempting to authenticate with uuid '%s'" % uuid)
            g.user = get_user_by_uuid(uuid)
            if g.user:
                # user authenticated through uuid
                return fn(*args, **kwargs)
            # authentication failed, try openid

        # Getting user data from DB through openid
        if len(openid.strip()):
            log("User is attempting to authenticate with openid '%s'" % openid)
            g.user = get_user_by_openid(openid)
            if g.user:
                # user authenticated through openid
                return fn(*args, **kwargs)

        # authentication failed
        msg = "User authentication failed"
        log_error(msg)
        return build_response(jsonify(error_response(msg)))

    return decorated_view
@user_page.route("/new_user", methods=['POST'])
def new_user():
    """POST /new_user -- register a new user account.

    Validates required fields, uniqueness of username/e-mail and the
    requested credit (capped by the MAX_CREDIT config option), then creates
    the user and returns its dict as JSON. Validation problems come back as
    JSON error responses rather than HTTP error codes.
    """
    values = {}
    all_fields = ('username', 'fname', 'lname', 'email',
                  'affiliation', 'password', 'credit', 'uuid', 'openid')
    required_fields = ('username', 'email', 'password')

    log('New user "%s <%s>" creation attempt' % (
        request.values.get('username'), request.values.get('email')))

    # check for presence of mandatory fields
    for field in all_fields:
        values[field] = request.values.get(field)
        # '<none>' is the client-side placeholder for "no uuid"
        if field == 'uuid' and values[field] == '<none>':
            values[field] = ''

    for field in required_fields:
        if not values[field]:
            msg = "Missing required field: '%s'" % field
            log_error(msg)
            return build_response(jsonify(error_response(msg)))

    # check if the provided username already exists
    if User.query.filter_by(username=values['username']).first():
        msg = "Username '%s' is already taken" % values['username']
        log_error(msg)
        return build_response(jsonify(error_response(msg)))

    # check if the provided email already exists
    if User.query.filter_by(email=values['email']).first():
        msg = "E-mail '%s' is already registered" % values['email']
        log_error(msg)
        return build_response(jsonify(error_response(msg)))

    # check that the requested credit does not exceed the maximum
    if config_parser.has_option('conpaas', 'MAX_CREDIT'):
        max_credit = config_parser.get('conpaas', 'MAX_CREDIT')
        try:
            max_credit = int(max_credit)
        except ValueError:
            log_error("Parameter MAX_CREDIT '%s' is not a valid integer."
                      " Defaulting to maximum credit %s." % (max_credit, default_max_credit))
            max_credit = default_max_credit
        if max_credit < 0:
            log_error("Parameter MAX_CREDIT '%s' cannot be a negative number."
                      " Defaulting to maximum credit %s." % (max_credit, default_max_credit))
            max_credit = default_max_credit
    else:
        max_credit = default_max_credit

    # NOTE(review): a missing 'credit' parameter reaches int(None) here and
    # raises TypeError, which is NOT caught below -- confirm whether clients
    # always send it.
    try:
        req_credit = int(values['credit'])
    except ValueError:
        msg = "Required credit '%s' is not a valid integer." % values['credit']
        log_error(msg)
        return build_response(jsonify(error_response(msg)))

    if req_credit < 0:
        msg = "Required credit %s cannot be a negative integer." % values['credit']
        log_error(msg)
        return build_response(jsonify(error_response(msg)))
    if req_credit > max_credit:
        msg = "Cannot allocate %s credit for a new user (max credit %s)." % (values['credit'], max_credit)
        log_error(msg)
        return build_response(jsonify(error_response(msg)))

    try:
        user = create_user(**values)
        # successful creation
        log("User '%s' created successfully" % user.username)
        return build_response(simplejson.dumps(user.to_dict()))
    except Exception, err:
        # something went wrong
        msg = 'Error upon user creation: %s -> %s' % (type(err), err)
        log_error(msg)
        return build_response(jsonify(error_response(msg)))
@user_page.route("/login", methods=['POST'])
@login_required
def login():
    """POST /login -- return the authenticated user's record as JSON.

    Authentication itself is performed by the login_required decorator,
    which sets g.user.
    """
    log("Successful login for user '%s'" % g.user.username)
    # return user data
    return build_response(simplejson.dumps(g.user.to_dict()))
@user_page.route("/getcerts", methods=['POST', 'GET'])
@login_required
def get_user_certs():
    """POST|GET /getcerts -- generate and download user certificates.

    Creates a fresh certificate/key pair for the authenticated user and
    returns it as a zip archive (key.pem, cert.pem, ca_cert.pem).
    """
    # Creates new certificates for this user
    certs = x509cert.generate_certificate(
        cert_dir=config_parser.get('conpaas', 'CERT_DIR'),
        uid=str(g.user.uid),
        # sid='0',
        aid='0',
        role='user',
        email=g.user.email,
        cn=g.user.username,
        org='Contrail'
    )

    # In-memory zip file
    zipdata = StringIO()
    archive = zipfile.ZipFile(zipdata, mode='w')

    # Add key.pem, cert.pem and ca_cert.pem to the zip file
    for name, data in certs.items():
        archive.writestr(name + '.pem', data)

    archive.close()
    # rewind so send_file streams the archive from the beginning
    zipdata.seek(0)

    log("New certificates for user '%s' created" % g.user.username)

    # Send zip archive to the client
    return helpers.send_file(zipdata, mimetype="application/zip",
                             as_attachment=True, attachment_filename='certs.zip')
@user_page.route("/ca/get_cert.php", methods=['POST'])
@cert_required(role='manager')
def get_manager_cert():
    """POST /ca/get_cert.php -- sign a CSR submitted by a manager.

    The caller authenticates with a manager certificate; the CSR is read
    from the uploaded 'csr' file and the signed certificate is returned.
    """
    log('Certificate request from manager %s (user %s)' % (
        g.cert['serviceLocator'], g.cert['UID']))

    csr = crypto.load_certificate_request(crypto.FILETYPE_PEM,
                                          request.files['csr'].read())

    return x509cert.create_x509_cert(config_parser.get('conpaas', 'CERT_DIR'), csr)
def _deduct_credit(decrement):
    """Subtract *decrement* credits from g.user; commit only if affordable.

    Returns True when the user had enough credit (change committed), False
    otherwise (change rolled back or decrement invalid).
    """
    # Require decrement to be a positive integer
    if decrement <= 0:
        log_error('Decrement should be a positive integer')
        return False

    log('Decrement user credit: uid=%s, old_credit=%s, decrement=%s' % (g.user.uid, g.user.credit, decrement))

    # Deduct optimistically; roll back below if the balance went negative.
    g.user.credit -= decrement

    if g.user.credit > -1:
        # User has enough credit
        db.session.commit()
        log('New credit for user %s: %s' % (g.user.uid, g.user.credit))
        return True

    db.session.rollback()
    log_error('User %s does not have enough credit' % g.user.uid)
    return False
@user_page.route("/callback/decrementUserCredit.php", methods=['POST'])
@cert_required(role='manager')
def credit():
    """POST /callback/decrementUserCredit.php

    POSTed values must contain aid and decrement.

    Returns a dictionary with the 'error' attribute set to False if the user
    had enough credit, True otherwise.
    """
    app_id = int(request.values.get('aid', -1))
    decrement = int(request.values.get('decrement', 1))

    log('Credit decrement request for application %s' % app_id)

    # BUGFIX: _deduct_credit() takes only the decrement; the previous code
    # also passed the application id, raising TypeError on every call.
    success = _deduct_credit(decrement)

    if success:
        return build_response(jsonify({'error': False}))

    return build_response(jsonify({'error': True}))
@user_page.route("/user_config", methods=['GET'])
@cert_required(role="user")
def user_config():
    """GET /user_config -- return the caller's account data as JSON.

    The stored password hash is stripped from the response.
    """
    info = g.user.to_dict()
    info.pop('password', None)
    return build_response(simplejson.dumps(info))
@user_page.route("/user_credit", methods=['GET'])
@cert_required(role="user")
def user_credit():
    """
    Returns the remaining credit of a user identified by a certificate.
    """
    # g.user is populated by the cert_required decorator above.
    return build_response(simplejson.dumps(g.user.credit))
def add_credit(username, credit):
    """Adjust a user's credit by *credit* (may be negative), floored at 0.

    Returns the new balance; raises for an unknown username.
    """
    user = User.query.filter_by(username=username).first()
    if user is None:
        raise Exception("Unknown user '%s'" % username)

    user.credit = max(user.credit + credit, 0)

    db.session.commit()
    return user.credit
| {
"content_hash": "1c203d0bec06b34a62ac47fdaf336f8f",
"timestamp": "",
"source": "github",
"line_count": 430,
"max_line_length": 110,
"avg_line_length": 34.42325581395349,
"alnum_prop": 0.6010674233211728,
"repo_name": "ConPaaS-team/conpaas",
"id": "3b59dce0b13ea0e1081836597667b15fb4396917",
"size": "14827",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conpaas-director/cpsdirector/user.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "79"
},
{
"name": "Batchfile",
"bytes": "2136"
},
{
"name": "C",
"bytes": "12346"
},
{
"name": "CSS",
"bytes": "47680"
},
{
"name": "HTML",
"bytes": "5494"
},
{
"name": "Java",
"bytes": "404303"
},
{
"name": "JavaScript",
"bytes": "164519"
},
{
"name": "M4",
"bytes": "553"
},
{
"name": "Makefile",
"bytes": "78772"
},
{
"name": "Nginx",
"bytes": "1980"
},
{
"name": "PHP",
"bytes": "1900634"
},
{
"name": "Python",
"bytes": "2842443"
},
{
"name": "Shell",
"bytes": "232043"
},
{
"name": "Smarty",
"bytes": "15450"
}
],
"symlink_target": ""
} |
"""
owtf.models.error
~~~~~~~~~~~~~~~~~
"""
from owtf.lib.exceptions import InvalidErrorReference
from sqlalchemy import Boolean, Column, Integer, String
from owtf.db.model_base import Model
from owtf.db.session import flush_transaction
class Error(Model):
    """ORM model for an OWTF error report, plus classmethod CRUD helpers."""

    __tablename__ = "errors"

    id = Column(Integer, primary_key=True)
    owtf_message = Column(String)
    traceback = Column(String, nullable=True)
    user_message = Column(String, nullable=True)
    reported = Column(Boolean, default=False)
    github_issue_url = Column(String, nullable=True)

    def __repr__(self):
        return "<Error (traceback='{!s}')>".format(self.traceback)

    @classmethod
    def add_error(cls, session, message, trace):
        """Persist a new error and return it as a dict."""
        obj = Error(owtf_message=message, traceback=trace)
        session.add(obj)
        session.commit()
        return obj.to_dict()

    @classmethod
    def get_error(cls, session, error_id):
        """Return the error with *error_id* as a dict.

        Raises InvalidErrorReference for an unknown id.
        """
        error = session.query(Error).get(error_id)
        if not error:  # If invalid error id, bail out
            raise InvalidErrorReference("No error with id {!s}".format(error_id))
        return error.to_dict()

    @classmethod
    def delete_error(cls, session, id):
        """Delete the error with the given id.

        Raises InvalidErrorReference for an unknown id.
        """
        error = session.query(cls).get(id)
        if error:
            session.delete(error)
            session.commit()
        else:
            raise InvalidErrorReference("No error with id {!s}".format(id))

    def to_dict(self):
        """Serialize this row to a plain dict (SQLAlchemy state stripped)."""
        obj = dict(self.__dict__)
        obj.pop("_sa_instance_state", None)
        return obj

    @classmethod
    def get_all_dict(cls, session):
        """Return every stored error as a list of dicts."""
        return [err.to_dict() for err in session.query(Error).all()]

    @classmethod
    def update_error(cls, session, error_id, user_message):
        """Attach *user_message* to the error with *error_id*.

        Raises InvalidErrorReference for an unknown id.
        """
        # BUGFIX: the previous implementation called
        # session.query(Error).filter(id=error_id), which is invalid
        # (Query.filter takes expressions, not keyword arguments), and then
        # truth-tested and mutated the Query object instead of a row.
        # Fetch the row by primary key instead.
        obj = session.query(Error).get(error_id)
        if not obj:  # If invalid error id, bail out
            raise InvalidErrorReference("No error with id {!s}".format(error_id))
        obj.user_message = user_message
        session.commit()
| {
"content_hash": "58b90e8bf0f2e0a1776f3634d80ead70",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 81,
"avg_line_length": 30.434782608695652,
"alnum_prop": 0.6195238095238095,
"repo_name": "owtf/owtf",
"id": "b2f3f966f57f1aa8c5cd90ffa5700c32280bd02a",
"size": "2100",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "owtf/models/error.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "146"
},
{
"name": "Dockerfile",
"bytes": "2160"
},
{
"name": "HTML",
"bytes": "1989"
},
{
"name": "JavaScript",
"bytes": "487381"
},
{
"name": "Makefile",
"bytes": "4814"
},
{
"name": "Python",
"bytes": "690934"
},
{
"name": "SCSS",
"bytes": "19170"
},
{
"name": "Shell",
"bytes": "52067"
},
{
"name": "TypeScript",
"bytes": "261109"
}
],
"symlink_target": ""
} |
try:
from urllib.parse import urljoin
from urllib.parse import urlencode
import urllib.request as urlrequest
except ImportError:
from urlparse import urljoin
from urllib import urlencode
import urllib2 as urlrequest
import json
API_URL_DEFAULT = 'https://api.hipchat.com/v1/'
FORMAT_DEFAULT = 'json'
class HipChat(object):
    """Minimal client for the HipChat v1 REST API."""

    # Class-level default; each request replaces it with an instance dict
    # holding the latest X-RateLimit-* header values.
    limits = {}

    def __init__(self, token=None, url=API_URL_DEFAULT, format=FORMAT_DEFAULT):
        self.url = url
        self.token = token
        self.format = format
        self.opener = urlrequest.build_opener(urlrequest.HTTPSHandler())

    class RequestWithMethod(urlrequest.Request):
        """Request subclass that allows an arbitrary HTTP method."""

        def __init__(self, url, data=None, headers=None, origin_req_host=None, unverifiable=False, http_method=None):
            # BUGFIX: 'headers={}' was a mutable default argument shared
            # between all calls; use None and build a fresh dict per call.
            if headers is None:
                headers = {}
            urlrequest.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
            if http_method:
                self.method = http_method

        def get_method(self):
            if self.method:
                return self.method
            return urlrequest.Request.get_method(self)

    def method(self, url, method="GET", parameters=None, timeout=None):
        """Perform an API call and return the decoded JSON response.

        Also records the X-RateLimit-* response headers in self.limits.
        """
        method_url = urljoin(self.url, url)

        if method == "GET":
            # GET: every parameter (plus format/auth) goes in the query string.
            if not parameters:
                parameters = dict()

            parameters['format'] = self.format
            parameters['auth_token'] = self.token
            query_string = urlencode(parameters)
            request_data = None
        else:
            # Non-GET: only auth goes in the query; the rest is the body.
            query_parameters = dict()
            query_parameters['auth_token'] = self.token
            query_string = urlencode(query_parameters)

            if parameters:
                request_data = urlencode(parameters).encode('utf-8')
            else:
                request_data = None

        method_url = method_url + '?' + query_string

        req = self.RequestWithMethod(method_url, http_method=method, data=request_data)
        response = self.opener.open(req, None, timeout)

        info = response.info()
        # BUGFIX: info.getheaders() only exists on Python 2's header
        # objects and broke this module under its own Python 3 import
        # shim; Message.get(name) works on both Python 2 and 3.
        self.limits = {
            'limit': int(info.get('X-RateLimit-Limit')),
            'remaining': int(info.get('X-RateLimit-Remaining')),
            'reset': int(info.get('X-RateLimit-Reset'))
        }

        response_data = response.read()
        return json.loads(response_data.decode('utf-8'))

    def list_rooms(self):
        """Return the JSON result of the rooms/list endpoint."""
        return self.method('rooms/list')

    def message_room(self, room_id='', message_from='', message='', message_format='text', color='', notify=False):
        """Send *message* to *room_id*; the sender name is capped at 15 chars."""
        parameters = dict()
        parameters['room_id'] = room_id
        parameters['from'] = message_from[:15]
        parameters['message'] = message
        parameters['message_format'] = message_format
        parameters['color'] = color

        if notify:
            parameters['notify'] = 1
        else:
            parameters['notify'] = 0

        return self.method('rooms/message', 'POST', parameters)
| {
"content_hash": "3af07d2f2f3a2a412d07380871b1bde1",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 115,
"avg_line_length": 32.23913043478261,
"alnum_prop": 0.5967633175994606,
"repo_name": "idooo/pancake-hipchat-bot",
"id": "5078e9249eb1bed12572492c3049873acbc06165",
"size": "3073",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/simple_hipchat.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "55722"
}
],
"symlink_target": ""
} |
"""
==============================================================
Restricted Boltzmann Machine features for digit classification
==============================================================
For greyscale image data where pixel values can be interpreted as degrees of
blackness on a white background, like handwritten digit recognition, the
Bernoulli Restricted Boltzmann machine model (:class:`BernoulliRBM
<sklearn.neural_network.BernoulliRBM>`) can perform effective non-linear
feature extraction.
In order to learn good latent representations from a small dataset, we
artificially generate more labeled data by perturbing the training data with
linear shifts of 1 pixel in each direction.
This example shows how to build a classification pipeline with a BernoulliRBM
feature extractor and a :class:`LogisticRegression
<sklearn.linear_model.LogisticRegression>` classifier. The hyperparameters
of the entire model (learning rate, hidden layer size, regularization)
were optimized by grid search, but the search is not reproduced here because
of runtime constraints.
Logistic regression on raw pixel values is presented for comparison. The
example shows that the features extracted by the BernoulliRBM help improve the
classification accuracy.
"""
from __future__ import print_function
print(__doc__)
# Authors: Yann N. Dauphin, Vlad Niculae, Gabriel Synnaeve
# License: BSD
import sys
import numpy as np
import matplotlib.pyplot as plt
from util.reader import reader
from scipy.ndimage import convolve
from sklearn import linear_model, datasets, metrics, cross_validation, preprocessing
from sklearn.cross_validation import train_test_split
from sklearn.neural_network import BernoulliRBM
from sklearn.ensemble import RandomForestRegressor
from sklearn.pipeline import Pipeline
###############################################################################
# Setting up
class Bernoulli:
    """Experiment: BernoulliRBM feature extraction feeding a classifier.

    NOTE(review): this class mixes scikit-learn example code with
    work-in-progress experimentation and contains several unresolved
    references (see inline notes).
    """

    # hard-coded path to the pre-extracted feature vectors
    dataset = '/Users/jordansilva/Documents/Jordan/Mestrado/Lorien/code/output/vector.rbm'

    def __init__(self):
        # Load the whole dataset eagerly: features, labels, raw records.
        r = reader(self.dataset)
        self.data, self.labels, self.data_full = r.load(size=sys.maxsize, progress=False)

    def run(self):
        # Load Data
        X = np.asarray(self.data, 'float32')
        # X, Y = nudge_dataset(X, labels)
        # X = (X - np.min(X, 0)) / (np.max(X, 0) + 0.0001)  # 0-1 scaling

        X_train, X_test, Y_train, Y_test = train_test_split(X, self.labels,
                                                            test_size=0.2,
                                                            random_state=0)

        N = len(self.labels)
        kf = cross_validation.KFold(N, n_folds=5)
        fold = 1 ; mse = []
        for train_index, test_index in kf:
            print("FOLD:", fold, "TRAIN:", len(train_index), "TEST:", len(test_index)); fold += 1
            # NOTE(review): 'y' is undefined here (presumably self.labels)
            # and numpy arrays have no .iloc -- this loop cannot run as-is;
            # confirm whether X/y were meant to be pandas objects.
            X_train = X.iloc[train_index]
            y_train = y.iloc[train_index]
            X_test = X.iloc[test_index]
            y_test = y.iloc[test_index]
            # NOTE(review): mean_squared_error is never imported -- TODO
            # confirm it should come from sklearn.metrics.
            model = RandomForestRegressor(n_estimators=10, n_jobs=4)  # n_jobs=4
            model.fit(X_train, y_train)
            y_pred = model.predict(X_test)
            mse.append(mean_squared_error(y_test, y_pred))
            # BUGFIX: 'print mse[-1]' was a SyntaxError because this module
            # imports print_function; use the function form.
            print(mse[-1])
        # NOTE(review): the result of this expression is discarded.
        sum(mse) / len(mse)

        # Models we will use
        logistic = linear_model.LogisticRegression()
        logistic.C = 6000.0
        sgd = linear_model.SGDClassifier()
        binarizer = preprocessing.LabelBinarizer()
        rbm = BernoulliRBM(random_state=0, verbose=True, batch_size=500, learning_rate=0.08, n_iter=20, n_components=256)
        rbm2 = BernoulliRBM(random_state=0, verbose=True, batch_size=500, learning_rate=0.1, n_iter=10, n_components=100)
        randomforest = RandomForestRegressor(n_estimators=10, n_jobs=4)  # n_jobs=4

        classifier = Pipeline(steps=[('rbm', rbm), ('rbm2', rbm2), ('sgd', sgd)])

        #######################################################################
        # Training

        # Hyper-parameters. These were set by cross-validation,
        # using a GridSearchCV. Here we are not performing cross-validation to
        # save time.
        # More components tend to give better prediction performance, but larger
        # fitting time

        # Training RBM-Logistic Pipeline
        classifier.fit(X_train, Y_train)

        # Training Logistic regression
        # logistic_classifier = linear_model.LogisticRegression(C=100.0)
        # logistic_classifier.fit(X_train, Y_train)

        #######################################################################
        # Evaluation
        print()
        print("Logistic regression using RBM features:\n%s\n" % (
            metrics.classification_report(
                Y_test,
                classifier.predict(X_test))))

        # print("Logistic regression using raw pixel features:\n%s\n" % (
        #     metrics.classification_report(
        #         Y_test,
        #         logistic_classifier.predict(X_test))))

        #######################################################################
        # plt.figure(figsize=(4.2, 4))
        # for i, comp in enumerate(rbm.components_):
        #     plt.subplot(10, 10, i + 1)
        #     plt.imshow(comp.reshape((8, 8)), cmap=plt.cm.gray_r,
        #                interpolation='nearest')
        #     plt.xticks(())
        #     plt.yticks(())
        # plt.suptitle('100 components extracted by RBM', fontsize=16)
        # plt.subplots_adjust(0.08, 0.02, 0.92, 0.85, 0.08, 0.23)

        plt.show()
        return
if __name__ == '__main__':
    # Script entry point: instantiate the experiment class defined above
    # and run it (the .run() call follows on the next line of the file).
    b = Bernoulli()
b.run() | {
"content_hash": "437e845eba1142906b90b59170e98fa5",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 121,
"avg_line_length": 37.44078947368421,
"alnum_prop": 0.5824986821296785,
"repo_name": "jordansilva/lorien",
"id": "76fbc0c793887fbcae324b282375857e5ed05de0",
"size": "5691",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bernoulli_rbm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1512"
},
{
"name": "HTML",
"bytes": "2924"
},
{
"name": "JavaScript",
"bytes": "16158"
},
{
"name": "Python",
"bytes": "74776"
}
],
"symlink_target": ""
} |
import sys
from django.core.management.base import BaseCommand
from elections.constraints import check_constraints, ViolatedConstraint
from elections.models import Election
class Command(BaseCommand):
    """Management command: check constraints on every Election.

    Writes each constraint violation to stderr and exits with status 1
    if any election failed its checks, 0 otherwise.
    """

    def handle(self, *args, **kwargs):
        exit_status = 0
        for election in Election.private_objects.all():
            try:
                check_constraints(election)
            except ViolatedConstraint as exc:
                # Record the failure but keep checking the remaining
                # elections so all violations are reported in one run.
                exit_status = 1
                self.stderr.write(str(exc))
        sys.exit(exit_status)
| {
"content_hash": "4df2ccef6d284b325a77d73fe7ffd872",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 71,
"avg_line_length": 32.470588235294116,
"alnum_prop": 0.6503623188405797,
"repo_name": "DemocracyClub/EveryElection",
"id": "a20eb760da38595a1b2e51e34cd6a16c444f8b3b",
"size": "552",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "every_election/apps/elections/management/commands/elections_check_constraints.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "37294"
},
{
"name": "JavaScript",
"bytes": "3930"
},
{
"name": "Python",
"bytes": "548734"
},
{
"name": "SCSS",
"bytes": "3314"
}
],
"symlink_target": ""
} |
try:
    from django.utils.crypto import salted_hmac
except ImportError:
    # Fail fast with a clearer message when Django is not installed.
    raise ImportError("This module depends on django.")
def get_hmac(key_salt, value):
    """Return the hex digest of Django's salted HMAC over *value*."""
    digest = salted_hmac(key_salt, value)
    return digest.hexdigest()
| {
"content_hash": "5329142e822b978c8a2042c78dda8256",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 55,
"avg_line_length": 26.75,
"alnum_prop": 0.7336448598130841,
"repo_name": "polyaxon/polyaxon",
"id": "85e7e334bc7f5c1c796cc2a613f178eecb1449a9",
"size": "819",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "platform/polycommon/polycommon/crypto.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1989"
},
{
"name": "Python",
"bytes": "5201898"
},
{
"name": "Shell",
"bytes": "1565"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import argparse
import errno
import json
import logging
import os
import platform
import shutil
import subprocess
import sys
import tempfile
import traceback
from instack import runner
LOG = logging.getLogger()
def load_args(argv):
    """Parse command line arguments.

    :param argv: argument list (excluding the program name)
    :returns: argparse.Namespace with the parsed options
    :raises SystemExit: (status 1) if --json-file is combined with any of
        the options it supersedes.
    """
    parser = argparse.ArgumentParser(
        description="Execute diskimage-builder elements on the current "
                    "system.")
    parser.add_argument(
        '-e', '--element', nargs='*',
        help="element(s) to execute")
    parser.add_argument(
        '-p', '--element-path', nargs='+',
        help=("element path(s) to search for elements (ELEMENTS_PATH "
              "environment variable will take precedence if defined)"))
    parser.add_argument(
        '-k', '--hook', nargs='*',
        help=("hook(s) to execute for each element"))
    parser.add_argument(
        '-b', '--blacklist', nargs='*',
        help=("script names, that if found, will be blacklisted and not run"))
    parser.add_argument(
        '-x', '--exclude-element', nargs='*',
        help=("element names that will be excluded from running even if "
              "they are listed as dependencies"))
    parser.add_argument(
        '-j', '--json-file',
        help=("read runtime configuration from json file"))
    parser.add_argument(
        '-d', '--debug', action='store_true',
        help=("Debugging output"))
    parser.add_argument(
        '-i', '--interactive', action='store_true',
        help=("If set, prompt to continue running after a failed script."))
    parser.add_argument(
        '--dry-run', action='store_true',
        help=("Dry run only, don't actually modify system, prints out "
              "what would have been run."))
    parser.add_argument(
        '--no-cleanup', action='store_true',
        help=("Do not cleanup tmp directories"))
    parser.add_argument(
        '-l', '--logfile', action='store',
        default=os.path.join(os.path.expanduser('~'), '.instack/instack.log'),
        help=("Logfile to log all actions"))

    args = parser.parse_args(argv)
    # BUG FIX: the error message always claimed --blacklist was incompatible
    # with --json-file, but the check below did not actually test it; the
    # combination was silently accepted.  args.blacklist is now included.
    if args.json_file and (args.element or args.hook or args.blacklist or
                           args.exclude_element):
        print("--json-file not compatible with --element, --hook,")
        print("--exclude-element, or --blacklist")
        sys.exit(1)

    return args
def set_environment(tmp_dir):
    """Set environment variables that diskimage-builder elements expect.

    :param tmp_dir: scratch directory; a 'mnt' symlink pointing at '/' is
        created inside it so elements operate on the live system.
    :raises Exception: if the diskimage-builder lib directory is not found.
    """
    os.environ['TMP_MOUNT_PATH'] = os.path.join(tmp_dir, 'mnt')
    os.symlink('/', os.environ['TMP_MOUNT_PATH'])
    os.environ['DIB_OFFLINE'] = ''
    os.environ['DIB_INIT_SYSTEM'] = 'systemd'
    os.environ['DIB_IMAGE_CACHE'] = (
        '%s/.cache/image-create' % os.environ['HOME'])
    os.environ['IMAGE_NAME'] = 'instack'
    os.environ['PATH'] = "%s:/usr/local/bin" % os.environ['PATH']
    os.environ.setdefault('DIB_DEFAULT_INSTALLTYPE', 'package')

    if os.path.exists('/usr/share/diskimage-builder/lib'):
        os.environ['_LIB'] = '/usr/share/diskimage-builder/lib'
    elif os.path.exists('diskimage-builder/lib'):
        os.environ['_LIB'] = 'diskimage-builder/lib'
    else:
        raise Exception("Can't detect diskimage-builder lib directory.")

    os.environ['TARGET_ROOT'] = '/'
    if platform.processor() == 'x86_64':
        os.environ['ARCH'] = 'amd64'
    else:
        os.environ['ARCH'] = 'i386'

    # BUG FIX: the previous code ran
    #   subprocess.check_output(['export', '|', 'grep', "' DIB_.*='"],
    #                           shell=True)
    # which is broken twice over: with shell=True only the first list item
    # is the command (the rest become shell positional args, so the pipe
    # never runs), and on Python 3 check_output() returns bytes, which
    # os.environ rejects with a TypeError.  Build the same "exported DIB_*
    # variables" summary directly from os.environ instead.
    os.environ['DIB_ENV'] = ' '.join(
        "%s='%s'" % (key, val)
        for key, val in sorted(os.environ.items())
        if key.startswith('DIB_'))
    os.environ['DIB_ARGS'] = str(sys.argv)
def cleanup(tmp_dir):
    # Remove the scratch directory created in main() (including the 'mnt'
    # symlink that set_environment() placed inside it).
    shutil.rmtree(tmp_dir)
def main(argv=sys.argv):
    """Entry point: parse args, configure logging, run the elements.

    Exits with status 1 on any failure; always cleans up the scratch
    directory before returning.
    """
    args = load_args(argv[1:])
    tmp_dir = tempfile.mkdtemp(prefix='instack.')

    try:
        os.makedirs(os.path.dirname(args.logfile))
    except OSError as e:
        # An already-existing log directory is fine; anything else is fatal.
        if e.errno != errno.EEXIST:
            raise

    formatter = logging.Formatter("%(levelname)s: %(asctime)s -- %(message)s")

    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    stream_handler.setLevel(logging.INFO)
    LOG.setLevel(logging.DEBUG)
    LOG.addHandler(stream_handler)

    file_handler = logging.FileHandler(args.logfile)
    file_handler.setFormatter(formatter)
    file_handler.setLevel(logging.INFO)
    LOG.addHandler(file_handler)

    LOG.info("Starting run of instack")

    set_environment(tmp_dir)
    try:
        if args.json_file:
            LOG.info("Using json file: %s" % args.json_file)
            # BUG FIX: the file handle was previously never closed.
            with open(args.json_file) as f:
                json_list = json.load(f)
            if not isinstance(json_list, list):
                raise Exception("json file should be a list structure")
            for run in json_list:
                if "name" in run:
                    LOG.info("Running %s" % run["name"])
                em = runner.ElementRunner(
                    run['element'], run['hook'], args.element_path,
                    run.get('blacklist', []), run.get('exclude-element', []),
                    args.dry_run, args.interactive, args.no_cleanup)
                em.run()
        else:
            em = runner.ElementRunner(
                args.element, args.hook, args.element_path,
                args.blacklist, args.exclude_element,
                args.dry_run, args.interactive,
                args.no_cleanup)
            em.run()
    except Exception as e:
        # BUG FIX: Exception.message does not exist on Python 3, so the
        # handler itself used to raise AttributeError; log str(e) instead.
        # Also, traceback.print_tb() writes to stderr and returns None, so
        # LOG.error() used to log the literal string 'None'; format the
        # traceback into a string instead.
        LOG.error(str(e))
        LOG.error(''.join(traceback.format_tb(sys.exc_info()[2])))
        sys.exit(1)
    finally:
        cleanup(tmp_dir)

    LOG.info("Ending run of instack.")


if __name__ == '__main__':
    main()
| {
"content_hash": "3d41ef0c2fd3c629f1cf84c4099ac3cc",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 78,
"avg_line_length": 33.208333333333336,
"alnum_prop": 0.5929378024735615,
"repo_name": "rdo-management/instack",
"id": "89fe63a02acc5420fa7e853d2d93670bd0d19bda",
"size": "6175",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "instack/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "19660"
},
{
"name": "Shell",
"bytes": "297"
}
],
"symlink_target": ""
} |
'''Program
'''
import re
from fabric.api import run, hosts, cd, local, prefix
__author__ = 'noahsark'
def get_name(string):
    """Extract the resource name from a dbpedia resource URL.

    Returns the trailing path segment with underscores replaced by
    spaces, or None when the URL is not a dbpedia resource URL.
    """
    matched = re.match(r"http://dbpedia.org/resource/(.*)", string)
    if matched is None:
        return None
    return matched.group(1).replace('_', ' ')
def clear_data(core):
    """Delete every document in the given Solr core (hard commit)."""
    url = ('http://localhost:8983/solr/%s/update'
           '?stream.body=<delete><query>*:*</query></delete>'
           '&commit=true' % core)
    local('curl "%s"' % url)
def feed_data(core, fpath):
    """POST the JSON file at *fpath* into the given Solr core, then soft-commit."""
    base = 'http://localhost:8983/solr/{0}/update'.format(core)
    post_cmd = ("curl {0}/json?commit=true --data-binary @{1} -i"
                " -H 'Content-type:application/json'").format(base, fpath)
    local(post_cmd)
    local("curl {0}?softCommit=true -i".format(base))
| {
"content_hash": "fa94106b6cd9427082b42bb6e976b8dd",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 123,
"avg_line_length": 27.72,
"alnum_prop": 0.6219336219336219,
"repo_name": "jimmylai/knowledge",
"id": "ae63864e7206e5ffd4675af4b3813147b30c7499",
"size": "740",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dbpedia/util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4173"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from datetime import datetime, date
from caspy import models, time
def test_fixture():
    """Build a cross-linked set of test objects and return them by category.

    Creates currencies, account types, books, a small account tree
    (Salary attached under Income) and two transactions with splits.
    Objects are created in dependency order, so the sequence of calls
    below must not be reordered.
    """
    currency_objs = _load(models.Currency, currency_data)
    accounttype_objs = _load(models.AccountType, accounttype_data)
    book_objs = [models.Book.objects.create(**d) for d in book_data]
    # Accounts in book 1 / currency 1 (USD per the fixture data below).
    income_kwargs = {
        'currency': currency_objs[0],
        'book': book_objs[0],
        'account_type': accounttype_objs[0],
    }
    income = create_account('Income', income_kwargs)
    salary = create_account('Salary', income_kwargs)
    # Salary becomes a child of Income in the account tree.
    models.Account.tree.attach(salary, income)
    tips = create_account('Tips', income_kwargs)
    # Same book/currency, but the "Bank Account" type.
    asset_kwargs = income_kwargs.copy()
    asset_kwargs['account_type'] = accounttype_objs[1]
    citibank = create_account('Citibank', asset_kwargs)
    # A second bank account in the other book/currency.
    asset2_kwargs = {
        'currency': currency_objs[1],
        'book': book_objs[1],
        'account_type': accounttype_objs[1],
    }
    chase = create_account('Chase', asset2_kwargs)
    transactions = list(create_transactions(salary, tips, citibank))
    return {
        'currencies': currency_objs,
        'accounttypes': accounttype_objs,
        'books': book_objs,
        'accounts': [income, salary, tips, citibank, chase],
        'transactions': transactions,
    }
def _load(django_model, data):
    """Bulk-create one *django_model* instance per kwargs dict in *data*.

    Returns the list of (unsaved-pk) instances in input order.
    """
    instances = [django_model(**row) for row in data]
    django_model.objects.bulk_create(instances)
    return instances
def create_account(name, kwargs):
    """Create an Account called *name* with description '<name> Test Account'.

    *kwargs* supplies the remaining required fields (currency, book,
    account_type).
    """
    return models.Account.objects.create(
        name=name, description=name + ' Test Account', **kwargs)
def create_transactions(*accounts):
    """Yield a Transaction per entry in transaction_data, creating its splits.

    NOTE: mutates the module-level transaction_data dicts in place,
    writing back the generated 'transaction_id' and each split's
    'account_id' so later assertions can reference them.
    """
    for xdata in transaction_data:
        xact = models.Transaction.objects.create(
            date=xdata['date'],
            description=xdata['description'],
        )
        xdata['transaction_id'] = xact.transaction_id
        for sdata in xdata['splits']:
            # Resolve the account by name from the accounts passed in.
            account = find(sdata['account__name'], accounts)
            sdata['account_id'] = account.account_id
            models.Split.objects.create(
                transaction=xact,
                account=account,
                number=sdata['number'],
                status=sdata['status'],
                amount=sdata['amount'],
                description=sdata.get('description', ''),
            )
        yield xact
def find(account_name, accounts):
    """Return the first account in *accounts* whose name matches, else None."""
    return next(
        (acct for acct in accounts if acct.name == account_name), None)
# Fixture rows for models.Currency.
currency_data = [
    {
        'cur_code': 'USD',
        'shortcut': '$',
        'symbol': '$',
        'long_name': 'US Dollar',
    },
    {
        'cur_code': 'CAD',
        'long_name': 'Canadian Dollar',
        'symbol': '$',
        'shortcut': 'C',
    },
    {
        # NOTE(review): uses 'pk' where the other rows use 'cur_code' —
        # presumably equivalent if cur_code is the primary key; confirm.
        'pk': 'EUR',
        'long_name': 'Euro',
        'symbol': '€',
        'shortcut': 'E',
    },
]

# Fixture rows for models.AccountType.
accounttype_data = [
    {
        'account_type': 'Income',
        'sign': False,
        'credit_term': 'income',
        'debit_term': 'expense',
    },
    {
        'account_type': 'Bank Account',
        'sign': True,
        'credit_term': 'withdraw',
        'debit_term': 'deposit',
    },
]

# Transactions consumed by create_transactions(); splits reference
# accounts by name and are balanced (amounts sum to zero per transaction).
transaction_data = [
    {
        'date': date(2015, 7, 3),
        'description': 'Payday',
        'splits': [
            {
                'number': '100',
                'account__name': 'Salary',
                'status': 'c',
                'amount': -8000,
                'description': 'ABC',
            },
            {
                'number': '1339',
                'account__name': 'Citibank',
                'status': 'c',
                'amount': 8000,
                'description': 'DEF',
            },
        ],
    },
    {
        'date': date(2015, 6, 28),
        'description': 'Tips w/dl',
        'splits': [
            {
                'number': '129',
                'account__name': 'Tips',
                'status': 'n',
                'amount': -837,
                'description': 'HIJ',
            },
            {
                'number': '1345',
                'account__name': 'Citibank',
                'status': 'n',
                'amount': 837,
                'description': 'KLM',
            },
        ],
    },
]

# Shorthand: localize a naive datetime to UTC.
u = time.utc.localize
book_data = [
    {'name': 'Test Book 1', 'created_at': u(datetime(2015, 7, 22, 15))},
    {'name': 'Test Book 2', 'created_at': u(datetime(2015, 7, 22, 16))},
]
| {
"content_hash": "bb859024f8782145bfb4a0d0058e9c0b",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 79,
"avg_line_length": 29.94375,
"alnum_prop": 0.45794197453558755,
"repo_name": "altaurog/django-caspy",
"id": "2ca896540c059c6836b3601976fafb45cc4754b7",
"size": "4818",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/testapp/fixtures.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "564"
},
{
"name": "HTML",
"bytes": "11065"
},
{
"name": "JavaScript",
"bytes": "26452"
},
{
"name": "Python",
"bytes": "126094"
}
],
"symlink_target": ""
} |
from django.test import RequestFactory, TransactionTestCase
from html5_appcache.cache import (get_cached_manifest, set_cached_manifest,
clear_cache_manifest, get_cache_version,
reset_cache_manifest, set_cached_value)
from html5_appcache.views import ManifestAppCache
class CacheTest(TransactionTestCase):
    """Exercise the manifest cache helpers and the base manifest view."""

    def setUp(self):
        # Start every test from an empty manifest cache.
        super(CacheTest, self).setUp()
        clear_cache_manifest()

    def test_cache(self):
        """Cache starts empty, versions on set, and survives a reset."""
        self.assertIsNone(get_cache_version())
        self.assertIsNone(get_cached_manifest())
        self.assertEqual(get_cache_version(), 1)
        self.assertEqual(get_cached_manifest(), "ciao")
        reset_cache_manifest()
        # After a reset, setting a raw value bumps the version but the
        # previously cached manifest text is still returned.
        set_cached_value("nociao", 1)
        self.assertEqual(get_cached_manifest(), "ciao")
        self.assertEqual(get_cache_version(), 2)

    def test_base_view(self):
        """The manifest view renders a CACHE MANIFEST document."""
        request = RequestFactory().get('/')
        view = ManifestAppCache.as_view()
        response = view(request)
        self.assertContains(response, "CACHE MANIFEST")
| {
"content_hash": "690b293818e341bcfafb08c916b4c53e",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 75,
"avg_line_length": 37.06666666666667,
"alnum_prop": 0.6393884892086331,
"repo_name": "nephila/django-html5-appcache",
"id": "eaaa335896580063bc321d9eeeadff16164d7778",
"size": "1136",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "html5_appcache/tests/cache.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "10542"
}
],
"symlink_target": ""
} |
'''This module provides helper functions for Gnome/GLib related
functionality such as gobject-introspection and gresources.'''
from .. import build
import os
import sys
import copy
import subprocess
from . import ModuleReturnValue
from ..mesonlib import MesonException, OrderedSet, Popen_safe
from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
from .. import mlog
from .. import mesonlib
from .. import compilers
from .. import interpreter
from . import GResourceTarget, GResourceHeaderTarget, GirTarget, TypelibTarget, VapiTarget
from . import find_program, get_include_args
from . import ExtensionModule
from . import noKwargs, permittedKwargs
# gresource compilation is broken due to the way
# the resource compiler and Ninja clash about it
#
# https://github.com/ninja-build/ninja/issues/1184
# https://bugzilla.gnome.org/show_bug.cgi?id=774368
gresource_dep_needed_version = '>= 2.51.1'

# Module-level caches and print-once flags shared by the helpers below.
native_glib_version = None
girwarning_printed = False
gdbuswarning_printed = False
gresource_warning_printed = False
_gir_has_extra_lib_arg = None
def gir_has_extra_lib_arg():
    """Return whether g-ir-scanner supports --extra-library.

    The probe (running ``g-ir-scanner --help``) is performed at most
    once; the result is cached in a module-level global.  Any failure to
    locate or run the scanner is treated as "not supported".
    """
    global _gir_has_extra_lib_arg
    if _gir_has_extra_lib_arg is None:
        _gir_has_extra_lib_arg = False
        try:
            scanner_cmd = find_program('g-ir-scanner', '').get_command()
            help_text = Popen_safe(scanner_cmd + ['--help'],
                                   stderr=subprocess.STDOUT)[1]
            _gir_has_extra_lib_arg = '--extra-library' in help_text
        except (MesonException, FileNotFoundError, subprocess.CalledProcessError):
            pass
    return _gir_has_extra_lib_arg
class GnomeModule(ExtensionModule):
    """Meson extension module with helpers for GLib/GNOME tooling:
    gresource compilation, gobject-introspection (gir/typelib),
    GSettings schema compilation and yelp help installation."""

    # Lazily-resolved pkg-config dependency on gobject-introspection-1.0.
    gir_dep = None
    @staticmethod
    def _get_native_glib_version(state):
        # Query pkg-config for the build machine's glib-2.0 version,
        # caching the answer in a module-level global so pkg-config is
        # invoked at most once per meson run.
        global native_glib_version
        if native_glib_version is None:
            glib_dep = PkgConfigDependency('glib-2.0', state.environment,
                                           {'native': True})
            native_glib_version = glib_dep.get_version()
        return native_glib_version
    def __print_gresources_warning(self, state):
        # Warn (once per run) when the native glib predates reliable
        # gresource dependency tracking; see the upstream bug in the link.
        global gresource_warning_printed
        if not gresource_warning_printed:
            if not mesonlib.version_compare(self._get_native_glib_version(state), gresource_dep_needed_version):
                mlog.warning('GLib compiled dependencies do not work reliably with \n'
                             'the current version of GLib. See the following upstream issue:',
                             mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=774368'))
            gresource_warning_printed = True
        # Returns an empty list; callers ignore the value.
        return []
    @staticmethod
    def _print_gdbus_warning():
        # Warn (once per run) about an include-path requirement for
        # gdbus_codegen() output with older GLib; see the linked issue.
        global gdbuswarning_printed
        if not gdbuswarning_printed:
            mlog.warning('Code generated with gdbus_codegen() requires the root directory be added to\n'
                         '  include_directories of targets with GLib < 2.51.3:',
                         mlog.bold('https://github.com/mesonbuild/meson/issues/1387'))
            gdbuswarning_printed = True
    @permittedKwargs({'source_dir', 'c_name', 'dependencies', 'export', 'gresource_bundle', 'install_header',
                      'install', 'install_dir', 'extra_args'})
    def compile_resources(self, state, args, kwargs):
        """Create targets that run glib-compile-resources.

        args: [target basename, resource XML file].  Returns either a
        single .gresource bundle target, or a pair of (.c, .h) targets.
        NOTE: mutates *kwargs* in place to build the CustomTarget kwargs.
        """
        self.__print_gresources_warning(state)
        glib_version = self._get_native_glib_version(state)
        cmd = ['glib-compile-resources', '@INPUT@']
        source_dirs = kwargs.pop('source_dir', [])
        if not isinstance(source_dirs, list):
            source_dirs = [source_dirs]
        if len(args) < 2:
            raise MesonException('Not enough arguments; the name of the resource '
                                 'and the path to the XML file are required')
        dependencies = kwargs.pop('dependencies', [])
        if not isinstance(dependencies, list):
            dependencies = [dependencies]
        # Validate dependencies
        for (ii, dep) in enumerate(dependencies):
            if hasattr(dep, 'held_object'):
                dependencies[ii] = dep = dep.held_object
            if not isinstance(dep, (mesonlib.File, build.CustomTarget)):
                m = 'Unexpected dependency type {!r} for gnome.compile_resources() ' \
                    '"dependencies" argument.\nPlease pass the return value of ' \
                    'custom_target() or configure_file()'
                raise MesonException(m.format(dep))
            if isinstance(dep, build.CustomTarget):
                if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
                    m = 'The "dependencies" argument of gnome.compile_resources() can not\n' \
                        'be used with the current version of glib-compile-resources due to\n' \
                        '<https://bugzilla.gnome.org/show_bug.cgi?id=774368>'
                    raise MesonException(m)
        # Normalize the resource XML path to something the compiler,
        # run from the source dir, can open.
        ifile = args[1]
        if isinstance(ifile, mesonlib.File):
            # glib-compile-resources will be run inside the source dir,
            # so we need either 'src_to_build' or the absolute path.
            # Absolute path is the easiest choice.
            if ifile.is_built:
                ifile = os.path.join(state.environment.get_build_dir(), ifile.subdir, ifile.fname)
            else:
                ifile = os.path.join(ifile.subdir, ifile.fname)
        elif isinstance(ifile, str):
            ifile = os.path.join(state.subdir, ifile)
        elif isinstance(ifile, (interpreter.CustomTargetHolder,
                                interpreter.GeneratedObjectsHolder)):
            m = 'Resource xml files generated at build-time cannot be used ' \
                'with gnome.compile_resources() because we need to scan ' \
                'the xml for dependencies. Use configure_file() instead ' \
                'to generate it at configure-time.'
            raise MesonException(m)
        else:
            raise MesonException('Invalid file argument: {!r}'.format(ifile))
        depend_files, depends, subdirs = self._get_gresource_dependencies(
            state, ifile, source_dirs, dependencies)
        # Make source dirs relative to build dir now
        source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs]
        # Always include current directory, but after paths set by user
        source_dirs.append(os.path.join(state.build_to_src, state.subdir))
        # Ensure build directories of generated deps are included
        source_dirs += subdirs
        for source_dir in OrderedSet(source_dirs):
            cmd += ['--sourcedir', source_dir]
        if 'c_name' in kwargs:
            cmd += ['--c-name', kwargs.pop('c_name')]
        export = kwargs.pop('export', False)
        if not export:
            cmd += ['--internal']
        cmd += ['--generate', '--target', '@OUTPUT@']
        cmd += mesonlib.stringlistify(kwargs.pop('extra_args', []))
        gresource = kwargs.pop('gresource_bundle', False)
        if gresource:
            output = args[0] + '.gresource'
            name = args[0] + '_gresource'
        else:
            output = args[0] + '.c'
            name = args[0] + '_c'
        if kwargs.get('install', False) and not gresource:
            raise MesonException('The install kwarg only applies to gresource bundles, see install_header')
        install_header = kwargs.pop('install_header', False)
        if install_header and gresource:
            raise MesonException('The install_header kwarg does not apply to gresource bundles')
        if install_header and not export:
            raise MesonException('GResource header is installed yet export is not enabled')
        kwargs['input'] = args[1]
        kwargs['output'] = output
        kwargs['depends'] = depends
        if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
            # This will eventually go out of sync if dependencies are added
            kwargs['depend_files'] = depend_files
            kwargs['command'] = cmd
        else:
            # New-enough glib can emit a depfile itself.
            depfile = kwargs['output'] + '.d'
            kwargs['depfile'] = depfile
            kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
        target_c = GResourceTarget(name, state.subdir, kwargs)
        if gresource: # Only one target for .gresource files
            return ModuleReturnValue(target_c, [target_c])
        h_kwargs = {
            'command': cmd,
            'input': args[1],
            'output': args[0] + '.h',
            # The header doesn't actually care about the files yet it errors if missing
            'depends': depends
        }
        if install_header:
            h_kwargs['install'] = install_header
            h_kwargs['install_dir'] = kwargs.get('install_dir',
                                                 state.environment.coredata.get_builtin_option('includedir'))
        target_h = GResourceHeaderTarget(args[0] + '_h', state.subdir, h_kwargs)
        rv = [target_c, target_h]
        return ModuleReturnValue(rv, rv)
    def _get_gresource_dependencies(self, state, input_file, source_dirs, dependencies):
        """Ask glib-compile-resources which files the XML references.

        Returns (dep_files, depends, subdirs): the referenced files (with
        not-yet-built ones replaced by File/CustomTarget entries matched
        from *dependencies*), the build targets to depend on, and the
        build subdirs that must be added as source dirs.
        """
        cmd = ['glib-compile-resources',
               input_file,
               '--generate-dependencies']
        for source_dir in source_dirs:
            cmd += ['--sourcedir', os.path.join(state.subdir, source_dir)]
        cmd += ['--sourcedir', state.subdir] # Current dir
        pc, stdout, stderr = Popen_safe(cmd, cwd=state.environment.get_source_dir())
        if pc.returncode != 0:
            m = 'glib-compile-resources failed to get dependencies for {}:\n{}'
            mlog.warning(m.format(cmd[1], stderr))
            raise subprocess.CalledProcessError(pc.returncode, cmd)
        # One filename per line; drop the trailing empty entry.
        dep_files = stdout.split('\n')[:-1]
        # In generate-dependencies mode, glib-compile-resources doesn't raise
        # an error for missing resources but instead prints whatever filename
        # was listed in the input file.  That's good because it means we can
        # handle resource files that get generated as part of the build, as
        # follows.
        #
        # If there are multiple generated resource files with the same basename
        # then this code will get confused.
        def exists_in_srcdir(f):
            return os.path.exists(os.path.join(state.environment.get_source_dir(), f))
        missing_dep_files = [f for f in dep_files if not exists_in_srcdir(f)]
        depends = []
        subdirs = []
        for missing in missing_dep_files:
            found = False
            missing_basename = os.path.basename(missing)
            # Match each missing file against the user-supplied deps by
            # basename; replace the bare name in dep_files accordingly.
            for dep in dependencies:
                if hasattr(dep, 'held_object'):
                    dep = dep.held_object
                if isinstance(dep, mesonlib.File):
                    if dep.fname == missing_basename:
                        found = True
                        dep_files.remove(missing)
                        dep_files.append(dep)
                        subdirs.append(dep.subdir)
                        break
                elif isinstance(dep, build.CustomTarget):
                    if dep.get_basename() == missing_basename:
                        found = True
                        dep_files.remove(missing)
                        dep_files.append(
                            mesonlib.File(
                                is_built=True,
                                subdir=dep.get_subdir(),
                                fname=dep.get_basename()))
                        depends.append(dep)
                        subdirs.append(dep.get_subdir())
                        break
                else:
                    # Dependencies were validated by the caller, so any
                    # other type here is a programming error.
                    raise RuntimeError('Unreachable code.')
            if not found:
                raise MesonException(
                    'Resource "%s" listed in "%s" was not found. If this is a '
                    'generated file, pass the target that generates it to '
                    'gnome.compile_resources() using the "dependencies" '
                    'keyword argument.' % (missing, input_file))
        return dep_files, depends, subdirs
    def _get_link_args(self, state, lib, depends=None, include_rpath=False,
                       use_gir_args=False):
        """Build the linker arguments needed to link against *lib*.

        For shared libraries, emits -L (and optionally -Wl,-rpath) for the
        build dir and its rpath dirs; appends *lib* to *depends* if given.
        With use_gir_args, prefers g-ir-scanner's --extra-library form.
        """
        link_command = []
        # Construct link args
        if isinstance(lib, build.SharedLibrary):
            libdir = os.path.join(state.environment.get_build_dir(), state.backend.get_target_dir(lib))
            link_command.append('-L' + libdir)
            # Needed for the following binutils bug:
            # https://github.com/mesonbuild/meson/issues/1911
            # However, g-ir-scanner does not understand -Wl,-rpath
            # so we need to use -L instead
            for d in state.backend.determine_rpath_dirs(lib):
                d = os.path.join(state.environment.get_build_dir(), d)
                link_command.append('-L' + d)
                if include_rpath:
                    link_command.append('-Wl,-rpath,' + d)
            if include_rpath:
                link_command.append('-Wl,-rpath,' + libdir)
            if depends:
                depends.append(lib)
        if gir_has_extra_lib_arg() and use_gir_args:
            link_command.append('--extra-library=' + lib.name)
        else:
            link_command.append('-l' + lib.name)
        return link_command
    def _get_dependencies_flags(self, deps, state, depends=None, include_rpath=False,
                                use_gir_args=False):
        """Recursively collect flags for *deps*.

        Returns (cflags, ldflags, gi_includes) as OrderedSets; gi_includes
        are directories containing .gir files (from GirTarget sources or
        pkg-config 'girdir' variables).
        """
        cflags = OrderedSet()
        ldflags = OrderedSet()
        gi_includes = OrderedSet()
        if not isinstance(deps, list):
            deps = [deps]
        for dep in deps:
            if hasattr(dep, 'held_object'):
                dep = dep.held_object
            if isinstance(dep, InternalDependency):
                cflags.update(get_include_args(dep.include_directories))
                for lib in dep.libraries:
                    if hasattr(lib, 'held_object'):
                        lib = lib.held_object
                    ldflags.update(self._get_link_args(state, lib, depends, include_rpath))
                    libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath,
                                                               use_gir_args)
                    cflags.update(libdepflags[0])
                    ldflags.update(libdepflags[1])
                    gi_includes.update(libdepflags[2])
                extdepflags = self._get_dependencies_flags(dep.ext_deps, state, depends, include_rpath,
                                                           use_gir_args)
                cflags.update(extdepflags[0])
                ldflags.update(extdepflags[1])
                gi_includes.update(extdepflags[2])
                for source in dep.sources:
                    if hasattr(source, 'held_object'):
                        source = source.held_object
                    if isinstance(source, GirTarget):
                        gi_includes.update([os.path.join(state.environment.get_build_dir(),
                                                         source.get_subdir())])
            # This should be any dependency other than an internal one.
            elif isinstance(dep, Dependency):
                cflags.update(dep.get_compile_args())
                for lib in dep.get_link_args():
                    if (os.path.isabs(lib) and
                            # For PkgConfigDependency only:
                            getattr(dep, 'is_libtool', False)):
                        lib_dir = os.path.dirname(lib)
                        ldflags.update(["-L%s" % lib_dir])
                        if include_rpath:
                            ldflags.update(['-Wl,-rpath {}'.format(lib_dir)])
                        libname = os.path.basename(lib)
                        if libname.startswith("lib"):
                            libname = libname[3:]
                        libname = libname.split(".so")[0]
                        lib = "-l%s" % libname
                    # Hack to avoid passing some compiler options in
                    if lib.startswith("-W"):
                        continue
                    if gir_has_extra_lib_arg() and use_gir_args and lib.startswith("-l"):
                        lib = lib.replace('-l', '--extra-library=', 1)
                    ldflags.update([lib])
                if isinstance(dep, PkgConfigDependency):
                    girdir = dep.get_pkgconfig_variable("girdir")
                    if girdir:
                        gi_includes.update([girdir])
            elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
                cflags.update(get_include_args(dep.get_include_dirs()))
            else:
                mlog.log('dependency %s not handled to build gir files' % dep)
                continue
        return cflags, ldflags, gi_includes
    @permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix',
                      'export_packages', 'includes', 'dependencies', 'link_with', 'include_directories',
                      'install', 'install_dir_gir', 'install_dir_typelib', 'extra_args',
                      'packages'})
    def generate_gir(self, state, args, kwargs):
        """Create the g-ir-scanner (.gir) and g-ir-compiler (.typelib)
        targets for one executable or shared library.

        args: [the Executable/SharedLibrary target].  Returns a
        ModuleReturnValue holding [GirTarget, TypelibTarget].
        """
        if len(args) != 1:
            raise MesonException('Gir takes one argument')
        if kwargs.get('install_dir'):
            raise MesonException('install_dir is not supported with generate_gir(), see "install_dir_gir" and "install_dir_typelib"')
        giscanner = find_program('g-ir-scanner', 'Gir')
        gicompiler = find_program('g-ir-compiler', 'Gir')
        girtarget = args[0]
        while hasattr(girtarget, 'held_object'):
            girtarget = girtarget.held_object
        if not isinstance(girtarget, (build.Executable, build.SharedLibrary)):
            raise MesonException('Gir target must be an executable or shared library')
        try:
            if not self.gir_dep:
                self.gir_dep = PkgConfigDependency('gobject-introspection-1.0',
                                                   state.environment,
                                                   {'native': True})
            pkgargs = self.gir_dep.get_compile_args()
        except Exception:
            raise MesonException('gobject-introspection dependency was not found, gir cannot be generated.')
        ns = kwargs.pop('namespace')
        nsversion = kwargs.pop('nsversion')
        libsources = kwargs.pop('sources')
        girfile = '%s-%s.gir' % (ns, nsversion)
        depends = [girtarget]
        gir_inc_dirs = []
        # Assemble the g-ir-scanner command line piece by piece below;
        # the order of additions matters to the scanner.
        scan_command = [giscanner, '@INPUT@']
        scan_command += pkgargs
        scan_command += ['--no-libtool', '--namespace=' + ns, '--nsversion=' + nsversion, '--warn-all',
                         '--output', '@OUTPUT@']
        extra_args = mesonlib.stringlistify(kwargs.pop('extra_args', []))
        scan_command += extra_args
        scan_command += ['-I' + os.path.join(state.environment.get_source_dir(), state.subdir),
                         '-I' + os.path.join(state.environment.get_build_dir(), state.subdir)]
        scan_command += get_include_args(girtarget.get_include_dirs())
        if 'link_with' in kwargs:
            link_with = kwargs.pop('link_with')
            if not isinstance(link_with, list):
                link_with = [link_with]
            for link in link_with:
                scan_command += self._get_link_args(state, link.held_object, depends,
                                                    use_gir_args=True)
        if 'includes' in kwargs:
            includes = kwargs.pop('includes')
            if not isinstance(includes, list):
                includes = [includes]
            for inc in includes:
                if hasattr(inc, 'held_object'):
                    inc = inc.held_object
                if isinstance(inc, str):
                    scan_command += ['--include=%s' % (inc, )]
                elif isinstance(inc, GirTarget):
                    gir_inc_dirs += [
                        os.path.join(state.environment.get_build_dir(),
                                     inc.get_subdir()),
                    ]
                    scan_command += [
                        "--include=%s" % (inc.get_basename()[:-4], ),
                    ]
                    depends += [inc]
                else:
                    raise MesonException(
                        'Gir includes must be str, GirTarget, or list of them')
        cflags = []
        ldflags = []
        for lang, compiler in girtarget.compilers.items():
            # XXX: Can you use g-i with any other language?
            if lang in ('c', 'cpp', 'objc', 'objcpp', 'd'):
                break
        else:
            lang = None
            compiler = None
        if lang and compiler:
            if state.global_args.get(lang):
                cflags += state.global_args[lang]
            if state.project_args.get(lang):
                cflags += state.project_args[lang]
            if 'b_sanitize' in compiler.base_options:
                sanitize = state.environment.coredata.base_options['b_sanitize'].value
                cflags += compilers.sanitizer_compile_args(sanitize)
                if sanitize == 'address':
                    ldflags += ['-lasan']
                # FIXME: Linking directly to libasan is not recommended but g-ir-scanner
                # does not understand -f LDFLAGS. https://bugzilla.gnome.org/show_bug.cgi?id=783892
                # ldflags += compilers.sanitizer_link_args(sanitize)
        if kwargs.get('symbol_prefix'):
            sym_prefix = kwargs.pop('symbol_prefix')
            if not isinstance(sym_prefix, str):
                raise MesonException('Gir symbol prefix must be str')
            scan_command += ['--symbol-prefix=%s' % sym_prefix]
        if kwargs.get('identifier_prefix'):
            identifier_prefix = kwargs.pop('identifier_prefix')
            if not isinstance(identifier_prefix, str):
                raise MesonException('Gir identifier prefix must be str')
            scan_command += ['--identifier-prefix=%s' % identifier_prefix]
        if kwargs.get('export_packages'):
            pkgs = kwargs.pop('export_packages')
            if isinstance(pkgs, str):
                scan_command += ['--pkg-export=%s' % pkgs]
            elif isinstance(pkgs, list):
                scan_command += ['--pkg-export=%s' % pkg for pkg in pkgs]
            else:
                raise MesonException('Gir export packages must be str or list')
        deps = kwargs.pop('dependencies', [])
        if not isinstance(deps, list):
            deps = [deps]
        deps = (girtarget.get_all_link_deps() + girtarget.get_external_deps() +
                deps)
        # Need to recursively add deps on GirTarget sources from our
        # dependencies and also find the include directories needed for the
        # typelib generation custom target below.
        typelib_includes = []
        for dep in deps:
            if hasattr(dep, 'held_object'):
                dep = dep.held_object
            # Add a dependency on each GirTarget listed in dependencies and add
            # the directory where it will be generated to the typelib includes
            if isinstance(dep, InternalDependency):
                for source in dep.sources:
                    if hasattr(source, 'held_object'):
                        source = source.held_object
                    if isinstance(source, GirTarget) and source not in depends:
                        depends.append(source)
                        subdir = os.path.join(state.environment.get_build_dir(),
                                              source.get_subdir())
                        if subdir not in typelib_includes:
                            typelib_includes.append(subdir)
            # Do the same, but for dependencies of dependencies. These are
            # stored in the list of generated sources for each link dep (from
            # girtarget.get_all_link_deps() above).
            # FIXME: Store this in the original form from declare_dependency()
            # so it can be used here directly.
            elif isinstance(dep, build.SharedLibrary):
                for source in dep.generated:
                    if isinstance(source, GirTarget):
                        subdir = os.path.join(state.environment.get_build_dir(),
                                              source.get_subdir())
                        if subdir not in typelib_includes:
                            typelib_includes.append(subdir)
            elif isinstance(dep, PkgConfigDependency):
                girdir = dep.get_pkgconfig_variable("girdir")
                if girdir and girdir not in typelib_includes:
                    typelib_includes.append(girdir)
        # ldflags will be misinterpreted by gir scanner (showing
        # spurious dependencies) but building GStreamer fails if they
        # are not used here.
        dep_cflags, dep_ldflags, gi_includes = self._get_dependencies_flags(deps, state, depends,
                                                                            use_gir_args=True)
        cflags += list(dep_cflags)
        ldflags += list(dep_ldflags)
        scan_command += ['--cflags-begin']
        scan_command += cflags
        scan_command += ['--cflags-end']
        # need to put our output directory first as we need to use the
        # generated libraries instead of any possibly installed system/prefix
        # ones.
        if isinstance(girtarget, build.SharedLibrary):
            scan_command += ["-L@PRIVATE_OUTDIR_ABS_%s@" % girtarget.get_id()]
        scan_command += list(ldflags)
        for i in gi_includes:
            scan_command += ['--add-include-path=%s' % i]
        inc_dirs = kwargs.pop('include_directories', [])
        if not isinstance(inc_dirs, list):
            inc_dirs = [inc_dirs]
        for incd in inc_dirs:
            if not isinstance(incd.held_object, (str, build.IncludeDirs)):
                raise MesonException(
                    'Gir include dirs should be include_directories().')
        scan_command += get_include_args(inc_dirs)
        scan_command += get_include_args(gir_inc_dirs + inc_dirs, prefix='--add-include-path=')
        if isinstance(girtarget, build.Executable):
            scan_command += ['--program', girtarget]
        elif isinstance(girtarget, build.SharedLibrary):
            libname = girtarget.get_basename()
            # Needed for the following binutils bug:
            # https://github.com/mesonbuild/meson/issues/1911
            # However, g-ir-scanner does not understand -Wl,-rpath
            # so we need to use -L instead
            for d in state.backend.determine_rpath_dirs(girtarget):
                d = os.path.join(state.environment.get_build_dir(), d)
                scan_command.append('-L' + d)
            scan_command += ['--library', libname]
        scankwargs = {'output': girfile,
                      'input': libsources,
                      'command': scan_command,
                      'depends': depends}
        if kwargs.get('install'):
            scankwargs['install'] = kwargs['install']
            scankwargs['install_dir'] = kwargs.get('install_dir_gir',
                                                   os.path.join(state.environment.get_datadir(), 'gir-1.0'))
        scan_target = GirTarget(girfile, state.subdir, scankwargs)
        # Second target: compile the .gir into a .typelib.
        typelib_output = '%s-%s.typelib' % (ns, nsversion)
        typelib_cmd = [gicompiler, scan_target, '--output', '@OUTPUT@']
        typelib_cmd += get_include_args(gir_inc_dirs, prefix='--includedir=')
        for incdir in typelib_includes:
            typelib_cmd += ["--includedir=" + incdir]
        typelib_kwargs = {
            'output': typelib_output,
            'command': typelib_cmd,
        }
        if kwargs.get('install'):
            typelib_kwargs['install'] = kwargs['install']
            typelib_kwargs['install_dir'] = kwargs.get('install_dir_typelib',
                                                       os.path.join(state.environment.get_libdir(), 'girepository-1.0'))
        typelib_target = TypelibTarget(typelib_output, state.subdir, typelib_kwargs)
        rv = [scan_target, typelib_target]
        return ModuleReturnValue(rv, rv)
@noKwargs
def compile_schemas(self, state, args, kwargs):
if args:
raise MesonException('Compile_schemas does not take positional arguments.')
srcdir = os.path.join(state.build_to_src, state.subdir)
outdir = state.subdir
cmd = [find_program('glib-compile-schemas', 'gsettings-compile')]
cmd += ['--targetdir', outdir, srcdir]
kwargs['command'] = cmd
kwargs['input'] = []
kwargs['output'] = 'gschemas.compiled'
if state.subdir == '':
targetname = 'gsettings-compile'
else:
targetname = 'gsettings-compile-' + state.subdir
target_g = build.CustomTarget(targetname, state.subdir, kwargs)
return ModuleReturnValue(target_g, [target_g])
    @permittedKwargs({'sources', 'media', 'symlink_media', 'languages'})
    def yelp(self, state, args, kwargs):
        """Install Yelp (GNOME help) documentation and create translation targets.
        args[0] is the project id; help page sources come from the 'sources'
        kwarg or, failing that, from the remaining positional arguments.
        Returns (as new objects) a RunScript performing the install plus two
        RunTargets, 'help-<id>-pot' and 'help-<id>-update-po', for translators.
        Raises MesonException on missing id/sources or bad kwarg types.
        """
        if len(args) < 1:
            raise MesonException('Yelp requires a project id')
        project_id = args[0]
        sources = mesonlib.stringlistify(kwargs.pop('sources', []))
        if not sources:
            if len(args) > 1:
                sources = mesonlib.stringlistify(args[1:])
        if not sources:
            raise MesonException('Yelp requires a list of sources')
        # '@@' is the list separator understood by the internal yelphelper script.
        source_str = '@@'.join(sources)
        langs = mesonlib.stringlistify(kwargs.pop('languages', []))
        media = mesonlib.stringlistify(kwargs.pop('media', []))
        symlinks = kwargs.pop('symlink_media', True)
        if not isinstance(symlinks, bool):
            raise MesonException('symlink_media must be a boolean')
        # All known kwargs were popped above, so anything left over is unknown.
        if kwargs:
            raise MesonException('Unknown arguments passed: {}'.format(', '.join(kwargs.keys())))
        script = [sys.executable, state.environment.get_build_command()]
        # NOTE: this rebinds the 'args' parameter; the positional args were
        # fully consumed above. These are the 'yelphelper install' arguments.
        args = ['--internal',
                'yelphelper',
                'install',
                '--subdir=' + state.subdir,
                '--id=' + project_id,
                '--installdir=' + os.path.join(state.environment.get_datadir(), 'help'),
                '--sources=' + source_str]
        if symlinks:
            args.append('--symlinks=true')
        if media:
            args.append('--media=' + '@@'.join(media))
        if langs:
            args.append('--langs=' + '@@'.join(langs))
        inscript = build.RunScript(script, args)
        # Target regenerating the translation template (.pot) on demand.
        potargs = [state.environment.get_build_command(), '--internal', 'yelphelper', 'pot',
                   '--subdir=' + state.subdir,
                   '--id=' + project_id,
                   '--sources=' + source_str]
        pottarget = build.RunTarget('help-' + project_id + '-pot', sys.executable,
                                    potargs, [], state.subdir)
        # Target refreshing the per-language .po files from the template.
        # Note: '--langs=' is passed even when langs is empty.
        poargs = [state.environment.get_build_command(), '--internal', 'yelphelper', 'update-po',
                  '--subdir=' + state.subdir,
                  '--id=' + project_id,
                  '--sources=' + source_str,
                  '--langs=' + '@@'.join(langs)]
        potarget = build.RunTarget('help-' + project_id + '-update-po', sys.executable,
                                   poargs, [], state.subdir)
        rv = [inscript, pottarget, potarget]
        return ModuleReturnValue(None, rv)
    @permittedKwargs({'main_xml', 'main_sgml', 'src_dir', 'dependencies', 'install',
                      'install_dir', 'scan_args', 'scanobjs_args', 'gobject_typesfile',
                      'fixxref_args', 'html_args', 'html_assets', 'content_files',
                      'mkdb_args'})
    def gtkdoc(self, state, args, kwargs):
        """Build gtk-doc API documentation for module args[0].
        Assembles a command line for the internal 'gtkdoc' helper script and
        returns a '<module>-doc' RunTarget; if 'install' is not explicitly
        disabled, also a RunScript that performs the install step.
        Raises MesonException on invalid arguments.
        """
        if len(args) != 1:
            raise MesonException('Gtkdoc must have one positional argument.')
        modulename = args[0]
        if not isinstance(modulename, str):
            raise MesonException('Gtkdoc arg must be string.')
        if 'src_dir' not in kwargs:
            raise MesonException('Keyword argument src_dir missing.')
        # main_sgml and main_xml are mutually exclusive names for the same thing.
        main_file = kwargs.get('main_sgml', '')
        if not isinstance(main_file, str):
            raise MesonException('Main sgml keyword argument must be a string.')
        main_xml = kwargs.get('main_xml', '')
        if not isinstance(main_xml, str):
            raise MesonException('Main xml keyword argument must be a string.')
        if main_xml != '':
            if main_file != '':
                raise MesonException('You can only specify main_xml or main_sgml, not both.')
            main_file = main_xml
        targetname = modulename + '-doc'
        command = [sys.executable, state.environment.get_build_command()]
        namespace = kwargs.get('namespace', '')
        mode = kwargs.get('mode', 'auto')
        VALID_MODES = ('xml', 'sgml', 'none', 'auto')
        if mode not in VALID_MODES:
            raise MesonException('gtkdoc: Mode {} is not a valid mode: {}'.format(mode, VALID_MODES))
        src_dirs = kwargs['src_dir']
        if not isinstance(src_dirs, list):
            src_dirs = [src_dirs]
        # Expand include_directories() objects into concrete source- and
        # build-dir header paths; plain strings are passed through as-is.
        header_dirs = []
        for src_dir in src_dirs:
            if hasattr(src_dir, 'held_object'):
                src_dir = src_dir.held_object
                if not isinstance(src_dir, build.IncludeDirs):
                    raise MesonException('Invalid keyword argument for src_dir.')
                for inc_dir in src_dir.get_incdirs():
                    header_dirs.append(os.path.join(state.environment.get_source_dir(),
                                                    src_dir.get_curdir(), inc_dir))
                    header_dirs.append(os.path.join(state.environment.get_build_dir(),
                                                    src_dir.get_curdir(), inc_dir))
            else:
                header_dirs.append(src_dir)
        # NOTE: this rebinds the 'args' parameter (already consumed above);
        # these are the arguments for the internal 'gtkdoc' helper.
        args = ['--internal', 'gtkdoc',
                '--sourcedir=' + state.environment.get_source_dir(),
                '--builddir=' + state.environment.get_build_dir(),
                '--subdir=' + state.subdir,
                '--headerdirs=' + '@@'.join(header_dirs),
                '--mainfile=' + main_file,
                '--modulename=' + modulename,
                '--mode=' + mode]
        if namespace:
            args.append('--namespace=' + namespace)
        args += self._unpack_args('--htmlargs=', 'html_args', kwargs)
        args += self._unpack_args('--scanargs=', 'scan_args', kwargs)
        args += self._unpack_args('--scanobjsargs=', 'scanobjs_args', kwargs)
        args += self._unpack_args('--gobjects-types-file=', 'gobject_typesfile', kwargs, state)
        args += self._unpack_args('--fixxrefargs=', 'fixxref_args', kwargs)
        args += self._unpack_args('--mkdbargs=', 'mkdb_args', kwargs)
        args += self._unpack_args('--html-assets=', 'html_assets', kwargs, state)
        args += self._unpack_args('--content-files=', 'content_files', kwargs, state)
        args += self._unpack_args('--expand-content-files=', 'expand_content_files', kwargs, state)
        args += self._unpack_args('--ignore-headers=', 'ignore_headers', kwargs)
        args += self._unpack_args('--installdir=', 'install_dir', kwargs, state)
        args += self._get_build_args(kwargs, state)
        res = [build.RunTarget(targetname, command[0], command[1:] + args, [], state.subdir)]
        # Install defaults to True here, unlike most other targets.
        if kwargs.get('install', True):
            res.append(build.RunScript(command, args))
        return ModuleReturnValue(None, res)
    def _get_build_args(self, kwargs, state):
        """Build --cflags/--ldflags/--cc/--ld arguments for the gtkdoc helper.
        Compiler/linker flags come from the 'dependencies' and
        'include_directories' kwargs; the C toolchain comes from core data.
        Returns a list of command-line argument strings.
        """
        args = []
        # NOTE(review): _get_dependencies_flags is defined elsewhere in this
        # module; cflags/ldflags appear to be set-like (they support update()).
        # gi_includes is unused here.
        cflags, ldflags, gi_includes = self._get_dependencies_flags(kwargs.get('dependencies', []), state, include_rpath=True)
        inc_dirs = kwargs.get('include_directories', [])
        if not isinstance(inc_dirs, list):
            inc_dirs = [inc_dirs]
        for incd in inc_dirs:
            if not isinstance(incd.held_object, (str, build.IncludeDirs)):
                raise MesonException(
                    'Gir include dirs should be include_directories().')
        cflags.update(get_include_args(inc_dirs))
        if cflags:
            args += ['--cflags=%s' % ' '.join(cflags)]
        if ldflags:
            args += ['--ldflags=%s' % ' '.join(ldflags)]
        # Only emit --cc/--ld when a C compiler is actually configured.
        compiler = state.environment.coredata.compilers.get('c')
        if compiler:
            args += ['--cc=%s' % ' '.join(compiler.get_exelist())]
            args += ['--ld=%s' % ' '.join(compiler.get_linker_exelist())]
        return args
@noKwargs
def gtkdoc_html_dir(self, state, args, kwargs):
if len(args) != 1:
raise MesonException('Must have exactly one argument.')
modulename = args[0]
if not isinstance(modulename, str):
raise MesonException('Argument must be a string')
return ModuleReturnValue(os.path.join('share/gtkdoc/html', modulename), [])
@staticmethod
def _unpack_args(arg, kwarg_name, kwargs, expend_file_state=None):
if kwarg_name not in kwargs:
return []
new_args = kwargs[kwarg_name]
if not isinstance(new_args, list):
new_args = [new_args]
args = []
for i in new_args:
if expend_file_state and isinstance(i, mesonlib.File):
i = os.path.join(expend_file_state.environment.get_build_dir(), i.subdir, i.fname)
elif not isinstance(i, str):
raise MesonException(kwarg_name + ' values must be strings.')
args.append(i)
if args:
return [arg + '@@'.join(args)]
return []
@permittedKwargs({'interface_prefix', 'namespace', 'object_manager'})
def gdbus_codegen(self, state, args, kwargs):
if len(args) != 2:
raise MesonException('Gdbus_codegen takes two arguments, name and xml file.')
namebase = args[0]
xml_file = args[1]
target_name = namebase + '-gdbus'
cmd = [find_program('gdbus-codegen', target_name)]
if 'interface_prefix' in kwargs:
cmd += ['--interface-prefix', kwargs.pop('interface_prefix')]
if 'namespace' in kwargs:
cmd += ['--c-namespace', kwargs.pop('namespace')]
if kwargs.get('object_manager', False):
cmd += ['--c-generate-object-manager']
# https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a
if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.3'):
cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@']
else:
self._print_gdbus_warning()
cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@']
outputs = [namebase + '.c', namebase + '.h']
custom_kwargs = {'input': xml_file,
'output': outputs,
'command': cmd
}
ct = build.CustomTarget(target_name, state.subdir, custom_kwargs)
return ModuleReturnValue(ct, [ct])
    @permittedKwargs({'sources', 'c_template', 'h_template', 'install_header', 'install_dir',
                      'comments', 'identifier_prefix', 'symbol_prefix', 'eprod', 'vprod',
                      'fhead', 'fprod', 'ftail', 'vhead', 'vtail', 'depends'})
    def mkenums(self, state, args, kwargs):
        """Generate GObject enum C/H sources with glib-mkenums.
        args[0] is the base name of the output. Behaviour depends on which
        templates are given: h_template and/or c_template each produce a
        CustomTarget named after the template minus its extension; with no
        template a single generic target named args[0] is created.
        Only the header may be installed (controlled by 'install_header').
        Raises MesonException on unknown or conflicting kwargs.
        """
        if len(args) != 1:
            raise MesonException('Mkenums requires one positional argument.')
        basename = args[0]
        if 'sources' not in kwargs:
            raise MesonException('Missing keyword argument "sources".')
        sources = kwargs.pop('sources')
        if isinstance(sources, str):
            sources = [sources]
        elif not isinstance(sources, list):
            raise MesonException(
                'Sources keyword argument must be a string or array.')
        # Kwargs translated directly into glib-mkenums '--option value' pairs.
        cmd = []
        known_kwargs = ['comments', 'eprod', 'fhead', 'fprod', 'ftail',
                        'identifier_prefix', 'symbol_prefix', 'template',
                        'vhead', 'vprod', 'vtail']
        # Kwargs forwarded untouched to the underlying CustomTarget.
        known_custom_target_kwargs = ['install_dir', 'build_always',
                                      'depends', 'depend_files']
        c_template = h_template = None
        install_header = False
        for arg, value in kwargs.items():
            if arg == 'sources':
                raise AssertionError("sources should've already been handled")
            elif arg == 'c_template':
                c_template = value
                if 'template' in kwargs:
                    raise MesonException('Mkenums does not accept both '
                                         'c_template and template keyword '
                                         'arguments at the same time.')
            elif arg == 'h_template':
                h_template = value
                if 'template' in kwargs:
                    raise MesonException('Mkenums does not accept both '
                                         'h_template and template keyword '
                                         'arguments at the same time.')
            elif arg == 'install_header':
                install_header = value
            elif arg in known_kwargs:
                cmd += ['--' + arg.replace('_', '-'), value]
            elif arg not in known_custom_target_kwargs:
                raise MesonException(
                    'Mkenums does not take a %s keyword argument.' % (arg, ))
        cmd = [find_program('glib-mkenums', 'mkenums')] + cmd
        custom_kwargs = {}
        for arg in known_custom_target_kwargs:
            if arg in kwargs:
                custom_kwargs[arg] = kwargs[arg]
        targets = []
        # Header target (generated first so the C target can depend on it).
        if h_template is not None:
            h_output = os.path.splitext(h_template)[0]
            # We always set template as the first element in the source array
            # so --template consumes it.
            h_cmd = cmd + ['--template', '@INPUT@']
            h_sources = [h_template] + sources
            custom_kwargs['install'] = install_header
            if 'install_dir' not in custom_kwargs:
                custom_kwargs['install_dir'] = \
                    state.environment.coredata.get_builtin_option('includedir')
            h_target = self._make_mkenum_custom_target(state, h_sources,
                                                       h_output, h_cmd,
                                                       custom_kwargs)
            targets.append(h_target)
        # C source target; note custom_kwargs is reused and mutated here.
        if c_template is not None:
            c_output = os.path.splitext(c_template)[0]
            # We always set template as the first element in the source array
            # so --template consumes it.
            c_cmd = cmd + ['--template', '@INPUT@']
            c_sources = [c_template] + sources
            # Never install the C file. Complain on bug tracker if you need it.
            custom_kwargs['install'] = False
            if h_template is not None:
                # The C file includes the generated header, so depend on it.
                if 'depends' in custom_kwargs:
                    custom_kwargs['depends'] += [h_target]
                else:
                    custom_kwargs['depends'] = h_target
            c_target = self._make_mkenum_custom_target(state, c_sources,
                                                       c_output, c_cmd,
                                                       custom_kwargs)
            targets.insert(0, c_target)
        # No templates at all: one generic target named after args[0].
        if c_template is None and h_template is None:
            generic_cmd = cmd + ['@INPUT@']
            custom_kwargs['install'] = install_header
            if 'install_dir' not in custom_kwargs:
                custom_kwargs['install_dir'] = \
                    state.environment.coredata.get_builtin_option('includedir')
            target = self._make_mkenum_custom_target(state, sources, basename,
                                                     generic_cmd, custom_kwargs)
            return ModuleReturnValue(target, [target])
        elif len(targets) == 1:
            return ModuleReturnValue(targets[0], [targets[0]])
        else:
            return ModuleReturnValue(targets, targets)
@staticmethod
def _make_mkenum_custom_target(state, sources, output, cmd, kwargs):
custom_kwargs = {
'input': sources,
'output': output,
'capture': True,
'command': cmd
}
custom_kwargs.update(kwargs)
return build.CustomTarget(output, state.subdir, custom_kwargs,
# https://github.com/mesonbuild/meson/issues/973
absolute_paths=True)
    @permittedKwargs({'sources', 'prefix', 'install_header', 'install_dir', 'stdinc',
                      'nostdinc', 'internal', 'skip_source', 'valist_marshallers',
                      'extra_args'})
    def genmarshal(self, state, args, kwargs):
        """Generate GObject marshaller C/H files with glib-genmarshal.
        args[0] is the base name; two CustomTargets are created producing
        <name>.c and <name>.h. Only the header may be installed.
        Behaviour varies with the native GLib version (--output support
        from 2.51.0, extra_args and --pragma-once from 2.53.3,
        --include-header from 2.53.4).
        Returns both targets; raises MesonException on bad kwargs.
        """
        if len(args) != 1:
            raise MesonException(
                'Genmarshal requires one positional argument.')
        output = args[0]
        if 'sources' not in kwargs:
            raise MesonException('Missing keyword argument "sources".')
        sources = kwargs.pop('sources')
        if isinstance(sources, str):
            sources = [sources]
        elif not isinstance(sources, list):
            raise MesonException(
                'Sources keyword argument must be a string or array.')
        new_genmarshal = mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.3')
        cmd = [find_program('glib-genmarshal', output + '_genmarshal')]
        # Boolean kwargs translated into bare '--option' flags when truthy.
        known_kwargs = ['internal', 'nostdinc', 'skip_source', 'stdinc',
                        'valist_marshallers', 'extra_args']
        # Kwargs forwarded to the underlying CustomTargets.
        known_custom_target_kwargs = ['build_always', 'depends',
                                      'depend_files', 'install_dir',
                                      'install_header']
        for arg, value in kwargs.items():
            if arg == 'prefix':
                cmd += ['--prefix', value]
            elif arg == 'extra_args':
                if new_genmarshal:
                    cmd += mesonlib.stringlistify(value)
                else:
                    mlog.warning('The current version of GLib does not support extra arguments \n'
                                 'for glib-genmarshal. You need at least GLib 2.53.3. See ',
                                 mlog.bold('https://github.com/mesonbuild/meson/pull/2049'))
            elif arg in known_kwargs and value:
                cmd += ['--' + arg.replace('_', '-')]
            elif arg not in known_custom_target_kwargs:
                raise MesonException(
                    'Genmarshal does not take a %s keyword argument.' % (
                        arg, ))
        install_header = kwargs.pop('install_header', False)
        install_dir = kwargs.pop('install_dir', None)
        custom_kwargs = {
            'input': sources,
        }
        # Old glib-genmarshal wrote to stdout; capture it in that case.
        # https://github.com/GNOME/glib/commit/0fbc98097fac4d3e647684f344e508abae109fdf
        if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.0'):
            cmd += ['--output', '@OUTPUT@']
        else:
            custom_kwargs['capture'] = True
        for arg in known_custom_target_kwargs:
            if arg in kwargs:
                custom_kwargs[arg] = kwargs[arg]
        header_file = output + '.h'
        # Body (.c) target; custom_kwargs is mutated and reused for the header below.
        custom_kwargs['command'] = cmd + ['--body', '@INPUT@']
        if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.4'):
            # Silence any warnings about missing prototypes
            custom_kwargs['command'] += ['--include-header', header_file]
        custom_kwargs['output'] = output + '.c'
        body = build.CustomTarget(output + '_c', state.subdir, custom_kwargs)
        # Header (.h) target; only this one is ever installed.
        custom_kwargs['install'] = install_header
        if install_dir is not None:
            custom_kwargs['install_dir'] = install_dir
        if new_genmarshal:
            cmd += ['--pragma-once']
        custom_kwargs['command'] = cmd + ['--header', '@INPUT@']
        custom_kwargs['output'] = header_file
        header = build.CustomTarget(output + '_h', state.subdir, custom_kwargs)
        rv = [body, header]
        return ModuleReturnValue(rv, rv)
@staticmethod
def _vapi_args_to_command(prefix, variable, kwargs, accept_vapi=False):
arg_list = kwargs.get(variable)
if not arg_list:
return []
ret = []
if not isinstance(arg_list, list):
arg_list = [arg_list]
for arg in arg_list:
if not isinstance(arg, str):
types = 'strings' + ' or InternalDependencys' if accept_vapi else ''
raise MesonException('All {} must be {}'.format(variable, types))
ret.append(prefix + arg)
return ret
    def _extract_vapi_packages(self, state, kwargs):
        '''
        Packages are special because we need to:
        - Get a list of packages for the .deps file
        - Get a list of depends for any VapiTargets
        - Get package name from VapiTargets
        - Add include dirs for any VapiTargets
        Returns a 4-tuple (vapigen command args, VapiTarget depends,
        package names, include dirs). Mutates kwargs['packages'] so that
        only plain string packages remain for _vapi_args_to_command.
        '''
        arg_list = kwargs.get('packages')
        if not arg_list:
            return [], [], [], []
        if not isinstance(arg_list, list):
            arg_list = [arg_list]
        vapi_depends = []
        vapi_packages = []
        vapi_includes = []
        ret = []
        remaining_args = []
        for arg in arg_list:
            # Unwrap interpreter holder objects to get the raw dependency.
            if hasattr(arg, 'held_object'):
                arg = arg.held_object
            if isinstance(arg, InternalDependency):
                targets = [t for t in arg.sources if isinstance(t, VapiTarget)]
                for target in targets:
                    srcdir = os.path.join(state.environment.get_source_dir(),
                                          target.get_subdir())
                    outdir = os.path.join(state.environment.get_build_dir(),
                                          target.get_subdir())
                    outfile = target.get_outputs()[0][:-5] # Strip .vapi
                    ret.append('--vapidir=' + outdir)
                    ret.append('--girdir=' + outdir)
                    ret.append('--pkg=' + outfile)
                    vapi_depends.append(target)
                    vapi_packages.append(outfile)
                    vapi_includes.append(srcdir)
            else:
                vapi_packages.append(arg)
                remaining_args.append(arg)
        # Only plain (string) packages are left for the generic converter.
        kwargs['packages'] = remaining_args
        vapi_args = ret + self._vapi_args_to_command('--pkg=', 'packages', kwargs, accept_vapi=True)
        return vapi_args, vapi_depends, vapi_packages, vapi_includes
def _generate_deps(self, state, library, packages, install_dir):
outdir = state.environment.scratch_dir
fname = os.path.join(outdir, library + '.deps')
with open(fname, 'w') as ofile:
for package in packages:
ofile.write(package + '\n')
return build.Data(mesonlib.File(True, outdir, fname), install_dir)
def _get_vapi_link_with(self, target):
link_with = []
for dep in target.get_target_dependencies():
if isinstance(dep, build.SharedLibrary):
link_with.append(dep)
elif isinstance(dep, GirTarget):
link_with += self._get_vapi_link_with(dep)
return link_with
    @permittedKwargs({'sources', 'packages', 'metadata_dirs', 'gir_dirs',
                      'vapi_dirs', 'install', 'install_dir'})
    def generate_vapi(self, state, args, kwargs):
        """Generate a Vala API (.vapi) file for library args[0] with vapigen.
        Sources may be plain .gir file names or GirTarget objects. Returns
        an InternalDependency bundling the VapiTarget, its vapi depends,
        link libraries and include dirs; when installing, a .deps file is
        generated alongside. Raises MesonException on invalid arguments.
        """
        if len(args) != 1:
            raise MesonException('The library name is required')
        if not isinstance(args[0], str):
            raise MesonException('The first argument must be the name of the library')
        created_values = []
        library = args[0]
        build_dir = os.path.join(state.environment.get_build_dir(), state.subdir)
        source_dir = os.path.join(state.environment.get_source_dir(), state.subdir)
        # Resolve any InternalDependency packages into vapigen arguments,
        # target depends, package names and include dirs.
        pkg_cmd, vapi_depends, vapi_packages, vapi_includes = self._extract_vapi_packages(state, kwargs)
        cmd = [find_program('vapigen', 'Vaapi')]
        cmd += ['--quiet', '--library=' + library, '--directory=' + build_dir]
        cmd += self._vapi_args_to_command('--vapidir=', 'vapi_dirs', kwargs)
        cmd += self._vapi_args_to_command('--metadatadir=', 'metadata_dirs', kwargs)
        cmd += self._vapi_args_to_command('--girdir=', 'gir_dirs', kwargs)
        cmd += pkg_cmd
        cmd += ['--metadatadir=' + source_dir]
        inputs = kwargs.get('sources')
        if not inputs:
            raise MesonException('sources are required to generate the vapi file')
        if not isinstance(inputs, list):
            inputs = [inputs]
        link_with = []
        for i in inputs:
            if isinstance(i, str):
                # Plain file names are taken relative to the source dir.
                cmd.append(os.path.join(source_dir, i))
            elif hasattr(i, 'held_object') and isinstance(i.held_object, GirTarget):
                link_with += self._get_vapi_link_with(i.held_object)
                subdir = os.path.join(state.environment.get_build_dir(),
                                      i.held_object.get_subdir())
                gir_file = os.path.join(subdir, i.held_object.get_outputs()[0])
                cmd.append(gir_file)
            else:
                raise MesonException('Input must be a str or GirTarget')
        vapi_output = library + '.vapi'
        custom_kwargs = {
            'command': cmd,
            'input': inputs,
            'output': vapi_output,
            'depends': vapi_depends,
        }
        install_dir = kwargs.get('install_dir',
                                 os.path.join(state.environment.coredata.get_builtin_option('datadir'),
                                              'vala', 'vapi'))
        if kwargs.get('install'):
            custom_kwargs['install'] = kwargs['install']
            custom_kwargs['install_dir'] = install_dir
            # We shouldn't need this locally but we install it
            deps_target = self._generate_deps(state, library, vapi_packages, install_dir)
            created_values.append(deps_target)
        vapi_target = VapiTarget(vapi_output, state.subdir, custom_kwargs)
        # So to try our best to get this to just work we need:
        # - link with the correct library
        # - include the vapi and dependent vapi files in sources
        # - add relevant directories to include dirs
        incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)]
        sources = [vapi_target] + vapi_depends
        rv = InternalDependency(None, incs, [], [], link_with, sources, [])
        created_values.append(rv)
        return ModuleReturnValue(rv, created_values)
def initialize():
    """Module entry point called by Meson's module loader."""
    return GnomeModule()
| {
"content_hash": "90db1a3f3f2eaba6431ed3fb84dd98d4",
"timestamp": "",
"source": "github",
"line_count": 1170,
"max_line_length": 133,
"avg_line_length": 47.12051282051282,
"alnum_prop": 0.5462625383178248,
"repo_name": "fmuellner/meson",
"id": "685a86befa89365aae6d3c47c186dca59df2e97c",
"size": "55724",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mesonbuild/modules/gnome.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "2568"
},
{
"name": "Batchfile",
"bytes": "795"
},
{
"name": "C",
"bytes": "106020"
},
{
"name": "C#",
"bytes": "631"
},
{
"name": "C++",
"bytes": "17456"
},
{
"name": "CMake",
"bytes": "1670"
},
{
"name": "D",
"bytes": "2026"
},
{
"name": "Emacs Lisp",
"bytes": "1226"
},
{
"name": "Fortran",
"bytes": "1946"
},
{
"name": "Genie",
"bytes": "341"
},
{
"name": "Inno Setup",
"bytes": "372"
},
{
"name": "Java",
"bytes": "994"
},
{
"name": "JavaScript",
"bytes": "136"
},
{
"name": "LLVM",
"bytes": "75"
},
{
"name": "Lex",
"bytes": "110"
},
{
"name": "Meson",
"bytes": "198263"
},
{
"name": "Objective-C",
"bytes": "699"
},
{
"name": "Objective-C++",
"bytes": "332"
},
{
"name": "Protocol Buffer",
"bytes": "92"
},
{
"name": "Python",
"bytes": "1275139"
},
{
"name": "Roff",
"bytes": "232"
},
{
"name": "Rust",
"bytes": "762"
},
{
"name": "Shell",
"bytes": "1787"
},
{
"name": "Swift",
"bytes": "1152"
},
{
"name": "Vala",
"bytes": "8308"
},
{
"name": "Vim script",
"bytes": "9434"
},
{
"name": "Yacc",
"bytes": "50"
}
],
"symlink_target": ""
} |
'''
Handles all Login/Sign Up logic - calling on database module where needed
'''
import os
import ast
import hashlib
from passman.functions import getUserInput, quit
from passman.database import checkUserCredentials, addUser, \
setDBUsername, pullDatabase, checkConnection
from passman.JSON import setOfflineUsername
############################################################
# Login Functions
############################################################
def handleLogin():
    '''
    Handles main menu login/signup functionality.
    Keeps prompting until a valid option is chosen, then dispatches
    to loginUser() or signUpUser().
    '''
    prompt = ("Do you want to log in or start a new account?\n"
              "(Enter the number of your choice)\n\n"
              "(1) Log In\n"
              "(2) Start New Account\n")
    while True:
        choice = getUserInput(prompt)
        if choice == "1":
            loginUser()
            return
        if choice == "2":
            signUpUser()
            return
        print("Please enter a valid option\n")
def loginUser(username=""):
    '''
    Handles login for the online database.
    Prompts for the username (unless one is given, e.g. from the command
    line) and allows up to three password attempts before exiting.
    Falls back to offline mode when there is no connection.
    Returns True on successful login.
    '''
    username = username if username else getUserInput("Please enter your username")
    if not checkConnection("test"):
        # Bug fix: previously fell through into the online flow after the
        # offline login completed; offline login is a complete login.
        return handleOfflineLogin(username)
    for attempt in range(3):
        pw = getUserInput("Please enter your password", True)
        if checkUserCredentials(pw, username):
            # Bug fix: the old retry loop quit even when the third
            # password attempt was correct.
            break
        if attempt < 2:
            print("Sorry, that doesn't match our records")
    else:
        quit()  # three strikes; you're out
    setDBUsername(pw, username)
    pullDatabase()
    return True
def signUpUser():
    '''
    Handles sign up for new users.
    Requires an internet connection; reprompts when the chosen username
    is already taken. Returns True once an account has been created.
    '''
    if not checkConnection("test"):
        print("Sorry - cannot create user without internet connection")
        quit()
    username = getUserInput("Please enter your username")
    pw = getUserInput("Please enter your password", True)
    if addUser(username, pw):
        setDBUsername(pw, username)
        pullDatabase()
        return True
    print("Sorry, that username is already taken")
    # Bug fix: propagate the recursive call's result so callers receive
    # True after a successful retry instead of None.
    return signUpUser()
def getOfflineUsername(username=""):
    '''
    Checks if a user has a local database saved. Reprompts for a new
    username until a matching local database file is found.
    '''
    def db_path(name):
        # Local databases live at ~/.passman/<username>.json
        return os.path.expanduser("~/.passman/{}.json".format(name))
    if not username:
        username = getUserInput("Please enter your username")
    # Bug fix: the old guard compared the fully-expanded path against
    # ".json" (always False); reject empty usernames explicitly instead.
    while not username or not os.path.isfile(db_path(username)):
        print("Sorry, that doesn't match our records")
        username = getUserInput("Please enter your username")
    return username
def getOfflinePassword(data):
    '''
    Checks a password against the sha512 hash stored in the local database.
    Allows up to three attempts; returns the sha256 digest of the correct
    password (used as the decryption key), or exits after three failures.
    '''
    for attempt in range(3):
        pw = getUserInput("Please enter your password", True)
        hashedpw = hashlib.sha512(pw.encode('utf-8')).hexdigest()
        if hashedpw == data['password']:
            # Bug fix: the old loop quit whenever the retry counter hit 2,
            # so a correct password on the third attempt was rejected.
            return hashlib.sha256(pw.encode()).digest()
        if attempt < 2:
            print("Wrong password")
    quit()  # three strikes; you're out
def handleOfflineLogin(username=""):
    '''
    Logs in users to local database in lieu of internet connection.
    Validates the username against saved local databases, loads the
    stored data, verifies the password and configures the offline
    session. Returns True on success; exits on missing/empty data.
    '''
    print("NOTE: No connection")
    print("Continuing in offline mode. \nYou can retrieve any service data, " \
        +"but you will not be \nable to edit or upload data\n\n")
    # Reprompts until a username with a saved local database is given.
    username = getOfflineUsername(username)
    file_path = os.path.expanduser("~/.passman/{}.json".format(username))
    dir_path = os.path.expanduser("~/.passman")
    if not os.path.isdir(dir_path):
        print("Sorry, no local users found")
        quit()
    with open(file_path) as data_file:
        data = data_file.read()
    if data:
        # literal_eval safely parses the stored dict representation.
        data = ast.literal_eval(data) # from string to dict
    else:
        print("Sorry, cannot get data")
        quit()
    # key is the sha256 digest of the verified password.
    key = getOfflinePassword(data)
    setOfflineUsername(username, key)
    return True
| {
"content_hash": "c7792e3a5931f9d8ec7db63c24252892",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 83,
"avg_line_length": 30.22758620689655,
"alnum_prop": 0.6103125712981976,
"repo_name": "regexpressyourself/passman",
"id": "022891304bbc089124b263a4d2597f7e10d62dbf",
"size": "4383",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "passman/login.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "37175"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function, unicode_literals
# Package metadata (read by setup.py / docs tooling).
__author__ = 'Iacopo Spalletti'
__email__ = 'i.spalletti@nephila.it'
__version__ = '1.0.0.dev1'
# Tells Django which AppConfig to use when the app is in INSTALLED_APPS.
default_app_config = 'djangocms_blog.apps.BlogAppConfig'
| {
"content_hash": "8120307c25b919cc048e3beef457d996",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 72,
"avg_line_length": 32.57142857142857,
"alnum_prop": 0.7149122807017544,
"repo_name": "skirsdeda/djangocms-blog",
"id": "a909fc3950b8280ebd7e20df086d3fb756e3bee0",
"size": "252",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "djangocms_blog/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "761"
},
{
"name": "HTML",
"bytes": "13597"
},
{
"name": "JavaScript",
"bytes": "1055"
},
{
"name": "Makefile",
"bytes": "969"
},
{
"name": "Python",
"bytes": "323129"
}
],
"symlink_target": ""
} |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django schema migration for the aquifers app.
    Adjusts AquiferMaterial meta options, drops WaterRightsLicence.updated_date
    and alters two fields. Do not edit operations by hand once this migration
    has been applied to a database; only comments are added here.
    """
    dependencies = [
        ('aquifers', '0017_auto_20190326_0240'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='aquifermaterial',
            options={'ordering': ['code'], 'verbose_name_plural': 'Aquifer Material Codes'},
        ),
        migrations.RemoveField(
            model_name='waterrightslicence',
            name='updated_date',
        ),
        migrations.AlterField(
            model_name='waterrightslicence',
            name='aquifer_licence_id',
            field=models.AutoField(primary_key=True, serialize=False, verbose_name='Aquifer Licence ID Number'),
        ),
        migrations.AlterField(
            model_name='waterrightspurpose',
            name='display_order',
            field=models.PositiveIntegerField(default=0),
        ),
    ]
| {
"content_hash": "05cc457cf83b3d1d52d6b6175ce0c1b8",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 112,
"avg_line_length": 31.103448275862068,
"alnum_prop": 0.5898004434589801,
"repo_name": "bcgov/gwells",
"id": "4924c0be839dd3aade59d9e67d3a4a642eb404ef",
"size": "951",
"binary": false,
"copies": "1",
"ref": "refs/heads/release",
"path": "app/backend/aquifers/migrations/0018_auto_20190407_1905.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "519"
},
{
"name": "Dockerfile",
"bytes": "4104"
},
{
"name": "Groovy",
"bytes": "89156"
},
{
"name": "HTML",
"bytes": "10079"
},
{
"name": "JavaScript",
"bytes": "271010"
},
{
"name": "Makefile",
"bytes": "807"
},
{
"name": "Python",
"bytes": "1550542"
},
{
"name": "SCSS",
"bytes": "7409"
},
{
"name": "Shell",
"bytes": "46319"
},
{
"name": "Vue",
"bytes": "833800"
}
],
"symlink_target": ""
} |
"""Test out a model.
For Jetson devices, you must specify a .pb model:
python3 examples/benchmark_demo.py --model=data/traffic_model_tftrt.pb
For Coral devices, you must specify a tflite or _edgetpu.tflite model:
python3 examples/benchmark_demo.py --model=data/traffic_model_edgetpu.tflite
"""
import argparse
import time
import numpy as np
from automl_video_ondevice import object_tracking as vot
import utils
default_model = 'data/traffic_model_edgetpu.tflite'
default_labels = 'data/traffic_label_map.pbtxt'
def main():
  """Loads a model and repeatedly runs inference on a blank frame,
  printing the measured FPS and latency of each iteration."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--model', help='model path', default=default_model)
  parser.add_argument(
      '--labels', help='label file path', default=default_labels)
  parser.add_argument(
      '--threshold', type=float, default=0.2, help='class score threshold')
  flags = parser.parse_args()

  print('Loading %s with %s labels.' % (flags.model, flags.labels))
  engine = vot.load(flags.model, flags.labels,
                    vot.ObjectTrackingConfig(score_threshold=flags.threshold))
  size = engine.input_size()

  fps_calculator = utils.FpsCalculator()
  # A black frame is enough to exercise the full inference path.
  dummy_frame = np.zeros((size.height, size.width, 3), dtype=np.uint8)

  while True:
    # Run inference engine to populate annotations array.
    annotations = []
    engine.run(int(round(time.time() * 1000)), dummy_frame, annotations)

    # Calculate FPS and latency.
    fps, latency = fps_calculator.measure()
    print('FPS: {}\t\t\tLatency: {}ms'.format(fps, latency))
# Script entry point: python3 examples/benchmark_demo.py [--model ...]
if __name__ == '__main__':
  main()
| {
"content_hash": "18c646c70232f33c310a6dda8efb4893",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 78,
"avg_line_length": 30.903846153846153,
"alnum_prop": 0.7000622277535781,
"repo_name": "google/automl-video-ondevice",
"id": "525c1f28d76504d75e61ecaf56728cbfd1d5bb1b",
"size": "2282",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/benchmark_demo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HCL",
"bytes": "782"
},
{
"name": "Python",
"bytes": "71478"
},
{
"name": "Shell",
"bytes": "240"
}
],
"symlink_target": ""
} |
from jingo import register
from django.template import Context
from django.template.loader import get_template
@register.function
def bootstrapform(form):
    """Render the given form through the bootstrapform template."""
    context = Context({'form': form})
    return get_template("bootstrapform/form.html").render(context)
@register.function
def invalid_form(form):
    """return true if the form is bound and invalid"""
    if not form.is_bound:
        return False
    return not form.is_valid()
| {
"content_hash": "9f78f369a57100533aff47853b49192e",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 54,
"avg_line_length": 27.1875,
"alnum_prop": 0.7448275862068966,
"repo_name": "peterbe/airmozilla",
"id": "dbb49d7e2d5b9c6cce5a6a3583a0c52e8c56c3ca",
"size": "435",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airmozilla/manage/helpers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "70585"
},
{
"name": "JavaScript",
"bytes": "10192"
},
{
"name": "Puppet",
"bytes": "6677"
},
{
"name": "Python",
"bytes": "1235514"
},
{
"name": "Shell",
"bytes": "3672"
}
],
"symlink_target": ""
} |
import os
from django.conf import settings
from django.core.urlresolvers import clear_url_caches
from django.db import models
from django.db.models.signals import pre_delete, post_delete
from django.utils.encoding import smart_str
from django.utils.translation import get_language, get_language_info, ugettext_lazy, ugettext as _
from . import utils
from .managers import TranslatableManager
# Optional django-elfinder integration: the ElfinderField is used only when the
# package is importable AND the 'elfinder' app is enabled in INSTALLED_APPS;
# otherwise a plain ImageField is used.
USE_ELFINDER = False
try:
    from elfinder.fields import ElfinderField
    USE_ELFINDER = 'elfinder' in settings.INSTALLED_APPS
except ImportError:
    USE_ELFINDER = False
if USE_ELFINDER:
    # Factory returning a new field instance configured for language images.
    make_imagefield = lambda: ElfinderField(optionset='image', start_path='languages',
        verbose_name=ugettext_lazy('Image'), blank=True, null=True)
else:
    def _upload_to(instance, filename):
        # Store uploads as languages/<instance.name><original extension>.
        ext = os.path.splitext(filename)[-1]
        return os.path.join('languages', '%s%s' % (instance.name, ext))
    make_imagefield = lambda: models.ImageField(upload_to=_upload_to, verbose_name=ugettext_lazy('Image'), blank=True,
        null=True)
class Language(models.Model):
    """
    This model stores the project's available languages. A user may edit
    languages from the admin interface. At least one language
    must always be stored in the database. One (and only one) language
    must always be set as the 'default'. Methods of this model and other
    aspects of yawd-translations guarantee that these constraints are
    always met.
    The languages among which a user may choose are those defined in the
    `LANGUAGES <https://docs.djangoproject.com/en/dev/ref/settings/#languages>`_
    django setting.
    """
    # Use name as primary key to avoid joins when retrieving Translation objects
    name = models.CharField(choices=sorted(settings.LANGUAGES, key=lambda name: name[1]), max_length=7,
        verbose_name=ugettext_lazy('Name'), primary_key=True)
    # NOTE(review): plain CharField even though make_imagefield() is defined at
    # module level and appears unused in this chunk -- confirm whether this
    # field was meant to be created via make_imagefield().
    image = models.CharField(max_length=50)
    # Exactly one Language row should have default=True; enforced by save().
    default = models.BooleanField(default=False, verbose_name=ugettext_lazy('Default'))
    order = models.IntegerField(default=0, verbose_name=ugettext_lazy('Order'))
    class Meta:
        verbose_name = ugettext_lazy("Language")
        verbose_name_plural = ugettext_lazy("Languages")
        ordering = ['order', 'name']
        permissions = (
            ("view_translations", "Can see translation messages for a language"),
            ("edit_translations", "Can edit the language's translation messages"),
        )
    def _default_changed(self):
        """Refresh thread-level state after the default language has changed."""
        # change the default language for this thread
        clear_url_caches()
        utils._default = self.name
    def save(self, *args, **kwargs):
        """
        Override the default save() method to ensure that one and only
        one default language exists.
        """
        try:
            # not using get_default_language() here, as this method might return
            # the settings.LANGUAGE_CODE setting if no db languages exist
            default = Language.objects.get(default=True)
            # check if the default language just changed
            if self.default and self != default:
                # make sure only one default language exists
                default.default = False
                default.save()
                self._default_changed()
        except Language.DoesNotExist:
            # no default language was found
            # force this as the default
            self.default = True
            self._default_changed()
        super(Language, self).save(*args, **kwargs)
        # this might produce a little overhead, but it's necessary:
        # the state of _supported could be unpredictable by now
        utils._supported = [smart_str(l) for l in Language.objects.values_list('name', flat=True)]
    def delete(self):
        """
        Deleting the default language is not allowed.
        """
        # Silently a no-op when self.default is True (no exception raised here;
        # the pre_delete signal receiver below guards queryset-level deletes).
        if not self.default:
            super(Language, self).delete()
    def __unicode__(self):
        """
        Return the display name for this language.
        """
        return get_language_info(self.name)['name']
class Translatable(models.Model):
    """
    This model should be subclassed by models that need multilingual
    support.
    A Translatable object should only define members
    that are **common** to all languages. To define multilingual
    fields, a subclass of the :class:`translations.models.Translation`
    model must be implemented.
    """
    objects = TranslatableManager()
    class Meta:
        abstract = True
    def get_name(self, language_id=None):
        """
        Get the related :class:`translations.models.Translation`
        object's display name for a given ``language``.

        Falls back to the default-language translation (annotated with a
        "not translated in" suffix) and finally to a generic
        "<verbose_name> #<pk>" label when no translation exists at all.
        """
        # use the current language if not explicitly set
        translation = self.translation(language_id)
        if translation:
            return unicode(translation)
        # attempt to show default translation
        translation = self.translation(utils.get_default_language())
        if translation:
            return u'%s (%s %s)' % (translation, _('not translated in'), language_id if language_id else get_language())
        else:
            return u'%s #%s (%s %s)' % (
                self._meta.verbose_name, self.pk, _('not translated in'),
                language_id if language_id else get_language())
    def translation(self, language_id=None):
        """
        Get translation for the language ``language_id``. If no argument
        is given, return the current language translation.
        Always use this method if you need to access a translation,
        since it does not generate extra queries.

        Returns None when no translation exists for the language.
        """
        if not language_id:
            language_id = get_language()
        # using prefetched translations
        for l in self.translations.all():
            if l.language_id == language_id:
                return l
    def __unicode__(self):
        """
        This default implementation returns the unicode representation of
        the related :class:`translations.models.Translation` object
        for the current language.
        """
        return self.get_name()
    def save(self, *args, **kwargs):
        """
        Clear prefetched translations
        """
        ret = super(Translatable, self).save(*args, **kwargs)
        # Invalidate the prefetch cache so translation() does not serve
        # stale objects after a save.
        if hasattr(self, '_prefetched_objects_cache') and 'translations' in self._prefetched_objects_cache:
            del self._prefetched_objects_cache['translations']
        return ret
class Translation(models.Model):
    """
    This model represents the translations of a
    :class:`translations.models.Translatable` model. There always exist
    a ``ForeignKey`` to the Translatable object, and the reverse relation
    should be named `'translations'`.
    """
    # language_id equals Language.name, which is that model's primary key.
    # NOTE(review): no on_delete argument (pre-Django-2.0 style).
    language = models.ForeignKey(Language)
    class Meta:
        abstract = True
def pre_delete_language(sender, instance, using, **kwargs):
    """
    **Signal receiver**. Guard against deleting the default language.

    Admin actions already make sure the default language will not be deleted,
    but this receiver also protects against 3rd party code removing the
    default language by accident.
    """
    if not instance.default:
        return
    raise Exception(_("Cannot delete the default language"))
def post_delete_language(sender, instance, using, **kwargs):
    """
    **Signal receiver**. Update the supported languages to ensure that
    a 404 will be raised when requesting the language's urls
    """
    # generate supported languages in case they are not initialized
    utils.get_supported_languages()
    # drop the deleted language from the module-level cache
    utils._supported.remove(instance.name)
# Register the receivers; dispatch_uid prevents duplicate registration
# when this module is imported more than once.
pre_delete.connect(pre_delete_language, sender=Language, dispatch_uid='language-pre-delete')
post_delete.connect(post_delete_language, sender=Language, dispatch_uid='language-post-delete')
| {
"content_hash": "8d91056ac2425a10e88528dea5aec20f",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 120,
"avg_line_length": 37.39336492890995,
"alnum_prop": 0.6467680608365018,
"repo_name": "egemsoft/esef-yawd-translation",
"id": "ec25a86a7e29db76de4afa9a5f67d1a9f36b07f7",
"size": "7890",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "translations/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "13179"
},
{
"name": "Python",
"bytes": "49324"
}
],
"symlink_target": ""
} |
"""
Low-level objects providing an abstraction for the objects involved in the calculation.
"""
import abc
import collections
from enum import Enum
from pprint import pformat
import numpy as np
from monty.collections import AttrDict
from monty.design_patterns import singleton
from monty.json import MontyDecoder, MontyEncoder, MSONable
from pymatgen.core import units
from pymatgen.core.lattice import Lattice
from pymatgen.core.structure import Structure
from pymatgen.core.units import ArrayWithUnit
from pymatgen.util.serialization import pmg_serialize
def lattice_from_abivars(cls=None, *args, **kwargs):
    """
    Returns a `Lattice` object from a dictionary
    with the Abinit variables `acell` and either `rprim` in Bohr or `angdeg`
    If acell is not given, the Abinit default is used i.e. [1,1,1] Bohr

    Args:
        cls: Lattice class to be instantiated. pymatgen.core.lattice.Lattice if `cls` is None

    Raises:
        ValueError: if both `rprim` and `angdeg` are given, if the angles are
            unphysical, or if neither variable is present in the dict.

    Example:
        lattice_from_abivars(acell=3*[10], rprim=np.eye(3))
    """
    cls = Lattice if cls is None else cls
    # Merge positional dict-like arguments into kwargs.
    kwargs.update(dict(*args))
    d = kwargs
    rprim = d.get("rprim", None)
    angdeg = d.get("angdeg", None)
    # NOTE(review): despite the docstring, a missing "acell" raises KeyError
    # here rather than defaulting to [1, 1, 1] Bohr -- confirm intent.
    acell = d["acell"]
    if rprim is not None:
        if angdeg is not None:
            raise ValueError("angdeg and rprimd are mutually exclusive")
        rprim = np.reshape(rprim, (3, 3))
        # Scale each (dimensionless) primitive vector by its acell entry.
        rprimd = [float(acell[i]) * rprim[i] for i in range(3)]
        # Call pymatgen constructors (note that pymatgen uses Angstrom instead of Bohr).
        return cls(ArrayWithUnit(rprimd, "bohr").to("ang"))
    if angdeg is not None:
        angdeg = np.reshape(angdeg, 3)
        if np.any(angdeg <= 0.0):
            raise ValueError("Angles must be > 0 but got %s" % str(angdeg))
        if angdeg.sum() >= 360.0:
            raise ValueError("The sum of angdeg must be lower that 360, angdeg %s" % str(angdeg))
        # This code follows the implementation in ingeo.F90
        # See also http://www.abinit.org/doc/helpfiles/for-v7.8/input_variables/varbas.html#angdeg
        tol12 = 1e-12
        pi, sin, cos, sqrt = np.pi, np.sin, np.cos, np.sqrt
        rprim = np.zeros((3, 3))
        if (
            abs(angdeg[0] - angdeg[1]) < tol12
            and abs(angdeg[1] - angdeg[2]) < tol12
            and abs(angdeg[0] - 90.0) + abs(angdeg[1] - 90.0) + abs(angdeg[2] - 90) > tol12
        ):
            # Treat the case of equal angles (except all right angles):
            # generates trigonal symmetry wrt third axis
            cosang = cos(pi * angdeg[0] / 180.0)
            a2 = 2.0 / 3.0 * (1.0 - cosang)
            aa = sqrt(a2)
            cc = sqrt(1.0 - a2)
            rprim[0, 0] = aa
            rprim[0, 1] = 0.0
            rprim[0, 2] = cc
            rprim[1, 0] = -0.5 * aa
            rprim[1, 1] = sqrt(3.0) * 0.5 * aa
            rprim[1, 2] = cc
            rprim[2, 0] = -0.5 * aa
            rprim[2, 1] = -sqrt(3.0) * 0.5 * aa
            rprim[2, 2] = cc
        else:
            # Treat all the other cases
            rprim[0, 0] = 1.0
            rprim[1, 0] = cos(pi * angdeg[2] / 180.0)
            rprim[1, 1] = sin(pi * angdeg[2] / 180.0)
            rprim[2, 0] = cos(pi * angdeg[1] / 180.0)
            rprim[2, 1] = (cos(pi * angdeg[0] / 180.0) - rprim[1, 0] * rprim[2, 0]) / rprim[1, 1]
            rprim[2, 2] = sqrt(1.0 - rprim[2, 0] ** 2 - rprim[2, 1] ** 2)
        # Call pymatgen constructors (note that pymatgen uses Angstrom instead of Bohr).
        rprimd = [float(acell[i]) * rprim[i] for i in range(3)]
        return cls(ArrayWithUnit(rprimd, "bohr").to("ang"))
    raise ValueError("Don't know how to construct a Lattice from dict:\n%s" % pformat(d))
def structure_from_abivars(cls=None, *args, **kwargs):
    """
    Build a :class:`Structure` object from a dictionary with ABINIT variables.

    Args:
        cls: Structure class to be instantiated. pymatgen.core.structure.Structure if cls is None

    Raises:
        ValueError: if no coordinate variable is present, if mutually
            exclusive coordinate variables are given, or if len(typat)
            does not match the number of coordinates.

    example:
        al_structure = structure_from_abivars(
            acell=3*[7.5],
            rprim=[0.0, 0.5, 0.5,
                   0.5, 0.0, 0.5,
                   0.5, 0.5, 0.0],
            typat=1,
            xred=[0.0, 0.0, 0.0],
            ntypat=1,
            znucl=13,
        )
    `xred` can be replaced with `xcart` or `xangst`.
    """
    kwargs.update(dict(*args))
    d = kwargs
    cls = Structure if cls is None else cls
    # lattice = Lattice.from_dict(d, fmt="abivars")
    lattice = lattice_from_abivars(**d)
    # Coordinate precedence: xred (fractional), then xcart (Bohr), then xangst (Angstrom).
    coords, coords_are_cartesian = d.get("xred", None), False
    if coords is None:
        coords = d.get("xcart", None)
        if coords is not None:
            if "xangst" in d:
                raise ValueError("xangst and xcart are mutually exclusive")
            coords = ArrayWithUnit(coords, "bohr").to("ang")
        else:
            coords = d.get("xangst", None)
        coords_are_cartesian = True
    if coords is None:
        raise ValueError("Cannot extract coordinates from:\n %s" % str(d))
    coords = np.reshape(coords, (-1, 3))
    znucl_type, typat = d["znucl"], d["typat"]
    # Scalars are promoted to one-element lists (single-atom input).
    if not isinstance(znucl_type, collections.abc.Iterable):
        znucl_type = [znucl_type]
    if not isinstance(typat, collections.abc.Iterable):
        typat = [typat]
    if len(typat) != len(coords):
        raise ValueError("len(typat) != len(coords):\ntypat: %s\ncoords: %s" % (typat, coords))
    # Note conversion to int and Fortran --> C indexing
    typat = np.array(typat, dtype=np.int_)
    species = [znucl_type[typ - 1] for typ in typat]
    return cls(
        lattice,
        species,
        coords,
        validate_proximity=False,
        to_unit_cell=False,
        coords_are_cartesian=coords_are_cartesian,
    )
def species_by_znucl(structure):
    """
    Return the unique species found in ``structure``, ordered by their first
    appearance over the sites.

    Example:
        Site0: 0.5 0 0 O
        Site1: 0 0 0 Si

    produces [Specie_O, Specie_Si] and not set([Specie_O, Specie_Si])
    as in `types_of_specie`.
    """
    # IMPORTANT: keep this deterministic first-occurrence ordering and do NOT
    # replace the list with a set. The ordering is easy to reproduce in Fortran
    # (Abinit interoperability) and all DFPT results produced so far rely on
    # the old structure.types_of_specie, which was equivalent to this loop.
    seen = []
    for site in structure:
        for specie, occupancy in site.species.items():
            if occupancy != 0 and specie not in seen:
                seen.append(specie)
    return seen
def structure_to_abivars(structure, enforce_znucl=None, enforce_typat=None, **kwargs):
    """
    Receives a structure and returns a dictionary with ABINIT variables.

    Args:
        structure: :class:`Structure` object. Must be ordered (no partial occupancies).
        enforce_znucl: List of ntypat entries with the value of Z for each type of atom.
            Used to change the default ordering.
        enforce_typat: List with natom entries with the type index.
            Fortran conventions: start to count from 1.
            Used to change the default ordering.
        kwargs: ``geomode`` selects how the lattice is expressed:
            "rprim" (default), "angdeg", or "automatic".

    Raises:
        ValueError: if the structure is disordered, if only one of
            enforce_znucl/enforce_typat is given, if their lengths are
            inconsistent, or if ``geomode`` is unknown.
    """
    if not structure.is_ordered:
        raise ValueError(
            """\
Received disordered structure with partial occupancies that cannot be converted into an Abinit input.
Please use OrderDisorderedStructureTransformation or EnumerateStructureTransformation
to build an appropriate supercell from partial occupancies or, alternatively, use the Rigid Band Model
or the Virtual Crystal Approximation."""
        )
    natom = structure.num_sites
    ntypat = structure.ntypesp
    enforce_order = False
    if enforce_znucl is not None or enforce_typat is not None:
        enforce_order = True
        # consistency check: both overrides must be provided together
        if enforce_znucl is None or enforce_typat is None:
            raise ValueError("Both enforce_znucl and enforce_typat are required!")
        # Bugfix: the %-format tuples below were previously closed too early
        # ("% (len(enforce_typat))," ), so reaching either branch raised a
        # TypeError instead of the intended ValueError message.
        if len(enforce_typat) != len(structure):
            raise ValueError(
                "enforce_typat contains %d entries while it should be natom: %s"
                % (len(enforce_typat), len(structure))
            )
        if len(enforce_znucl) != ntypat:
            raise ValueError(
                "enforce_znucl contains %d entries while it should be ntypat: %s" % (len(enforce_znucl), ntypat)
            )
    if not enforce_order:
        # Deterministic first-occurrence ordering of the species (see species_by_znucl).
        types_of_specie = species_by_znucl(structure)
        # types_of_specie = structure.types_of_species
        # [ntypat] list
        znucl_type = [specie.number for specie in types_of_specie]
        # typat uses Fortran conventions: indices start from 1.
        typat = np.zeros(natom, np.int_)
        for atm_idx, site in enumerate(structure):
            typat[atm_idx] = types_of_specie.index(site.specie) + 1
    else:
        znucl_type = enforce_znucl
        typat = enforce_typat
    rprim = ArrayWithUnit(structure.lattice.matrix, "ang").to("bohr")
    angdeg = structure.lattice.angles
    xred = np.reshape([site.frac_coords for site in structure], (-1, 3))
    # Set small values to zero. This usually happens when the CIF file
    # does not give structure parameters with enough digits.
    rprim = np.where(np.abs(rprim) > 1e-8, rprim, 0.0)
    xred = np.where(np.abs(xred) > 1e-8, xred, 0.0)
    # Info on atoms.
    d = dict(
        natom=natom,
        ntypat=ntypat,
        typat=typat,
        znucl=znucl_type,
        xred=xred,
    )
    # Add info on the lattice.
    # Should we use (rprim, acell) or (angdeg, acell) to specify the lattice?
    geomode = kwargs.pop("geomode", "rprim")
    if geomode == "automatic":
        geomode = "rprim"
        if structure.lattice.is_hexagonal:  # or structure.lattice.is_rhombohedral
            geomode = "angdeg"
            angdeg = structure.lattice.angles
            # Here one could polish a bit the numerical values if they are not exact.
            # Note that in pmg the angles are 12, 20, 01 while in Abinit 12, 02, 01
            # One should make sure that the orientation is preserved (see Curtarolo's settings)
    if geomode == "rprim":
        d.update(
            acell=3 * [1.0],
            rprim=rprim,
        )
    elif geomode == "angdeg":
        d.update(
            acell=ArrayWithUnit(structure.lattice.abc, "ang").to("bohr"),
            angdeg=angdeg,
        )
    else:
        raise ValueError("Wrong value for geomode: %s" % geomode)
    return d
def contract(s):
    """
    Run-length encode a whitespace-separated string of tokens.

    >>> assert contract("1 1 1 2 2 3") == "3*1 2*2 1*3"
    >>> assert contract("1 1 3 2 3") == "2*1 1*3 1*2 1*3"
    """
    if not s:
        return s
    runs = []  # list of [count, token] pairs for consecutive equal tokens
    for token in s.split():
        if runs and runs[-1][1] == token:
            runs[-1][0] += 1
        else:
            runs.append([1, token])
    return " ".join("%d*%s" % (count, token) for count, token in runs)
class AbivarAble(metaclass=abc.ABCMeta):
    """
    An `AbivarAble` object provides a method `to_abivars`
    that returns a dictionary with the abinit variables.
    """
    @abc.abstractmethod
    def to_abivars(self):
        """Returns a dictionary with the abinit variables."""
    # @abc.abstractmethod
    # def from_abivars(cls, vars):
    #     """Build the object from a dictionary with Abinit variables."""
    def __str__(self):
        """Pretty-printed view of the abinit variables."""
        return pformat(self.to_abivars(), indent=1, width=80, depth=None)
    def __contains__(self, key):
        """True if `key` is among the variable names returned by to_abivars."""
        return key in self.to_abivars()
@singleton
class MandatoryVariable:
    """
    Singleton used to tag mandatory variables, just because I can use
    the cool syntax: variable is MANDATORY!
    """
@singleton
class DefaultVariable:
    """Singleton used to tag variables that will have the default value"""
# Module-level sentinel instances; compare with `is` (e.g. `value is MANDATORY`).
MANDATORY = MandatoryVariable()
DEFAULT = DefaultVariable()
class SpinMode(
    collections.namedtuple("SpinMode", "mode nsppol nspinor nspden"),
    AbivarAble,
    MSONable,
):
    """
    Different configurations of the electron density as implemented in abinit:
    One can use as_spinmode to construct the object via SpinMode.as_spinmode
    (string) where string can assume the values:

    - polarized
    - unpolarized
    - afm (anti-ferromagnetic)
    - spinor (non-collinear magnetism)
    - spinor_nomag (non-collinear, no magnetism)

    Fields: ``mode`` (string name), ``nsppol``, ``nspinor``, ``nspden``
    (the corresponding Abinit variables, see to_abivars).
    """
    @classmethod
    def as_spinmode(cls, obj):
        """Converts obj into a `SpinMode` instance (accepts an instance or a mode string)."""
        if isinstance(obj, cls):
            return obj
        # Assume a string with mode
        try:
            return _mode2spinvars[obj]
        except KeyError:
            raise KeyError("Wrong value for spin_mode: %s" % str(obj))
    def to_abivars(self):
        """Dictionary with Abinit input variables."""
        return {
            "nsppol": self.nsppol,
            "nspinor": self.nspinor,
            "nspden": self.nspden,
        }
    @pmg_serialize
    def as_dict(self):
        """Convert object to dict."""
        return {k: getattr(self, k) for k in self._fields}
    @classmethod
    def from_dict(cls, d):
        """Build object from dict."""
        return cls(**{k: d[k] for k in d if k in cls._fields})
# A handy multiton: pre-built SpinMode instances keyed by mode name.
# SpinMode.as_spinmode uses this table to translate strings into instances.
_mode2spinvars = {
    "unpolarized": SpinMode("unpolarized", 1, 1, 1),
    "polarized": SpinMode("polarized", 2, 1, 2),
    "afm": SpinMode("afm", 1, 1, 2),
    "spinor": SpinMode("spinor", 1, 2, 4),
    "spinor_nomag": SpinMode("spinor_nomag", 1, 2, 1),
}
class Smearing(AbivarAble, MSONable):
    """
    Variables defining the smearing technique. The preferred way to instanciate
    a `Smearing` object is via the class method Smearing.as_smearing(string)
    """

    #: Mapping string_mode --> occopt
    _mode2occopt = {
        "nosmearing": 1,
        "fermi_dirac": 3,
        "marzari4": 4,
        "marzari5": 5,
        "methfessel": 6,
        "gaussian": 7,
    }

    def __init__(self, occopt, tsmear):
        """
        Build object with occopt and tsmear.

        Args:
            occopt: Abinit occopt variable (see _mode2occopt for the supported values).
            tsmear: Smearing width (Hartree units are used by as_smearing).
        """
        self.occopt = occopt
        self.tsmear = tsmear

    def __str__(self):
        # NOTE: self.mode raises AttributeError if occopt is not in _mode2occopt.
        s = "occopt %d # %s Smearing\n" % (self.occopt, self.mode)
        if self.tsmear:
            s += "tsmear %s" % self.tsmear
        return s

    def __eq__(self, other):
        # Return NotImplemented instead of raising AttributeError when the
        # other operand is not a Smearing (e.g. None).
        if not isinstance(other, Smearing):
            return NotImplemented
        return self.occopt == other.occopt and np.allclose(self.tsmear, other.tsmear)

    def __ne__(self, other):
        return not self == other

    def __bool__(self):
        """False only for the "nosmearing" mode."""
        return self.mode != "nosmearing"

    # py2 old version
    __nonzero__ = __bool__

    @classmethod
    def as_smearing(cls, obj):
        """
        Constructs an instance of `Smearing` from obj. Accepts obj in the form:

        * Smearing instance
        * "name:tsmear" e.g. "gaussian:0.004"  (Hartree units)
        * "name:tsmear units" e.g. "gaussian:0.1 eV"
        * None --> no smearing

        Raises:
            KeyError: if the mode name is not in _mode2occopt.
        """
        if obj is None:
            return Smearing.nosmearing()
        if isinstance(obj, cls):
            return obj
        # obj is a string
        if obj == "nosmearing":
            return cls.nosmearing()
        obj, tsmear = obj.split(":")
        # Bugfix: the stripped string was previously discarded
        # (`obj.strip()` without assignment), so inputs such as
        # "gaussian :0.004" raised a spurious KeyError below.
        obj = obj.strip()
        occopt = cls._mode2occopt[obj]
        try:
            tsmear = float(tsmear)
        except ValueError:
            # "value unit" pair: convert to Hartree.
            tsmear, unit = tsmear.split()
            tsmear = units.Energy(float(tsmear), unit).to("Ha")
        return cls(occopt, tsmear)

    @property
    def mode(self):
        """String with smearing technique (reverse lookup of occopt in _mode2occopt)."""
        for (mode_str, occopt) in self._mode2occopt.items():
            if occopt == self.occopt:
                return mode_str
        raise AttributeError("Unknown occopt %s" % self.occopt)

    @staticmethod
    def nosmearing():
        """Build object for calculations without smearing."""
        return Smearing(1, 0.0)

    def to_abivars(self):
        """Return dictionary with Abinit variables."""
        if self.mode == "nosmearing":
            return {"occopt": 1, "tsmear": 0.0}
        return {"occopt": self.occopt, "tsmear": self.tsmear}

    @pmg_serialize
    def as_dict(self):
        """json friendly dict representation of Smearing"""
        return {"occopt": self.occopt, "tsmear": self.tsmear}

    @staticmethod
    def from_dict(d):
        """Build object from dict."""
        return Smearing(d["occopt"], d["tsmear"])
class ElectronsAlgorithm(dict, AbivarAble, MSONable):
    """Variables controlling the SCF/NSCF algorithm."""

    # A value of None means "use the abinit default".
    _DEFAULT = {
        "iprcell": None,
        "iscf": None,
        "diemac": None,
        "diemix": None,
        "diemixmag": None,
        "dielam": None,
        "diegap": None,
        "dielng": None,
        "diecut": None,
        "nstep": 50,
    }

    def __init__(self, *args, **kwargs):
        """Initialize like a dict; every key must be one of the _DEFAULT keys."""
        super().__init__(*args, **kwargs)
        unknown = [key for key in self if key not in self._DEFAULT]
        if unknown:
            raise ValueError("%s: No default value has been provided for key %s" % (self.__class__.__name__, unknown[0]))

    def to_abivars(self):
        """Dictionary with Abinit input variables."""
        return self.copy()

    @pmg_serialize
    def as_dict(self):
        """Convert object to dict."""
        return self.copy()

    @classmethod
    def from_dict(cls, d):
        """Build object from dict (ignoring the MSONable bookkeeping keys)."""
        data = dict(d)
        data.pop("@module", None)
        data.pop("@class", None)
        return cls(**data)
class Electrons(AbivarAble, MSONable):
    """The electronic degrees of freedom"""
    def __init__(
        self,
        spin_mode="polarized",
        smearing="fermi_dirac:0.1 eV",
        algorithm=None,
        nband=None,
        fband=None,
        charge=0.0,
        comment=None,
    ):  # occupancies=None,
        """
        Constructor for Electrons object.

        Args:
            spin_mode: Spin configuration; anything accepted by SpinMode.as_spinmode.
            smearing: Smearing technique; anything accepted by Smearing.as_smearing.
            algorithm: Optional dict-like object (e.g. ElectronsAlgorithm) whose
                entries are merged into the output of to_abivars.
            nband: Abinit ``nband`` variable; None to use the code default.
            fband: Abinit ``fband`` variable; None to use the code default.
            charge: Total charge of the system. Default is 0.
            comment: String comment for Electrons
        """
        super().__init__()
        self.comment = comment
        self.smearing = Smearing.as_smearing(smearing)
        self.spin_mode = SpinMode.as_spinmode(spin_mode)
        self.nband = nband
        self.fband = fband
        self.charge = charge
        self.algorithm = algorithm
    @property
    def nsppol(self):
        """Number of independent spin polarizations."""
        return self.spin_mode.nsppol
    @property
    def nspinor(self):
        """Number of independent spinor components."""
        return self.spin_mode.nspinor
    @property
    def nspden(self):
        """Number of independent density components."""
        return self.spin_mode.nspden
    def as_dict(self):
        """json friendly dict representation"""
        d = {}
        d["@module"] = self.__class__.__module__
        d["@class"] = self.__class__.__name__
        d["spin_mode"] = self.spin_mode.as_dict()
        d["smearing"] = self.smearing.as_dict()
        d["algorithm"] = self.algorithm.as_dict() if self.algorithm else None
        d["nband"] = self.nband
        d["fband"] = self.fband
        d["charge"] = self.charge
        d["comment"] = self.comment
        return d
    @classmethod
    def from_dict(cls, d):
        """Build object from dictionary."""
        d = d.copy()
        d.pop("@module", None)
        d.pop("@class", None)
        dec = MontyDecoder()
        # Re-hydrate the nested MSONable members before calling the constructor.
        d["spin_mode"] = dec.process_decoded(d["spin_mode"])
        d["smearing"] = dec.process_decoded(d["smearing"])
        d["algorithm"] = dec.process_decoded(d["algorithm"]) if d["algorithm"] else None
        return cls(**d)
    def to_abivars(self):
        """Return dictionary with Abinit variables."""
        abivars = self.spin_mode.to_abivars()
        abivars.update(
            {
                "nband": self.nband,
                "fband": self.fband,
                "charge": self.charge,
            }
        )
        if self.smearing:
            abivars.update(self.smearing.to_abivars())
        if self.algorithm:
            abivars.update(self.algorithm)
        # abivars["#comment"] = self.comment
        return abivars
class KSamplingModes(Enum):
    """Enum of the different samplings of the BZ."""
    monkhorst = 1  # homogeneous Monkhorst-Pack mesh
    path = 2       # path in k-space (band structures)
    automatic = 3  # explicit list of k-points with weights
class KSampling(AbivarAble, MSONable):
    """
    Input variables defining the K-point sampling.
    """

    def __init__(
        self,
        mode=KSamplingModes.monkhorst,
        num_kpts=0,
        kpts=((1, 1, 1),),
        kpt_shifts=(0.5, 0.5, 0.5),
        kpts_weights=None,
        use_symmetries=True,
        use_time_reversal=True,
        chksymbreak=None,
        comment=None,
    ):
        """
        Highly flexible constructor for KSampling objects. The flexibility comes
        at the cost of usability and in general, it is recommended that you use
        the default constructor only if you know exactly what you are doing and
        requires the flexibility. For most usage cases, the object be constructed
        far more easily using the convenience static constructors:

        #. gamma_only
        #. gamma_centered
        #. monkhorst
        #. monkhorst_automatic
        #. path

        and it is recommended that you use those.

        Args:
            mode: Mode for generating k-poits. Use one of the KSamplingModes enum
                types (a member name as a string is also accepted).
            num_kpts: Number of kpoints if mode is "automatic"
                Number of division for the sampling of the smallest segment if mode is "path".
                Not used for the other modes
            kpts: Number of divisions. Even when only a single specification is
                required, e.g. in the automatic scheme, the kpts should still
                be specified as a 2D array. e.g., [[20]] or [[2,2,2]].
            kpt_shifts: Shifts for Kpoints.
            use_symmetries: False if spatial symmetries should not be used
                to reduce the number of independent k-points.
            use_time_reversal: False if time-reversal symmetry should not be used
                to reduce the number of independent k-points.
            kpts_weights: Optional weights for kpoints. For explicit kpoints.
            chksymbreak: Abinit input variable: check whether the BZ sampling preserves the symmetry of the crystal.
            comment: String comment for Kpoints

        Raises:
            ValueError: if `mode` is unknown, or if the mode-specific
                constraints on num_kpts/kpts are violated.

        .. note::
            The default behavior of the constructor is monkhorst.
        """
        if isinstance(mode, str):
            mode = KSamplingModes[mode]
        super().__init__()
        self.mode = mode
        self.comment = comment
        self.num_kpts = num_kpts
        self.kpts = kpts
        self.kpt_shifts = kpt_shifts
        self.kpts_weights = kpts_weights
        self.use_symmetries = use_symmetries
        self.use_time_reversal = use_time_reversal
        self.chksymbreak = chksymbreak
        abivars = {}
        if mode == KSamplingModes.monkhorst:
            assert num_kpts == 0
            ngkpt = np.reshape(kpts, 3)
            shiftk = np.reshape(kpt_shifts, (-1, 3))
            # Map the two symmetry flags onto the Abinit kptopt convention.
            if use_symmetries and use_time_reversal:
                kptopt = 1
            if not use_symmetries and use_time_reversal:
                kptopt = 2
            if not use_symmetries and not use_time_reversal:
                kptopt = 3
            if use_symmetries and not use_time_reversal:
                kptopt = 4
            abivars.update(
                {
                    "ngkpt": ngkpt,
                    "shiftk": shiftk,
                    "nshiftk": len(shiftk),
                    "kptopt": kptopt,
                    "chksymbreak": chksymbreak,
                }
            )
        elif mode == KSamplingModes.path:
            if num_kpts <= 0:
                raise ValueError("For Path mode, num_kpts must be specified and >0")
            kptbounds = np.reshape(kpts, (-1, 3))
            # print("in path with kptbound: %s " % kptbounds)
            abivars.update(
                {
                    "ndivsm": num_kpts,
                    "kptbounds": kptbounds,
                    # Negative kptopt means "band structure path with |kptopt| segments".
                    "kptopt": -len(kptbounds) + 1,
                }
            )
        elif mode == KSamplingModes.automatic:
            kpts = np.reshape(kpts, (-1, 3))
            if len(kpts) != num_kpts:
                # Message fixed: the actual constraint is num_kpts == len(kpts).
                raise ValueError("For automatic mode, num_kpts must be equal to len(kpts).")
            abivars.update(
                {
                    "kptopt": 0,
                    "kpt": kpts,
                    "nkpt": num_kpts,
                    "kptnrm": np.ones(num_kpts),
                    "wtk": kpts_weights,  # for iscf/=-2, wtk.
                    "chksymbreak": chksymbreak,
                }
            )
        else:
            raise ValueError("Unknown mode %s" % mode)
        self.abivars = abivars
        # self.abivars["#comment"] = comment

    @property
    def is_homogeneous(self):
        """True if this is a homogeneous sampling of the BZ (i.e. not a k-path)."""
        # Bugfix: the previous test `self.mode not in ["path"]` always returned
        # True because self.mode is a KSamplingModes member, never a string.
        return self.mode != KSamplingModes.path

    @classmethod
    def gamma_only(cls):
        """Gamma-only sampling"""
        return cls(kpt_shifts=(0.0, 0.0, 0.0), comment="Gamma-only sampling")

    @classmethod
    def gamma_centered(cls, kpts=(1, 1, 1), use_symmetries=True, use_time_reversal=True):
        """
        Convenient static constructor for an automatic Gamma centered Kpoint grid.

        Args:
            kpts: Subdivisions N_1, N_2 and N_3 along reciprocal lattice vectors.
            use_symmetries: False if spatial symmetries should not be used
                to reduce the number of independent k-points.
            use_time_reversal: False if time-reversal symmetry should not be used
                to reduce the number of independent k-points.

        Returns:
            :class:`KSampling` object.
        """
        return cls(
            kpts=[kpts],
            kpt_shifts=(0.0, 0.0, 0.0),
            use_symmetries=use_symmetries,
            use_time_reversal=use_time_reversal,
            comment="gamma-centered mode",
        )

    @classmethod
    def monkhorst(
        cls,
        ngkpt,
        shiftk=(0.5, 0.5, 0.5),
        chksymbreak=None,
        use_symmetries=True,
        use_time_reversal=True,
        comment=None,
    ):
        """
        Convenient static constructor for a Monkhorst-Pack mesh.

        Args:
            ngkpt: Subdivisions N_1, N_2 and N_3 along reciprocal lattice vectors.
            shiftk: Shift to be applied to the kpoints.
            chksymbreak: Abinit chksymbreak input variable (None to use the code default).
            use_symmetries: Use spatial symmetries to reduce the number of k-points.
            use_time_reversal: Use time-reversal symmetry to reduce the number of k-points.
            comment: Optional string comment.

        Returns:
            :class:`KSampling` object.
        """
        return cls(
            kpts=[ngkpt],
            kpt_shifts=shiftk,
            use_symmetries=use_symmetries,
            use_time_reversal=use_time_reversal,
            chksymbreak=chksymbreak,
            comment=comment if comment else "Monkhorst-Pack scheme with user-specified shiftk",
        )

    @classmethod
    def monkhorst_automatic(
        cls,
        structure,
        ngkpt,
        use_symmetries=True,
        use_time_reversal=True,
        chksymbreak=None,
        comment=None,
    ):
        """
        Convenient static constructor for an automatic Monkhorst-Pack mesh.

        Args:
            structure: :class:`Structure` object (currently unused, see TODO below).
            ngkpt: Subdivisions N_1, N_2 and N_3 along reciprocal lattice vectors.
            use_symmetries: Use spatial symmetries to reduce the number of k-points.
            use_time_reversal: Use time-reversal symmetry to reduce the number of k-points.
            chksymbreak: Abinit chksymbreak input variable (None to use the code default).
            comment: Optional string comment.

        Returns:
            :class:`KSampling` object.
        """
        # TODO: pick the shift based on the lattice of `structure`.
        shiftk = 3 * (0.5,)
        # if lattice.ishexagonal:
        # elif lattice.isbcc
        # elif lattice.isfcc
        return cls.monkhorst(
            ngkpt,
            shiftk=shiftk,
            use_symmetries=use_symmetries,
            use_time_reversal=use_time_reversal,
            chksymbreak=chksymbreak,
            comment=comment if comment else "Automatic Monkhorst-Pack scheme",
        )

    @classmethod
    def _path(cls, ndivsm, structure=None, kpath_bounds=None, comment=None):
        """
        Static constructor for path in k-space.

        Args:
            ndivsm: Number of division for the smallest segment.
            structure: :class:`Structure` object. Used to compute the path
                boundaries when kpath_bounds is None.
            kpath_bounds: List with the reduced coordinates of the k-points defining the path.
            comment: Comment string.

        Returns:
            :class:`KSampling` object.
        """
        if kpath_bounds is None:
            # Compute the boundaries from the input structure.
            from pymatgen.symmetry.bandstructure import HighSymmKpath

            sp = HighSymmKpath(structure)
            # Flat the array since "path" is a a list of lists!
            kpath_labels = []
            for labels in sp.kpath["path"]:
                kpath_labels.extend(labels)
            kpath_bounds = []
            for label in kpath_labels:
                red_coord = sp.kpath["kpoints"][label]
                # print("label %s, red_coord %s" % (label, red_coord))
                kpath_bounds.append(red_coord)
        return cls(
            mode=KSamplingModes.path,
            num_kpts=ndivsm,
            kpts=kpath_bounds,
            comment=comment if comment else "K-Path scheme",
        )

    @classmethod
    def path_from_structure(cls, ndivsm, structure):
        """See _path for the meaning of the variables"""
        return cls._path(
            ndivsm,
            structure=structure,
            comment="K-path generated automatically from structure",
        )

    @classmethod
    def explicit_path(cls, ndivsm, kpath_bounds):
        """See _path for the meaning of the variables"""
        return cls._path(ndivsm, kpath_bounds=kpath_bounds, comment="Explicit K-path")

    @classmethod
    def automatic_density(
        cls,
        structure,
        kppa,
        chksymbreak=None,
        use_symmetries=True,
        use_time_reversal=True,
        shifts=(0.5, 0.5, 0.5),
    ):
        """
        Returns an automatic Kpoint object based on a structure and a kpoint
        density. Uses Gamma centered meshes for hexagonal cells and Monkhorst-Pack grids otherwise.

        Algorithm:
            Uses a simple approach scaling the number of divisions along each
            reciprocal lattice vector proportional to its length.

        Args:
            structure: Input structure
            kppa: Grid density
            chksymbreak: Abinit chksymbreak input variable (None to use the code default).
            use_symmetries: Use spatial symmetries to reduce the number of k-points.
            use_time_reversal: Use time-reversal symmetry to reduce the number of k-points.
            shifts: Shift(s) to be applied to the kpoints.
        """
        lattice = structure.lattice
        lengths = lattice.abc
        shifts = np.reshape(shifts, (-1, 3))
        # Target number of grid points per shifted mesh.
        ngrid = kppa / structure.num_sites / len(shifts)
        mult = (ngrid * lengths[0] * lengths[1] * lengths[2]) ** (1 / 3.0)
        num_div = [int(round(1.0 / lengths[i] * mult)) for i in range(3)]
        # ensure that num_div[i] > 0
        num_div = [i if i > 0 else 1 for i in num_div]
        # Typo fixed in the generated comment ("pymatge" -> "pymatgen").
        comment = "pymatgen.io.abinit generated KPOINTS with grid density = " + "{} / atom".format(kppa)
        return cls(
            mode="monkhorst",
            num_kpts=0,
            kpts=[num_div],
            kpt_shifts=shifts,
            use_symmetries=use_symmetries,
            use_time_reversal=use_time_reversal,
            chksymbreak=chksymbreak,
            comment=comment,
        )

    def to_abivars(self):
        """Dictionary with Abinit variables."""
        return self.abivars

    def as_dict(self):
        """Convert object to dict."""
        enc = MontyEncoder()
        return {
            "mode": self.mode.name,
            "comment": self.comment,
            "num_kpts": self.num_kpts,
            "kpts": enc.default(np.array(self.kpts)),
            "kpt_shifts": self.kpt_shifts,
            "kpts_weights": self.kpts_weights,
            "use_symmetries": self.use_symmetries,
            "use_time_reversal": self.use_time_reversal,
            "chksymbreak": self.chksymbreak,
            "@module": self.__class__.__module__,
            "@class": self.__class__.__name__,
        }

    @classmethod
    def from_dict(cls, d):
        """Build object from dict."""
        d = d.copy()
        d.pop("@module", None)
        d.pop("@class", None)
        dec = MontyDecoder()
        d["kpts"] = dec.process_decoded(d["kpts"])
        return cls(**d)
class Constraints(AbivarAble):
    """This object defines the constraints for structural relaxation"""

    def to_abivars(self):
        """Dictionary with Abinit variables."""
        # Placeholder: constrained relaxation is not implemented yet.
        raise NotImplementedError("")
class RelaxationMethod(AbivarAble, MSONable):
    """
    This object stores the variables for the (constrained) structural optimization
    ionmov and optcell specify the type of relaxation.
    The other variables are optional and their use depend on ionmov and optcell.
    A None value indicates that we use abinit default. Default values can
    be modified by passing them to the constructor.
    The set of variables are constructed in to_abivars depending on ionmov and optcell.
    """

    # Default values for the relaxation variables. Entries set to MANDATORY
    # must be supplied by the caller (see the class-method constructors).
    _default_vars = {
        "ionmov": MANDATORY,
        "optcell": MANDATORY,
        "ntime": 80,
        "dilatmx": 1.05,
        "ecutsm": 0.5,
        "strfact": None,
        "tolmxf": None,
        "strtarget": None,
        "atoms_constraints": {},  # Constraints are stored in a dictionary. {} means if no constraint is enforced.
    }

    IONMOV_DEFAULT = 3
    OPTCELL_DEFAULT = 2

    def __init__(self, *args, **kwargs):
        """Initialize with the default variables, overridden by *args/**kwargs."""
        # Copy the class-level defaults. The original aliased _default_vars,
        # so every update() leaked into the class attribute and was shared
        # by all instances.
        self.abivars = dict(self._default_vars)

        # Overwrite the keys with the args and kwargs passed to constructor.
        self.abivars.update(*args, **kwargs)
        self.abivars = AttrDict(self.abivars)

        for k in self.abivars:
            if k not in self._default_vars:
                raise ValueError("%s: No default value has been provided for key %s" % (self.__class__.__name__, k))

        # Check the *values*: the original compared the keys against MANDATORY,
        # so a missing mandatory variable was never detected.
        for k, v in self.abivars.items():
            if v is MANDATORY:
                raise ValueError(
                    "%s: No default value has been provided for the mandatory key %s" % (self.__class__.__name__, k)
                )

    @classmethod
    def atoms_only(cls, atoms_constraints=None):
        """Relax atomic positions, keep unit cell fixed."""
        if atoms_constraints is None:
            return cls(ionmov=cls.IONMOV_DEFAULT, optcell=0)
        return cls(ionmov=cls.IONMOV_DEFAULT, optcell=0, atoms_constraints=atoms_constraints)

    @classmethod
    def atoms_and_cell(cls, atoms_constraints=None):
        """Relax atomic positions as well as unit cell."""
        if atoms_constraints is None:
            return cls(ionmov=cls.IONMOV_DEFAULT, optcell=cls.OPTCELL_DEFAULT)
        return cls(
            ionmov=cls.IONMOV_DEFAULT,
            optcell=cls.OPTCELL_DEFAULT,
            atoms_constraints=atoms_constraints,
        )

    @property
    def move_atoms(self):
        """True if atoms must be moved."""
        return self.abivars.ionmov != 0

    @property
    def move_cell(self):
        """True if lattice parameters must be optimized."""
        return self.abivars.optcell != 0

    def to_abivars(self):
        """Returns a dictionary with the abinit variables."""
        # These variables are always present.
        out_vars = {
            "ionmov": self.abivars.ionmov,
            "optcell": self.abivars.optcell,
            "ntime": self.abivars.ntime,
        }

        # Atom relaxation.
        if self.move_atoms:
            out_vars.update({"tolmxf": self.abivars.tolmxf})

        if self.abivars.atoms_constraints:
            # Add input variables for constrained relaxation (not coded yet).
            raise NotImplementedError("")
            out_vars.update(self.abivars.atoms_constraints.to_abivars())

        # Cell relaxation.
        if self.move_cell:
            out_vars.update(
                {
                    "dilatmx": self.abivars.dilatmx,
                    "ecutsm": self.abivars.ecutsm,
                    "strfact": self.abivars.strfact,
                    "strtarget": self.abivars.strtarget,
                }
            )
        return out_vars

    def as_dict(self):
        """Convert object to dict."""
        # Serialize the *instance* values: the original dumped the class
        # defaults (including the MANDATORY sentinels), so the dict did not
        # round-trip through from_dict.
        d = dict(self.abivars)
        d["@module"] = self.__class__.__module__
        d["@class"] = self.__class__.__name__
        return d

    @classmethod
    def from_dict(cls, d):
        """Build object from dictionary."""
        d = d.copy()
        d.pop("@module", None)
        d.pop("@class", None)
        return cls(**d)
class PPModelModes(Enum):
    """Different kinds of plasmon-pole models (values map to the Abinit ``ppmodel`` variable)."""

    # NOTE(review): member names double as the strings accepted by
    # PPModel.as_ppmodel; "hybersten" is presumably a misspelling of
    # "hybertsen" kept for backward compatibility — do not rename.
    noppmodel = 0
    godby = 1
    hybersten = 2
    linden = 3
    farid = 4
class PPModel(AbivarAble, MSONable):
    """
    Parameters defining the plasmon-pole technique.
    The common way to instantiate a PPModel object is via the class method PPModel.as_ppmodel(string)
    """

    @classmethod
    def as_ppmodel(cls, obj):
        """
        Constructs an instance of PPModel from obj.

        Accepts obj in the form:

            * PPmodel instance
            * string. e.g "godby:12.3 eV", "linden".
        """
        if isinstance(obj, cls):
            return obj

        # obj is a string: "mode" or "mode:frequency[ unit]".
        if ":" not in obj:
            mode, plasmon_freq = obj, None
        else:
            # Extract mode and plasmon_freq
            mode, plasmon_freq = obj.split(":")
            try:
                plasmon_freq = float(plasmon_freq)
            except ValueError:
                # e.g. "12.3 eV": split off the unit and convert to Hartree.
                plasmon_freq, unit = plasmon_freq.split()
                plasmon_freq = units.Energy(float(plasmon_freq), unit).to("Ha")

        return cls(mode=mode, plasmon_freq=plasmon_freq)

    def __init__(self, mode="godby", plasmon_freq=None):
        """
        Args:
            mode: ppmodel type (PPModelModes member or its name as a string).
            plasmon_freq: Plasmon frequency in Ha.
        """
        if isinstance(mode, str):
            mode = PPModelModes[mode]
        self.mode = mode
        self.plasmon_freq = plasmon_freq

    def __eq__(self, other):
        if other is None:
            return False

        if self.mode != other.mode:
            return False

        # Bug fix: the original called np.allclose(value, None) when only
        # other.plasmon_freq was None, raising TypeError. Handle None on
        # either side explicitly.
        if self.plasmon_freq is None or other.plasmon_freq is None:
            return self.plasmon_freq is None and other.plasmon_freq is None

        return np.allclose(self.plasmon_freq, other.plasmon_freq)

    def __ne__(self, other):
        return not self == other

    def __bool__(self):
        # Truthy iff a real plasmon-pole model is in use.
        return self.mode != PPModelModes.noppmodel

    # py2 old version
    __nonzero__ = __bool__

    def __repr__(self):
        return "<%s at %s, mode = %s>" % (
            self.__class__.__name__,
            id(self),
            str(self.mode),
        )

    def to_abivars(self):
        """Return dictionary with Abinit variables."""
        if self:
            return {"ppmodel": self.mode.value, "ppmfrq": self.plasmon_freq}
        return {}

    @classmethod
    def get_noppmodel(cls):
        """Calculation without plasmon-pole model."""
        return cls(mode="noppmodel", plasmon_freq=None)

    def as_dict(self):
        """Convert object to dictionary."""
        return {
            "mode": self.mode.name,
            "plasmon_freq": self.plasmon_freq,
            "@module": self.__class__.__module__,
            "@class": self.__class__.__name__,
        }

    @staticmethod
    def from_dict(d):
        """Build object from dictionary."""
        return PPModel(mode=d["mode"], plasmon_freq=d["plasmon_freq"])
class HilbertTransform(AbivarAble):
    """
    Parameters for the Hilbert-transform method (Screening code), i.e. the
    frequency mesh used for the spectral function and the frequency mesh
    used for the polarizability.
    """

    def __init__(
        self,
        nomegasf,
        domegasf=None,
        spmeth=1,
        nfreqre=None,
        freqremax=None,
        nfreqim=None,
        freqremin=None,
    ):
        """
        Args:
            nomegasf: Number of points for sampling the spectral function along the real axis.
            domegasf: Step in Ha for the linear mesh used for the spectral function.
            spmeth: Algorithm for the representation of the delta function.
            nfreqre: Number of points along the real axis (linear mesh).
            freqremax: Maximum frequency for W along the real axis (in hartree).
            nfreqim: Number of points along the imaginary axis (Gauss-Legendre mesh).
            freqremin: Minimum frequency for W along the real axis (in hartree).
        """
        # Mesh for the spectral function.
        self.nomegasf = nomegasf
        self.domegasf = domegasf
        self.spmeth = spmeth
        # Mesh for the contour-deformation integration of the self-energy.
        self.nfreqre = nfreqre
        self.freqremax = freqremax
        self.freqremin = freqremin
        self.nfreqim = nfreqim

    def to_abivars(self):
        """Returns a dictionary with the abinit variables."""
        return dict(
            # Spectral function
            nomegasf=self.nomegasf,
            domegasf=self.domegasf,
            spmeth=self.spmeth,
            # Frequency mesh for the polarizability
            nfreqre=self.nfreqre,
            freqremax=self.freqremax,
            nfreqim=self.nfreqim,
            freqremin=self.freqremin,
        )
class ModelDielectricFunction(AbivarAble):
    """Model dielectric function used for BSE calculation."""

    def __init__(self, mdf_epsinf):
        """
        Args:
            mdf_epsinf: Value of epsilon_infinity.
        """
        self.mdf_epsinf = mdf_epsinf

    def to_abivars(self):
        """Return dictionary with abinit variables."""
        return dict(mdf_epsinf=self.mdf_epsinf)
##########################################################################################
# WORK IN PROGRESS ######################################
##########################################################################################
class Screening(AbivarAble):
    """
    This object defines the parameters used for the
    computation of the screening function.
    """

    # Approximations used for W
    _WTYPES = {
        "RPA": 0,
    }

    # Self-consistency modes
    _SC_MODES = {
        "one_shot": 0,
        "energy_only": 1,
        "wavefunctions": 2,
    }

    def __init__(
        self,
        ecuteps,
        nband,
        w_type="RPA",
        sc_mode="one_shot",
        hilbert=None,
        ecutwfn=None,
        inclvkb=2,
    ):
        """
        Args:
            ecuteps: Cutoff energy for the screening (Ha units).
            nband: Number of bands for the Green's function.
            w_type: Screening type.
            sc_mode: Self-consistency mode.
            hilbert: Instance of :class:`HilbertTransform` defining the parameters for the Hilbert transform method.
            ecutwfn: Cutoff energy for the wavefunctions (Default: ecutwfn == ecut).
            inclvkb: Option for the treatment of the dipole matrix elements (NC pseudos).
        """
        if w_type not in self._WTYPES:
            raise ValueError("W_TYPE: %s is not supported" % w_type)

        if sc_mode not in self._SC_MODES:
            raise ValueError("Self-consistency mode %s is not supported" % sc_mode)

        self.ecuteps = ecuteps
        self.nband = nband
        self.w_type = w_type
        self.sc_mode = sc_mode
        self.ecutwfn = ecutwfn
        self.inclvkb = inclvkb

        if hilbert is not None:
            raise NotImplementedError("Hilbert transform not coded yet")
        self.hilbert = hilbert

        # Default values (equivalent to those used in Abinit8)
        self.gwpara = 2
        self.awtr = 1
        self.symchi = 1
        self.optdriver = 3

    @property
    def use_hilbert(self):
        """True if we are using the Hilbert transform method."""
        # Bug fix: the original returned hasattr(self, "hilbert"), which is
        # always True because __init__ unconditionally sets the attribute.
        # That made to_abivars call None.to_abivars() and crash.
        return self.hilbert is not None

    # @property
    # def gwcalctyp(self):
    #    "Return the value of the gwcalctyp input variable"
    #    dig0 = str(self._SIGMA_TYPES[self.type])
    #    dig1 = str(self._SC_MODES[self.sc_mode]
    #    return dig1.strip() + dig0.strip()

    def to_abivars(self):
        """Returns a dictionary with the abinit variables."""
        abivars = {
            "ecuteps": self.ecuteps,
            "ecutwfn": self.ecutwfn,
            "inclvkb": self.inclvkb,
            "gwpara": self.gwpara,
            "awtr": self.awtr,
            "symchi": self.symchi,
            "nband": self.nband,
            # "gwcalctyp": self.gwcalctyp,
            # "fftgw"    : self.fftgw,
            "optdriver": self.optdriver,
        }

        # Variables for the Hilbert transform.
        if self.use_hilbert:
            abivars.update(self.hilbert.to_abivars())

        return abivars
class SelfEnergy(AbivarAble):
    """
    This object defines the parameters used for the computation of the self-energy.
    """

    # Self-energy type -> first digit of gwcalctyp.
    _SIGMA_TYPES = {
        "gw": 0,
        "hartree_fock": 5,
        "sex": 6,
        "cohsex": 7,
        "model_gw_ppm": 8,
        "model_gw_cd": 9,
    }

    # Self-consistency mode -> second digit of gwcalctyp.
    _SC_MODES = {
        "one_shot": 0,
        "energy_only": 1,
        "wavefunctions": 2,
    }

    def __init__(
        self,
        se_type,
        sc_mode,
        nband,
        ecutsigx,
        screening,
        gw_qprange=1,
        ppmodel=None,
        ecuteps=None,
        ecutwfn=None,
        gwpara=2,
    ):
        """
        Args:
            se_type: Type of self-energy (str)
            sc_mode: Self-consistency mode.
            nband: Number of bands for the Green's function
            ecutsigx: Cutoff energy for the exchange part of the self-energy (Ha units).
            screening: :class:`Screening` instance.
            gw_qprange: Option for the automatic selection of k-points and bands for GW corrections.
                See Abinit docs for more detail. The default value makes the code compute the
                QP energies for all the point in the IBZ and one band above and one band below the Fermi level.
            ppmodel: :class:`PPModel` instance with the parameters used for the plasmon-pole technique.
            ecuteps: Cutoff energy for the screening (Ha units). Defaults to screening.ecuteps.
            ecutwfn: Cutoff energy for the wavefunctions (Default: ecutwfn == ecut).
            gwpara: Abinit parallelization level for the GW run.
        """
        if se_type not in self._SIGMA_TYPES:
            raise ValueError("SIGMA_TYPE: %s is not supported" % se_type)

        if sc_mode not in self._SC_MODES:
            raise ValueError("Self-consistency mode %s is not supported" % sc_mode)

        self.type = se_type
        self.sc_mode = sc_mode
        self.nband = nband
        self.ecutsigx = ecutsigx
        self.screening = screening
        self.gw_qprange = gw_qprange
        self.gwpara = gwpara

        if ppmodel is not None:
            # A plasmon-pole model is incompatible with the Hilbert transform.
            assert not screening.use_hilbert
            self.ppmodel = PPModel.as_ppmodel(ppmodel)
        else:
            # Always define the attribute so that use_ppmodel (and direct
            # attribute access) cannot raise AttributeError.
            self.ppmodel = None

        self.ecuteps = ecuteps if ecuteps is not None else screening.ecuteps
        self.ecutwfn = ecutwfn
        self.optdriver = 4

        # band_mode in ["gap", "full"]

        # if isinstance(kptgw, str) and kptgw == "all":
        #    self.kptgw = None
        #    self.nkptgw = None
        # else:
        #    self.kptgw = np.reshape(kptgw, (-1,3))
        #    self.nkptgw =  len(self.kptgw)

        # if bdgw is None:
        #    raise ValueError("bdgw must be specified")

        # if isinstance(bdgw, str):
        #    # TODO add new variable in Abinit so that we can specify
        #    # an energy interval around the KS gap.
        #    homo = float(nele) / 2.0
        #    #self.bdgw =

        # else:
        #    self.bdgw = np.reshape(bdgw, (-1,2))

        # self.freq_int = freq_int

    @property
    def use_ppmodel(self):
        """True if we are using the plasmon-pole approximation."""
        return self.ppmodel is not None

    @property
    def gwcalctyp(self):
        """Returns the value of the gwcalctyp input variable."""
        dig0 = str(self._SIGMA_TYPES[self.type])
        dig1 = str(self._SC_MODES[self.sc_mode])
        return dig1.strip() + dig0.strip()

    @property
    def symsigma(self):
        """1 if symmetries can be used to reduce the number of q-points."""
        return 1 if self.sc_mode == "one_shot" else 0

    def to_abivars(self):
        """Returns a dictionary with the abinit variables."""
        abivars = dict(
            gwcalctyp=self.gwcalctyp,
            ecuteps=self.ecuteps,
            ecutsigx=self.ecutsigx,
            symsigma=self.symsigma,
            gw_qprange=self.gw_qprange,
            gwpara=self.gwpara,
            optdriver=self.optdriver,
            nband=self.nband
            # "ecutwfn"  : self.ecutwfn,
            # "kptgw"    : self.kptgw,
            # "nkptgw"   : self.nkptgw,
            # "bdgw"     : self.bdgw,
        )

        # FIXME: problem with the spin
        # assert len(self.bdgw) == self.nkptgw

        # ppmodel variables
        if self.use_ppmodel:
            abivars.update(self.ppmodel.to_abivars())

        return abivars
class ExcHamiltonian(AbivarAble):
    """This object contains the parameters for the solution of the Bethe-Salpeter equation."""

    # Types of excitonic Hamiltonian.
    _EXC_TYPES = {
        "TDA": 0,  # Tamm-Dancoff approximation.
        "coupling": 1,  # Calculation with coupling.
    }

    # Algorithms used to compute the macroscopic dielectric function
    # and/or the exciton wavefunctions.
    _ALGO2VAR = {
        "direct_diago": 1,
        "haydock": 2,
        "cg": 3,
    }

    # Options specifying the treatment of the Coulomb term.
    _COULOMB_MODES = ["diago", "full", "model_df"]

    def __init__(
        self,
        bs_loband,
        nband,
        mbpt_sciss,
        coulomb_mode,
        ecuteps,
        spin_mode="polarized",
        mdf_epsinf=None,
        exc_type="TDA",
        algo="haydock",
        with_lf=True,
        bs_freq_mesh=None,
        zcut=None,
        **kwargs,
    ):
        r"""
        Args:
            bs_loband: Lowest band index (Fortran convention) used in the e-h basis set.
                Can be scalar or array of shape (nsppol,). Must be >= 1 and <= nband
            nband: Max band index used in the e-h basis set.
            mbpt_sciss: Scissors energy in Hartree.
            coulomb_mode: Treatment of the Coulomb term.
            ecuteps: Cutoff energy for W in Hartree.
            spin_mode: Spin polarization mode (passed to SpinMode.as_spinmode).
            mdf_epsinf: Macroscopic dielectric function :math:`\\epsilon_\\inf` used in
                the model dielectric function.
            exc_type: Approximation used for the BSE Hamiltonian
            algo: Algorithm for the solution ("direct_diago", "haydock" or "cg").
            with_lf: True if local field effects are included <==> exchange term is included
            bs_freq_mesh: Frequency mesh for the macroscopic dielectric function (start, stop, step) in Ha.
            zcut: Broadening parameter in Ha.
            **kwargs:
                Extra keywords
        """
        spin_mode = SpinMode.as_spinmode(spin_mode)

        # We want an array bs_loband(nsppol).
        try:
            bs_loband = np.reshape(bs_loband, spin_mode.nsppol)
        except ValueError:
            bs_loband = np.array(spin_mode.nsppol * [int(bs_loband)])

        self.bs_loband = bs_loband
        self.nband = nband
        self.mbpt_sciss = mbpt_sciss
        self.coulomb_mode = coulomb_mode
        assert coulomb_mode in self._COULOMB_MODES
        self.ecuteps = ecuteps

        self.mdf_epsinf = mdf_epsinf
        self.exc_type = exc_type
        assert exc_type in self._EXC_TYPES
        self.algo = algo
        assert algo in self._ALGO2VAR
        self.with_lf = with_lf

        # if bs_freq_mesh is not given, abinit will select its own mesh.
        self.bs_freq_mesh = np.array(bs_freq_mesh) if bs_freq_mesh is not None else bs_freq_mesh
        self.zcut = zcut
        self.optdriver = 99

        # Extra options.
        self.kwargs = kwargs
        # if "chksymbreak" not in self.kwargs:
        #    self.kwargs["chksymbreak"] = 0

        # Consistency check.
        # Bug fix: band indices follow the Fortran convention (>= 1), so 0 is
        # invalid too; the original tested "< 0" while the error message and
        # the docstring both said "<= 0".
        if any(bs_loband <= 0):
            raise ValueError("bs_loband <= 0 while it is %s" % bs_loband)
        if any(bs_loband >= nband):
            raise ValueError("bs_loband (%s) >= nband (%s)" % (bs_loband, nband))

    @property
    def inclvkb(self):
        """Treatment of the dipole matrix element (NC pseudos, default is 2)"""
        return self.kwargs.get("inclvkb", 2)

    @property
    def use_haydock(self):
        """True if we are using the Haydock iterative technique."""
        return self.algo == "haydock"

    @property
    def use_cg(self):
        """True if we are using the conjugate gradient method."""
        return self.algo == "cg"

    @property
    def use_direct_diago(self):
        """True if we are performing the direct diagonalization of the BSE Hamiltonian."""
        return self.algo == "direct_diago"

    def to_abivars(self):
        """Returns a dictionary with the abinit variables."""
        abivars = dict(
            bs_calctype=1,
            bs_loband=self.bs_loband,
            # nband=self.nband,
            mbpt_sciss=self.mbpt_sciss,
            ecuteps=self.ecuteps,
            bs_algorithm=self._ALGO2VAR[self.algo],
            bs_coulomb_term=21,
            mdf_epsinf=self.mdf_epsinf,
            bs_exchange_term=1 if self.with_lf else 0,
            inclvkb=self.inclvkb,
            zcut=self.zcut,
            bs_freq_mesh=self.bs_freq_mesh,
            bs_coupling=self._EXC_TYPES[self.exc_type],
            optdriver=self.optdriver,
        )

        if self.use_haydock:
            # FIXME
            abivars.update(
                bs_haydock_niter=100,  # No. of iterations for Haydock
                bs_hayd_term=0,  # No terminator
                bs_haydock_tol=[0.05, 0],  # Stopping criteria
            )
        elif self.use_direct_diago:
            raise NotImplementedError("")
        elif self.use_cg:
            raise NotImplementedError("")
        else:
            # Unreachable in practice: __init__ asserts algo is known.
            raise ValueError("Unknown algorithm for EXC: %s" % self.algo)

        # Add extra kwargs
        abivars.update(self.kwargs)

        return abivars
| {
"content_hash": "8b2e436bc1120082d0d5beaf4cae52fc",
"timestamp": "",
"source": "github",
"line_count": 1719,
"max_line_length": 119,
"avg_line_length": 31.641070389761488,
"alnum_prop": 0.5658105201228144,
"repo_name": "gmatteo/pymatgen",
"id": "7c5bf9c70e0381d82d62ede3354794dd74e1c45a",
"size": "54550",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymatgen/io/abinit/abiobjects.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "277"
},
{
"name": "Python",
"bytes": "7840569"
},
{
"name": "Shell",
"bytes": "711"
}
],
"symlink_target": ""
} |
from bmbb_fish import BmBB
# Instantiate the fish controller and power it down.
fish = BmBB()
fish.shut_down_fish()
| {
"content_hash": "3dbe7dbd3b046e620cf3412dbbf9ef30",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 26,
"avg_line_length": 14.2,
"alnum_prop": 0.704225352112676,
"repo_name": "mnr/rubberfish",
"id": "ed8c09aca2c62757c5818a897c98b903279d9a6d",
"size": "123",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "software/fishShutdown.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "57058"
},
{
"name": "Shell",
"bytes": "6072"
}
],
"symlink_target": ""
} |
import re
# The original password, stored as a list of single characters so that the
# helper functions can mutate it in place.
puzzle_input = ['v', 'z', 'b', 'x', 'k', 'g', 'h', 'b']
# Checks for a run of 3 consecutively increasing letters (e.g. 'abc') in the password
def three_straight_letters(password=None):
    """Return True if the password contains three consecutive ascending letters.

    password: a sequence of single characters; defaults to the global
    puzzle_input (preserves the original call signature).
    """
    if password is None:
        password = puzzle_input
    # Bug fix: the original iterated range(len - 3), which skipped the
    # final triple (indices len-3 .. len-1); range(len - 2) covers every
    # possible starting index.
    for i in range(len(password) - 2):
        if (ord(password[i]) + 1 == ord(password[i + 1])
                and ord(password[i + 1]) + 1 == ord(password[i + 2])):
            return True
    return False
# Checks for 2 pairs of two equal letters, where the pairs use different letters
def has_double_doubles(password=None):
    """Return True if the password contains two pairs of distinct letters.

    'aabb' qualifies; 'aaa' and 'aaaa' do not (pairs must be of different
    letters and must not overlap). Defaults to the global puzzle_input.
    """
    if password is None:
        password = puzzle_input
    double_count = 0
    last_double = None
    for i in range(len(password) - 1):
        if password[i] != last_double and password[i] == password[i + 1]:
            double_count += 1
            if double_count == 2:
                return True
            # Bug fix: remember the letter of the counted pair. The original
            # only updated last_double on the non-pair branch, so 'aaa' was
            # counted as two doubles.
            last_double = password[i]
    return False
# Move the letter at position up by 1
# If the letter is 'z', make it 'a' and increment the previous letter
# Skip the letters 'i', 'o' and 'l'
def increment_by_one(position, password=None):
    """Increment the letter at *position* in place, with carry and letter skips.

    password: a mutable sequence of single characters; defaults to the global
    puzzle_input (preserves the original call signature).
    NOTE(review): if position 0 holds 'z', the carry recurses to index -1 and
    wraps to the last character — behavior kept from the original; verify.
    """
    if password is None:
        password = puzzle_input
    if password[position] == 'z':
        password[position] = 'a'
        increment_by_one(position - 1, password)
    else:
        password[position] = chr(ord(password[position]) + 1)
        # Forbidden letters are skipped by bumping one step further.
        if password[position] in {'i', 'o', 'l'}:
            password[position] = chr(ord(password[position]) + 1)
# Skips any letters in the entire string which are 'i', 'o', or 'l'
def increment_all_until_valid():
    """Bump the first forbidden letter ('i', 'o', 'l') and reset what follows.

    After incrementing a forbidden letter past itself, every later position
    is reset to 'a', giving the smallest password above the invalid one.
    The loop then continues over the trailing 'a's, which are all valid.
    """
    global puzzle_input  # kept from original; the list is mutated in place
    for i in range(len(puzzle_input)):
        if puzzle_input[i] in {'i', 'o', 'l'}:
            increment_by_one(i)
            for j in range(i + 1, len(puzzle_input)):
                puzzle_input[j] = 'a'
# Part 1: advance to the next valid password.
# NOTE: the hard-coded index 7 assumes an 8-character password.
increment_by_one(7)
increment_all_until_valid()
while not has_double_doubles() or not three_straight_letters():
    increment_by_one(7)
# Printing out the new password
print('Santa\'s new password should be:', puzzle_input)
# Part 2: the password expired again; repeat the search from the new password.
increment_by_one(7)
increment_all_until_valid()
while not has_double_doubles() or not three_straight_letters():
    increment_by_one(7)
print('Santa needs a new password! Now it\'s', puzzle_input)
| {
"content_hash": "b59dc414e877e74e462c2898988724a4",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 79,
"avg_line_length": 29.52173913043478,
"alnum_prop": 0.6745213549337261,
"repo_name": "joseph-roque/advent-of-code",
"id": "691062d516ceafa64b19c60b78b97c71d01832c9",
"size": "2261",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "day_11/corporatepolicy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "89177"
}
],
"symlink_target": ""
} |
from a2billing_spyne.model import SipBuddy, Extensions
from contextlib import closing
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
# --- Import SIP peers from an Asterisk sip.conf into the SipBuddy table ---
with closing(open("sip.conf")) as file:
    table = file.readlines()

sip = SipBuddy._type_info
db = create_engine('postgres://postgres:@localhost:5432/radius')
session = Session(db)

# Attributes copied verbatim from each sip.conf section.
SIP_FIELDS = ("qualify", "type", "host", "context", "secret", "dtmfmode", "callerid")

name = None
fields = {}

def flush_buddy():
    """Persist the peer parsed so far (the [general] section is skipped)."""
    if name is not None and name != 'general':
        session.add(SipBuddy(name=name, **fields))
        session.flush()

for line in table:
    if line[0] == '[':
        # New section header: save the previous peer first.
        flush_buddy()
        name = line.split()[0][1:-1]
        # Bug fix: reset per-section state. The original kept the previous
        # section's values, so unset attributes leaked from peer to peer.
        fields = {}
    else:
        data = line.split("=")
        if name is not None and name != 'general' and data[0].isalpha():
            key = data[0]
            if key in SIP_FIELDS and key in sip.keys() and len(data) >= 2:
                # Strip the trailing newline from the value.
                # NOTE(review): this also drops the last character of a value
                # on a final line without '\n' -- kept from the original.
                fields[key] = data[1][:-1]

# Bug fix: the original never inserted the last section in the file because
# a peer was only saved when the *next* header was encountered.
flush_buddy()
session.commit()
with closing(open("extensions.conf")) as file:
table = file.readlines()
exten = None
priority = None
app = None
appdata = None
context = None
sip_buddies = session.query(SipBuddy).all()
contexts = set()
for sb in sip_buddies:
contexts.add(sb.context)
for line in table:
if line[0] == '[':
context = line.split()
context = (context[0])[1:-1]
print context
else:
data = line.split(" => ")
if context is not None and context in contexts and data[0].isalpha():
if data[0] == "exten":
areas = data[1].split(",")
appdatavalue = areas[2].split("(")
exten = areas[0]
if exten[0] == "_":
exten = exten[1:-1]
priority = areas[1]
app = appdatavalue[0]
appdata = (appdatavalue[1])[:-2]
print data[0], exten, priority, app, appdata
if context in contexts:
session.add(Extensions(
exten=exten,
priority=priority,
app=app,
appdata=appdata,
context=context,
))
session.flush()
session.commit()
| {
"content_hash": "501f9bf8549d99a097e1f35d942680f1",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 77,
"avg_line_length": 26.00854700854701,
"alnum_prop": 0.4972067039106145,
"repo_name": "plq/a2billing-spyne",
"id": "5b26d56661feec8cb7228750cb37dccb5d49ecae",
"size": "3064",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "a2billing_spyne/service/conf_reader.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1733"
},
{
"name": "HTML",
"bytes": "246"
},
{
"name": "PLpgSQL",
"bytes": "136769"
},
{
"name": "Python",
"bytes": "74881"
},
{
"name": "Shell",
"bytes": "2556"
}
],
"symlink_target": ""
} |
from inspect import isclass
from .target import Target, ValueTarget
from .flex import FlexSetter, FlexParameter
from .modview import ModPower, ModView
from .snap import SnapshotButton, SnapshotSelector
from .parameter import Parameter
from .switchview import SwitchView
from .pageflip import PageFlip
from .bpm import BPM
# Registry of every Target subclass imported above, keyed by class name.
# NOTE(review): issubclass(C, Target) is True for Target itself, so the base
# class is presumably included as well — confirm that this is intended.
TARGETS = {C.__name__: C for C in globals().values()
           if isclass(C) and issubclass(C, Target)}
def get_target(name):
    """Return the Target subclass registered under *name*.

    Raises:
        KeyError: if *name* is unknown; the message lists the available
        targets (the bare dict lookup only echoed the missing key).
    """
    try:
        return TARGETS[name]
    except KeyError:
        raise KeyError(
            "Unknown target %r; available targets: %s" % (name, sorted(TARGETS)))
| {
"content_hash": "0c760d6a8fb728b4bf2671359cda023b",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 63,
"avg_line_length": 25.94736842105263,
"alnum_prop": 0.7403651115618661,
"repo_name": "echolox/smartbcr2k",
"id": "be609009f13c71d946e309e3fcf434bb3be9bf09",
"size": "493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "targets/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "135361"
}
],
"symlink_target": ""
} |
from scanForSections import parseOrigin
from scanForSections import parseCategories
from scanForProducts import parseProductPages
from scanForItems import parseItemPages
from scanForItems import checkAvailability
from scanForItems import itemList
def main():
    # Entry point: crawl the whole site for purchasable items.
    scanEntireSite()
def scanEntireSite():
    """Crawl the entire site, locating every single purchasable item.

    The site is structured like::

        Cowboom
        -> Categories
        -> Subcategories
        -> Products (lists of items)
        -> Individual Items

    We breadth-first search down to the Subcategories layer and then
    depth-first search down to each item.

    Caveat: there is overlap of Products between different subcategories,
    so it would be wiser to build a list of unique products before scanning
    one layer deeper.

    Note: this module uses Python 2 print statements and network-backed
    parse helpers; a full run takes on the order of 15-20 minutes.
    """
    # Start at the front page
    categories = parseOrigin()
    print categories
    # If categories were found
    if categories:
        # Scan for subcategories
        siteStructure = parseCategories(categories)
        print siteStructure
        # If subcategories were found
        if siteStructure:
            for category, subcategories in siteStructure.iteritems():
                # Iterate through subcategories
                for subcategory in subcategories:
                    # Parse each subcategory for products
                    categoryStructure = parseProductPages(category,subcategory)
                    print categoryStructure
                    # Iterate through products in each subcategory
                    for contentID, productName in categoryStructure.iteritems():
                        # If in stock (out-of-stock products are skipped entirely)
                        if checkAvailability(contentID):
                            # Start a list of individual items for purchase
                            items = itemList()
                            items = parseItemPages(contentID,items)
                            items.show()
# Full scan took 19m20.344s
# 15m34.936s
if __name__ == "__main__":
main() | {
"content_hash": "e554900e88781e172488f504ecc681bc",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 72,
"avg_line_length": 28.903225806451612,
"alnum_prop": 0.7349330357142857,
"repo_name": "cmrust/boomscraper",
"id": "fd8365a8cfab3334414da862e72fab8d031e8b83",
"size": "1810",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12595"
}
],
"symlink_target": ""
} |
"""The tests for the hassio component."""
import asyncio
import aiohttp
@asyncio.coroutine
def test_api_ping(hassio_handler, aioclient_mock):
    """Supervisor ping returning 'ok' means the handler is connected."""
    aioclient_mock.get(
        "http://127.0.0.1/supervisor/ping", json={'result': 'ok'})

    connected = yield from hassio_handler.is_connected()
    assert connected
    assert aioclient_mock.call_count == 1
@asyncio.coroutine
def test_api_ping_error(hassio_handler, aioclient_mock):
    """Supervisor ping returning 'error' means we are not connected."""
    aioclient_mock.get(
        "http://127.0.0.1/supervisor/ping", json={'result': 'error'})

    connected = yield from hassio_handler.is_connected()
    assert not connected
    assert aioclient_mock.call_count == 1
@asyncio.coroutine
def test_api_ping_exeption(hassio_handler, aioclient_mock):
    """A transport error during ping means we are not connected."""
    aioclient_mock.get(
        "http://127.0.0.1/supervisor/ping", exc=aiohttp.ClientError())

    connected = yield from hassio_handler.is_connected()
    assert not connected
    assert aioclient_mock.call_count == 1
@asyncio.coroutine
def test_api_homeassistant_info(hassio_handler, aioclient_mock):
    """A successful /homeassistant/info call returns the payload data."""
    aioclient_mock.get(
        "http://127.0.0.1/homeassistant/info",
        json={'result': 'ok', 'data': {'last_version': '10.0'}})

    info = yield from hassio_handler.get_homeassistant_info()
    assert info['last_version'] == "10.0"
    assert aioclient_mock.call_count == 1
@asyncio.coroutine
def test_api_homeassistant_info_error(hassio_handler, aioclient_mock):
    """An 'error' result from /homeassistant/info yields None."""
    aioclient_mock.get(
        "http://127.0.0.1/homeassistant/info",
        json={'result': 'error', 'message': None})

    info = yield from hassio_handler.get_homeassistant_info()
    assert info is None
    assert aioclient_mock.call_count == 1
@asyncio.coroutine
def test_api_homeassistant_stop(hassio_handler, aioclient_mock):
    """Stopping Home Assistant succeeds when the API answers 'ok'."""
    aioclient_mock.post(
        "http://127.0.0.1/homeassistant/stop", json={'result': 'ok'})

    stopped = yield from hassio_handler.stop_homeassistant()
    assert stopped
    assert aioclient_mock.call_count == 1
@asyncio.coroutine
def test_api_homeassistant_restart(hassio_handler, aioclient_mock):
    """Restarting Home Assistant succeeds when the API answers 'ok'."""
    aioclient_mock.post(
        "http://127.0.0.1/homeassistant/restart", json={'result': 'ok'})

    restarted = yield from hassio_handler.restart_homeassistant()
    assert restarted
    assert aioclient_mock.call_count == 1
@asyncio.coroutine
def test_api_homeassistant_config(hassio_handler, aioclient_mock):
    """The config check returns the full response payload."""
    aioclient_mock.post(
        "http://127.0.0.1/homeassistant/check",
        json={'result': 'ok', 'data': {'test': 'bla'}})

    resp = yield from hassio_handler.check_homeassistant_config()
    assert resp['data']['test'] == 'bla'
    assert aioclient_mock.call_count == 1
| {
"content_hash": "7ea1f97290661ff4590bb8dc986e5ef0",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 72,
"avg_line_length": 32.666666666666664,
"alnum_prop": 0.6751700680272109,
"repo_name": "persandstrom/home-assistant",
"id": "78745489a788282dd442fb31346ad85f894ab1e8",
"size": "2940",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/components/hassio/test_handler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1067"
},
{
"name": "Python",
"bytes": "11745210"
},
{
"name": "Ruby",
"bytes": "518"
},
{
"name": "Shell",
"bytes": "16652"
}
],
"symlink_target": ""
} |
"""Hinted Support Vector Machine
This module contains a class that implements Hinted Support Vector Machine, an
active learning algorithm.
Standalone hintsvm can be retrieved from https://github.com/yangarbiter/hintsvm
"""
import numpy as np
from libact.base.interfaces import QueryStrategy
from libact.query_strategies._hintsvm import hintsvm_query
from libact.utils import inherit_docstring_from, seed_random_state, zip
class HintSVM(QueryStrategy):

    r"""Hinted Support Vector Machine

    Hinted Support Vector Machine is an active learning algorithm within the
    hinted sampling framework with an extended support vector machine.

    Parameters
    ----------
    Cl : float, >0, optional (default=0.1)
        The weight of the classification error on labeled pool.

    Ch : float, >0, optional (default=0.1)
        The weight of the hint error on hint pool.

    p : float, >0 and <=1, optional (default=.5)
        The probability to select an instance from unlabeled pool to hint pool.

    random_state : {int, np.random.RandomState instance, None}, optional (default=None)
        If int or None, random_state is passed as parameter to generate
        np.random.RandomState instance. if np.random.RandomState instance,
        random_state is the random number generator.

    kernel : {'linear', 'poly', 'rbf', 'sigmoid'}, optional (default='linear')
        linear: u'\*v
        poly: (gamma\*u'\*v + coef0)^degree
        rbf: exp(-gamma\*|u-v|^2)
        sigmoid: tanh(gamma\*u'\*v + coef0)

    degree : int, optional (default=3)
        Parameter for kernel function.

    gamma : float, optional (default=0.1)
        Parameter for kernel function.

    coef0 : float, optional (default=0.)
        Parameter for kernel function.

    tol : float, optional (default=1e-3)
        Tolerance of termination criterion.

    shrinking : {0, 1}, optional (default=1)
        Whether to use the shrinking heuristics.

    cache_size : float, optional (default=100.)
        Set cache memory size in MB.

    verbose : int, optional (default=0)
        Set verbosity level for hintsvm solver.

    Attributes
    ----------
    random_states\_ : np.random.RandomState instance
        The random number generator using.

    Examples
    --------
    Here is an example of declaring a HintSVM query_strategy object:

    .. code-block:: python

       from libact.query_strategies import HintSVM

       qs = HintSVM(
            dataset, # Dataset object
            Cl=0.01,
            p=0.8,
            )

    References
    ----------
    .. [1] Li, Chun-Liang, Chun-Sung Ferng, and Hsuan-Tien Lin. "Active Learning
           with Hinted Support Vector Machine." ACML. 2012.

    .. [2] Chun-Liang Li, Chun-Sung Ferng, and Hsuan-Tien Lin. Active learning
           using hint information. Neural Computation, 27(8):1738--1765, August
           2015.
    """

    def __init__(self, *args, **kwargs):
        super(HintSVM, self).__init__(*args, **kwargs)

        # Weight on labeled data's classification error
        self.cl = kwargs.pop('Cl', 0.1)
        if self.cl <= 0:
            raise ValueError('Parameter Cl should be greater than 0.')

        # Weight on hinted data's classification error
        self.ch = kwargs.pop('Ch', 0.1)
        if self.ch <= 0:
            # BUG FIX: this previously reported 'Cl' for an invalid 'Ch'.
            raise ValueError('Parameter Ch should be greater than 0.')

        # Probability of sampling a data from unlabeled pool to hinted pool
        self.p = kwargs.pop('p', 0.5)
        if self.p > 1.0 or self.p < 0.0:
            raise ValueError(
                'Parameter p should be greater than or equal to 0 and less '
                'than or equal to 1.'
            )

        random_state = kwargs.pop('random_state', None)
        self.random_state_ = seed_random_state(random_state)

        # svm solver parameters, forwarded verbatim to the hintsvm backend
        self.svm_params = {}
        self.svm_params['kernel'] = kwargs.pop('kernel', 'linear')
        self.svm_params['degree'] = kwargs.pop('degree', 3)
        self.svm_params['gamma'] = kwargs.pop('gamma', 0.1)
        self.svm_params['coef0'] = kwargs.pop('coef0', 0.)
        self.svm_params['tol'] = kwargs.pop('tol', 1e-3)
        self.svm_params['shrinking'] = kwargs.pop('shrinking', 1)
        self.svm_params['cache_size'] = kwargs.pop('cache_size', 100.)
        self.svm_params['verbose'] = kwargs.pop('verbose', 0)

        self.svm_params['C'] = self.cl

    @inherit_docstring_from(QueryStrategy)
    def make_query(self):
        dataset = self.dataset
        unlabeled_entry_ids, unlabeled_pool = dataset.get_unlabeled_entries()
        labeled_pool, y = dataset.get_labeled_entries()
        if len(np.unique(y)) > 2:
            raise ValueError("HintSVM query strategy support binary class "
                "active learning only. Found %s classes" % len(np.unique(y)))

        # Draw the hint pool from the unlabeled pool.
        # NOTE(review): np.random.RandomState.choice samples with replacement
        # by default, so the hint pool may contain duplicates — presumably
        # intentional; confirm against the reference implementation.
        hint_pool_idx = self.random_state_.choice(
            len(unlabeled_pool), int(len(unlabeled_pool) * self.p))
        hint_pool = np.array(unlabeled_pool)[hint_pool_idx]

        # Labeled examples get unit weight; hinted examples are down/up-weighted
        # by Ch/Cl and given the pseudo-label 0.
        weight = [1.0 for _ in range(len(labeled_pool))] +\
                 [(self.ch / self.cl) for _ in range(len(hint_pool))]
        y = list(y) + [0 for _ in range(len(hint_pool))]
        X = [x for x in labeled_pool] +\
            [x for x in hint_pool]

        p_val = hintsvm_query(
            np.array(X, dtype=np.float64),
            np.array(y, dtype=np.float64),
            np.array(weight, dtype=np.float64),
            np.array(unlabeled_pool, dtype=np.float64),
            self.svm_params)

        # Query the unlabeled instance with the largest |decision value|.
        p_val = [abs(float(val[0])) for val in p_val]
        idx = int(np.argmax(p_val))
        return unlabeled_entry_ids[idx]
| {
"content_hash": "0e7cfb866dfaf98b36b1197d79db6f2c",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 87,
"avg_line_length": 35.56875,
"alnum_prop": 0.6099103848181339,
"repo_name": "ntucllab/libact",
"id": "f6252cc729c60be01c1348f917e4a7376eaf76e0",
"size": "5691",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libact/query_strategies/hintsvm.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "24287"
},
{
"name": "C++",
"bytes": "37716"
},
{
"name": "Python",
"bytes": "206547"
}
],
"symlink_target": ""
} |
"""
pycompilation is a package for meta programming. It aims to support
multiple compilers: GNU, Intel, PGI.
"""
from ._release import __version__
from .compilation import (
compile_sources, link_py_so, src2obj,
compile_link_import_py_ext, compile_link_import_strings
)
from .util import (
missing_or_other_newer, md5_of_file,
import_module_from_file, CompilationError, FileNotFoundError
)
| {
"content_hash": "48c149a6f4b6c2e1405632fa7a3ec935",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 67,
"avg_line_length": 25.5625,
"alnum_prop": 0.7310513447432763,
"repo_name": "bjodah/pycompilation",
"id": "f393872241df083f178cd3eb102379d981f22acc",
"size": "409",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pycompilation/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "68718"
},
{
"name": "Shell",
"bytes": "8580"
}
],
"symlink_target": ""
} |
from bs4 import BeautifulSoup
import re
def getCities():
    """Scrape the saved craigslist sites page ('sites.htm').

    Returns a dict mapping city/area name -> its craigslist URL, extracted
    from the <li><a href=...> entries under the page's first heading.
    """
    with open("sites.htm") as sites:
        markup = sites.read()

    soup = BeautifulSoup(markup, "html.parser")
    link_pattern = re.compile('<li><a href="(.+)">(.+)</a>')

    places = {}
    for column_div in soup.h1.next_sibling.next_sibling:
        for state in column_div:
            for city in state:
                match = link_pattern.search(str(city))
                if match:
                    places[match.group(2)] = match.group(1)
    return places
| {
"content_hash": "93e86b3683dc8460006bf456f8a42193",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 62,
"avg_line_length": 22.05263157894737,
"alnum_prop": 0.6229116945107399,
"repo_name": "MuSystemsAnalysis/craigslist_area_search",
"id": "fec922ddaec07a638062872503e9bcf797201956",
"size": "642",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scrapeSites.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "83421"
},
{
"name": "Python",
"bytes": "3550"
}
],
"symlink_target": ""
} |
__version__=''' $Id: _fontdata.py 3959 2012-09-27 14:39:39Z robin $ '''
__doc__="""Database of font related things
- standardFonts - tuple of the 14 standard string font names
- standardEncodings - tuple of the known standard font names
- encodings - a mapping object from standard encoding names (and minor variants)
to the encoding vectors ie the tuple of string glyph names
- widthsByFontGlyph - fontname x glyphname --> width of glyph
- widthVectorsByFont - fontName -> vector of widths
This module defines a static, large data structure. At the request
of the Jython project, we have split this off into separate modules
as Jython cannot handle more than 64k of bytecode in the 'top level'
code of a Python module.
"""
import os, sys
# mapping of name to width vector, starts empty until fonts are added
# e.g. widths['Courier'] = [...600,600,600,...]
widthVectorsByFont = {}
fontsByName = {}
fontsByBaseEnc = {}
# this is a list of the standard 14 font names in Acrobat Reader
standardFonts = (
'Courier', 'Courier-Bold', 'Courier-Oblique', 'Courier-BoldOblique',
'Helvetica', 'Helvetica-Bold', 'Helvetica-Oblique', 'Helvetica-BoldOblique',
'Times-Roman', 'Times-Bold', 'Times-Italic', 'Times-BoldItalic',
'Symbol','ZapfDingbats')
standardFontAttributes = {
#family, bold, italic defined for basic ones
'Courier':('Courier',0,0),
'Courier-Bold':('Courier',1,0),
'Courier-Oblique':('Courier',0,1),
'Courier-BoldOblique':('Courier',1,1),
'Helvetica':('Helvetica',0,0),
'Helvetica-Bold':('Helvetica',1,0),
'Helvetica-Oblique':('Helvetica',0,1),
'Helvetica-BoldOblique':('Helvetica',1,1),
'Times-Roman':('Times-Roman',0,0),
'Times-Bold':('Times-Roman',1,0),
'Times-Italic':('Times-Roman',0,1),
'Times-BoldItalic':('Times-Roman',1,1),
'Symbol':('Symbol',0,0),
'ZapfDingbats':('ZapfDingbats',0,0)
}
#this maps fontnames to the equivalent filename root.
_font2fnrMapWin32 = {
'symbol': 'sy______',
'zapfdingbats': 'zd______',
'helvetica': '_a______',
'helvetica-bold': '_ab_____',
'helvetica-boldoblique': '_abi____',
'helvetica-oblique': '_ai_____',
'times-bold': '_eb_____',
'times-bolditalic': '_ebi____',
'times-italic': '_ei_____',
'times-roman': '_er_____',
'courier-bold': 'cob_____',
'courier-boldoblique': 'cobo____',
'courier': 'com_____',
'courier-oblique': 'coo_____',
}
if sys.platform in ('linux2',):
_font2fnrMapLinux2 ={
'symbol': 'Symbol',
'zapfdingbats': 'ZapfDingbats',
'helvetica': 'Arial',
'helvetica-bold': 'Arial-Bold',
'helvetica-boldoblique': 'Arial-BoldItalic',
'helvetica-oblique': 'Arial-Italic',
'times-bold': 'TimesNewRoman-Bold',
'times-bolditalic':'TimesNewRoman-BoldItalic',
'times-italic': 'TimesNewRoman-Italic',
'times-roman': 'TimesNewRoman',
'courier-bold': 'Courier-Bold',
'courier-boldoblique': 'Courier-BoldOblique',
'courier': 'Courier',
'courier-oblique': 'Courier-Oblique',
}
_font2fnrMap = _font2fnrMapLinux2
for k, v in _font2fnrMap.items():
if k in _font2fnrMapWin32.keys():
_font2fnrMapWin32[v.lower()] = _font2fnrMapWin32[k]
del k, v
else:
_font2fnrMap = _font2fnrMapWin32
def _findFNR(fontName):
    """Map a font name (case-insensitively) to its filename root."""
    key = fontName.lower()
    return _font2fnrMap[key]
from reportlab.rl_config import T1SearchPath
from reportlab.lib.utils import rl_isfile
def _searchT1Dirs(n, rl_isfile=rl_isfile, T1SearchPath=T1SearchPath):
    """Return the full path of file *n* in the first matching Type-1 search
    directory, or None if it is found nowhere.

    rl_isfile and T1SearchPath are bound as defaults at definition time so
    later monkey-patching of the modules does not affect this function.
    """
    assert T1SearchPath!=[], "No Type-1 font search path"
    for directory in T1SearchPath:
        candidate = os.path.join(directory, n)
        if rl_isfile(candidate):
            return candidate
    return None
del T1SearchPath, rl_isfile
def findT1File(fontName, ext='.pfb'):
    """Locate the Type-1 font file for *fontName*.

    On linux2 with the default '.pfb' extension, first try the Linux name
    (no extension) and then the Win32 filename root with the extension;
    finally fall back to the platform filename root plus extension.
    """
    if sys.platform in ('linux2',) and ext == '.pfb':
        try:
            found = _searchT1Dirs(_findFNR(fontName))
            if found:
                return found
        except:
            pass
        try:
            found = _searchT1Dirs(_font2fnrMapWin32[fontName.lower()] + ext)
            if found:
                return found
        except:
            pass
    return _searchT1Dirs(_findFNR(fontName) + ext)
# this lists the predefined font encodings - WinAnsi and MacRoman. We have
# not added MacExpert - it's possible, but would complicate life and nobody
# is asking. StandardEncoding means something special.
standardEncodings = ('WinAnsiEncoding','MacRomanEncoding','StandardEncoding','SymbolEncoding','ZapfDingbatsEncoding','PDFDocEncoding', 'MacExpertEncoding')
#this is the global mapping of standard encodings to name vectors
class _Name2StandardEncodingMap(dict):
'''Trivial fake dictionary with some [] magic'''
_XMap = {'winansi':'WinAnsiEncoding','macroman': 'MacRomanEncoding','standard':'StandardEncoding','symbol':'SymbolEncoding', 'zapfdingbats':'ZapfDingbatsEncoding','pdfdoc':'PDFDocEncoding', 'macexpert':'MacExpertEncoding'}
def __setattr__(self,x,v):
y = x.lower()
if y[-8:]=='encoding': y = y[:-8]
y = self._XMap[y]
if y in self.keys(): raise IndexError('Encoding %s is already set' % y)
self[y] = v
def __getattr__(self,x):
y = x.lower()
if y[-8:]=='encoding': y = y[:-8]
y = self._XMap[y]
return self[y]
encodings = _Name2StandardEncodingMap()
from reportlab.pdfbase._fontdata_enc_winansi import WinAnsiEncoding
from reportlab.pdfbase._fontdata_enc_macroman import MacRomanEncoding
from reportlab.pdfbase._fontdata_enc_standard import StandardEncoding
from reportlab.pdfbase._fontdata_enc_symbol import SymbolEncoding
from reportlab.pdfbase._fontdata_enc_zapfdingbats import ZapfDingbatsEncoding
from reportlab.pdfbase._fontdata_enc_pdfdoc import PDFDocEncoding
from reportlab.pdfbase._fontdata_enc_macexpert import MacExpertEncoding
encodings.update({
'WinAnsiEncoding': WinAnsiEncoding,
'MacRomanEncoding': MacRomanEncoding,
'StandardEncoding': StandardEncoding,
'SymbolEncoding': SymbolEncoding,
'ZapfDingbatsEncoding': ZapfDingbatsEncoding,
'PDFDocEncoding': PDFDocEncoding,
'MacExpertEncoding': MacExpertEncoding,
})
ascent_descent = {
'Courier': (629, -157),
'Courier-Bold': (626, -142),
'Courier-BoldOblique': (626, -142),
'Courier-Oblique': (629, -157),
'Helvetica': (718, -207),
'Helvetica-Bold': (718, -207),
'Helvetica-BoldOblique': (718, -207),
'Helvetica-Oblique': (718, -207),
'Times-Roman': (683, -217),
'Times-Bold': (676, -205),
'Times-BoldItalic': (699, -205),
'Times-Italic': (683, -205),
'Symbol': (0, 0),
'ZapfDingbats': (0, 0)
}
import reportlab.pdfbase._fontdata_widths_courier
import reportlab.pdfbase._fontdata_widths_courierbold
import reportlab.pdfbase._fontdata_widths_courieroblique
import reportlab.pdfbase._fontdata_widths_courierboldoblique
import reportlab.pdfbase._fontdata_widths_helvetica
import reportlab.pdfbase._fontdata_widths_helveticabold
import reportlab.pdfbase._fontdata_widths_helveticaoblique
import reportlab.pdfbase._fontdata_widths_helveticaboldoblique
import reportlab.pdfbase._fontdata_widths_timesroman
import reportlab.pdfbase._fontdata_widths_timesbold
import reportlab.pdfbase._fontdata_widths_timesitalic
import reportlab.pdfbase._fontdata_widths_timesbolditalic
import reportlab.pdfbase._fontdata_widths_symbol
import reportlab.pdfbase._fontdata_widths_zapfdingbats
widthsByFontGlyph = {
'Courier':
reportlab.pdfbase._fontdata_widths_courier.widths,
'Courier-Bold':
reportlab.pdfbase._fontdata_widths_courierbold.widths,
'Courier-Oblique':
reportlab.pdfbase._fontdata_widths_courieroblique.widths,
'Courier-BoldOblique':
reportlab.pdfbase._fontdata_widths_courierboldoblique.widths,
'Helvetica':
reportlab.pdfbase._fontdata_widths_helvetica.widths,
'Helvetica-Bold':
reportlab.pdfbase._fontdata_widths_helveticabold.widths,
'Helvetica-Oblique':
reportlab.pdfbase._fontdata_widths_helveticaoblique.widths,
'Helvetica-BoldOblique':
reportlab.pdfbase._fontdata_widths_helveticaboldoblique.widths,
'Times-Roman':
reportlab.pdfbase._fontdata_widths_timesroman.widths,
'Times-Bold':
reportlab.pdfbase._fontdata_widths_timesbold.widths,
'Times-Italic':
reportlab.pdfbase._fontdata_widths_timesitalic.widths,
'Times-BoldItalic':
reportlab.pdfbase._fontdata_widths_timesbolditalic.widths,
'Symbol':
reportlab.pdfbase._fontdata_widths_symbol.widths,
'ZapfDingbats':
reportlab.pdfbase._fontdata_widths_zapfdingbats.widths,
}
#preserve the initial values here
def _reset(
        initial_dicts=dict(
            ascent_descent=ascent_descent.copy(),
            fontsByBaseEnc=fontsByBaseEnc.copy(),
            fontsByName=fontsByName.copy(),
            standardFontAttributes=standardFontAttributes.copy(),
            widthVectorsByFont=widthVectorsByFont.copy(),
            widthsByFontGlyph=widthsByFontGlyph.copy(),
            )
        ):
    """Restore the module-level font dictionaries to their import-time state.

    The pristine copies are captured once, at definition time, via the
    mutable default argument (a deliberate snapshot idiom).  The dicts are
    mutated in place so existing references elsewhere stay valid.
    """
    for name, pristine in initial_dicts.items():
        target = globals()[name]
        target.clear()
        target.update(pristine)
from reportlab.rl_config import register_reset
register_reset(_reset)
del register_reset
| {
"content_hash": "63d036f973501112d47d4830d2f94a91",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 226,
"avg_line_length": 39.53252032520325,
"alnum_prop": 0.6419537275064268,
"repo_name": "nakagami/reportlab",
"id": "ddf14cdd7532183e24cc8c0b9453840d75cf2b62",
"size": "9926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/reportlab/pdfbase/_fontdata.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "754561"
},
{
"name": "C++",
"bytes": "1351"
},
{
"name": "Java",
"bytes": "6333"
},
{
"name": "Python",
"bytes": "3439804"
},
{
"name": "Shell",
"bytes": "1673"
}
],
"symlink_target": ""
} |
from celery.utils.log import get_task_logger
from flask import current_app
import datetime
from redash.worker import celery
from redash import utils
from redash import models, settings
from .base import BaseTask
logger = get_task_logger(__name__)
def base_url(org):
    """Return the base URL for *org*, honoring multi-org deployments."""
    if not settings.MULTI_ORG:
        return settings.HOST
    return "https://{}/{}".format(settings.HOST, org.slug)
def notify_subscriptions(alert, new_state):
    """Notify every destination subscribed to *alert* about its new state.

    A failure in one destination is logged (with traceback) and does not
    prevent delivery to the remaining subscriptions.
    """
    host = base_url(alert.query_rel.org)
    for subscription in alert.subscriptions:
        try:
            subscription.notify(alert, alert.query, subscription.user,
                                new_state, current_app, host)
        except Exception:
            # `e` was previously bound but unused; logger.exception already
            # records the active traceback.
            logger.exception("Error with processing destination")
def should_notify(alert, new_state):
    """Decide whether a state evaluation warrants sending notifications.

    Notify when the state actually changed, or when the alert remains in the
    triggered state and its re-arm interval has elapsed since the last
    trigger.
    """
    if new_state != alert.state:
        return True
    if alert.state != models.Alert.TRIGGERED_STATE:
        return False
    if not (alert.rearm and alert.last_triggered_at):
        return False
    rearm_deadline = alert.last_triggered_at + datetime.timedelta(seconds=alert.rearm)
    return rearm_deadline < utils.utcnow()
@celery.task(name="redash.tasks.check_alerts_for_query", base=BaseTask)
def check_alerts_for_query(query_id):
    """Evaluate every alert attached to a query and notify on state changes.

    For each alert whose evaluation warrants a notification (see
    should_notify), the new state and trigger time are persisted before any
    notifications go out, so a notification failure cannot leave the alert
    state stale.
    """
    logger.debug("Checking query %d for alerts", query_id)
    query = models.Query.query.get(query_id)

    for alert in query.alerts:
        new_state = alert.evaluate()
        if should_notify(alert, new_state):
            logger.info("Alert %d new state: %s", alert.id, new_state)
            # Persist the transition first; keep the old state for the
            # unknown->ok suppression check below.
            old_state = alert.state
            alert.state = new_state
            alert.last_triggered_at = utils.utcnow()
            models.db.session.commit()

            # An alert settling from "unknown" straight to "ok" is not a
            # meaningful event for subscribers — record it but stay quiet.
            if old_state == models.Alert.UNKNOWN_STATE and new_state == models.Alert.OK_STATE:
                logger.debug("Skipping notification (previous state was unknown and now it's ok).")
                continue

            notify_subscriptions(alert, new_state)
| {
"content_hash": "f5223d04f06a06d0645fe66af9441732",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 115,
"avg_line_length": 33.10344827586207,
"alnum_prop": 0.6755208333333333,
"repo_name": "stefanseifert/redash",
"id": "a431f2bb892d8e61275fff9d799c055af063ffa3",
"size": "1920",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "redash/tasks/alerts.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "225222"
},
{
"name": "HTML",
"bytes": "115585"
},
{
"name": "JavaScript",
"bytes": "248274"
},
{
"name": "Makefile",
"bytes": "881"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Nginx",
"bytes": "577"
},
{
"name": "Python",
"bytes": "599154"
},
{
"name": "Ruby",
"bytes": "709"
},
{
"name": "Shell",
"bytes": "43625"
}
],
"symlink_target": ""
} |
''' The core of ``pyfftw`` consists of the :class:`FFTW` class,
:ref:`wisdom functions <wisdom_functions>` and a couple of
:ref:`utility functions <utility_functions>` for dealing with aligned
arrays.
This module represents the full interface to the underlying `FFTW
library <http://www.fftw.org/>`_. However, users may find it easier to
use the helper routines provided in :mod:`pyfftw.builders`. Default values
used by the helper routines can be controlled as via
:ref:`configuration variables <configuration_variables>`.
'''
import os
from .pyfftw import (
FFTW,
export_wisdom,
import_wisdom,
forget_wisdom,
simd_alignment,
n_byte_align_empty,
n_byte_align,
is_n_byte_aligned,
byte_align,
is_byte_aligned,
empty_aligned,
ones_aligned,
zeros_aligned,
next_fast_len,
_supported_types,
_supported_nptypes_complex,
_supported_nptypes_real,
_all_types_human_readable,
_all_types_np,
_threading_type
)
from . import config
from . import builders
from . import interfaces
# clean up the namespace
del builders.builders
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
| {
"content_hash": "8726a9314851bc70c2297f8d0add72b8",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 74,
"avg_line_length": 26.541666666666668,
"alnum_prop": 0.6656200941915228,
"repo_name": "hgomersall/pyFFTW",
"id": "6097f8b8619129f0ff6fb763c5c010f77445a093",
"size": "1320",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pyfftw/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4267"
},
{
"name": "C",
"bytes": "40032"
},
{
"name": "PowerShell",
"bytes": "1327"
},
{
"name": "Python",
"bytes": "541849"
},
{
"name": "Shell",
"bytes": "561"
}
],
"symlink_target": ""
} |
"""empty message
Revision ID: e95429507f03
Revises: 82e227787c88
Create Date: 2017-10-08 04:00:04.899374
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e95429507f03'
down_revision = '82e227787c88'
branch_labels = None
depends_on = None
def upgrade():
    """Create the OAuth `client` table plus a unique index on client_secret."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('client',
    sa.Column('name', sa.String(length=50), nullable=False),
    sa.Column('client_id', sa.String(length=100), nullable=False),
    sa.Column('client_secret', sa.String(length=50), nullable=False),
    sa.Column('_is_confidential', sa.Boolean(), nullable=False),
    sa.Column('_allowed_grant_types', sa.Text(), nullable=False),
    sa.Column('_redirect_uris', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('client_id')
    )
    op.create_index(op.f('ix_client_client_secret'), 'client', ['client_secret'], unique=True)
    # ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): drop the client_secret index, then the table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_client_client_secret'), table_name='client')
    op.drop_table('client')
    # ### end Alembic commands ###
| {
"content_hash": "8398d768106161f3b965530d5d425171",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 94,
"avg_line_length": 31.44736842105263,
"alnum_prop": 0.6778242677824268,
"repo_name": "coderadi/OAuth-server",
"id": "84286f4206bbc030b263752e5f3d2ff5dfe6e233",
"size": "1195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "migrations/versions/e95429507f03_.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "17127"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration adding the HackSawBlade model.

    HackSawBlade is a multi-table-inheritance child of SableSawBlade: its
    only field is the parent-link OneToOneField, and `bases` points at
    'products.sablesawblade'.
    """

    dependencies = [
        ('products', '0035_auto_20160102_1442'),
    ]

    operations = [
        migrations.CreateModel(
            name='HackSawBlade',
            fields=[
                ('sablesawblade_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='products.SableSawBlade')),
            ],
            options={
                # German verbose names ("metal hand saw blade(s)") — runtime
                # strings, left untouched.
                'verbose_name_plural': 'Metallhandsägeblätter',
                'verbose_name': 'Metallhandsägeblatt',
            },
            bases=('products.sablesawblade',),
        ),
    ]
| {
"content_hash": "537bd6adb5ff17777e344420c6e5ec0e",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 206,
"avg_line_length": 31.24,
"alnum_prop": 0.6043533930857875,
"repo_name": "n2o/guhema",
"id": "60251d677ca6dde196648f2ebe6bc6d79855c4e9",
"size": "856",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "products/migrations/0036_hacksawblade.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12685"
},
{
"name": "HTML",
"bytes": "158857"
},
{
"name": "Python",
"bytes": "151643"
}
],
"symlink_target": ""
} |
from .primitives import *
from .types import *
__all__ = (types.__all__ + primitives.__all__)
| {
"content_hash": "23bb473c2d62a7e8d98e82a1a7153ae3",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 46,
"avg_line_length": 23.75,
"alnum_prop": 0.6210526315789474,
"repo_name": "jamespeterschinner/async_v20",
"id": "7f81473c46566958aa273bc873d6406887a7f674",
"size": "95",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "async_v20/definitions/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5413428"
}
],
"symlink_target": ""
} |
# Dependency-parse every line of the text file named on the command line and
# print per-token POS/tag/lemma/dependency details.
import sys
import spacy
# Load the English model once up front (loading is the expensive step).
nlp = spacy.load('en')
with open(sys.argv[1], 'r') as infile:
    sentences = infile.readlines()
# Parse each line independently; one line is assumed to hold one sentence.
for sentence in sentences:
    sentence = sentence.strip()
    parse = nlp(sentence)
    print(parse.text)
    for token in parse:
        # '<-dep-' renders the arc label pointing from the head to this token.
        print(token.pos_,
              token.tag_,
              token.lemma_,
              '<-%s-' % token.dep_,
              token.head,
              token.orth_)
    print('--------------------------------------------------')
| {
"content_hash": "b4b1cc7156cc2056f76464e76690b655",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 63,
"avg_line_length": 25.63157894736842,
"alnum_prop": 0.4804928131416838,
"repo_name": "hawkrives/project-e.a.s.t.",
"id": "b0e08d22d5d9a76b4943a8912f6a4ce32b64192b",
"size": "487",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "syllogizmos/parsefile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19512"
}
],
"symlink_target": ""
} |
from marrow.package.loader import load
from .util import redact_uri
log = __import__('logging').getLogger(__name__)
class DBAPIConnection:
	"""WebCore DBExtension interface for projects utilizing PEP 249 DB API database engines."""

	uri_safety = True  # Go to some effort to hide connection passwords from logs.
	thread_safe = True  # When False, create a connection for the duration of the request only.

	def __init__(self, engine, uri, safe=True, protect=True, alias=None, **kw):
		"""Prepare configuration options.

		`engine` is an entry-point/dotted-path reference resolved through
		marrow.package's `load`; `uri` is the DB API connection string and the
		remaining keyword arguments are forwarded to the driver's connect call.
		"""
		self.engine = engine
		self.uri = uri
		self.safe = safe  # Thread safe? When False, create a connection for the duration of a request only.
		self.protect = protect
		self.alias = alias
		self.config = kw

		self._connector = load(engine, 'db_api_connect')

		# Alias the extension lifecycle callbacks: a thread-safe driver
		# connects once at application start/stop, otherwise per-request
		# (prepare/done).
		if self.safe:  # pragma: no cover
			self.start = self._connect
			self.stop = self._disconnect
		else:
			self.prepare = self._connect
			self.done = self._disconnect

	def __repr__(self):
		# Password is redacted from the URI unless protect=False.
		return '{self.__class__.__name__}({self.alias}, "{self.engine}", "{uri}")'.format(
				self = self,
				uri = redact_uri(self.uri, self.protect),
			)

	def _connect(self, context):
		"""Initialize the database connection."""
		if __debug__:
			log.info("Connecting " + self.engine.partition(':')[0] + " database layer.", extra=dict(
					uri = redact_uri(self.uri, self.protect),
					config = self.config,
					alias = self.alias,
				))

		# Expose the live connection to downstream code via context.db[alias].
		self.connection = context.db[self.alias] = self._connector(self.uri, **self.config)

	def _disconnect(self, context):
		"""Close the connection and clean up references."""
		self.connection.close()
		del self.connection
class SQLite3Connection(DBAPIConnection):
	"""DBAPIConnection preconfigured for the standard-library sqlite3 driver."""

	def __init__(self, path, alias=None, **kw):
		# sqlite3 connections are bound to one thread and the URI carries no
		# password, hence safe=False and protect=False.
		engine = 'sqlite3:connect'
		super().__init__(engine, path, safe=False, protect=False, alias=alias, **kw)
| {
"content_hash": "4d8e2cff10bcf2cf6c16e0b9dc472e77",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 102,
"avg_line_length": 29.934426229508198,
"alnum_prop": 0.6681270536692223,
"repo_name": "marrow/web.db",
"id": "e2c73cd64bf136b718442d0be400e1c650f21114",
"size": "1826",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "web/db/dbapi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "879"
},
{
"name": "Python",
"bytes": "23275"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration re-declaring Image.file_ptr.

    file_ptr is the multi-table-inheritance parent link from filer's Image
    model to filer.File (primary key, related_name 'filer_image_file').
    """

    dependencies = [
        ('filer', '0009_auto_20171220_1635'),
    ]

    operations = [
        migrations.AlterField(
            model_name='image',
            name='file_ptr',
            field=models.OneToOneField(primary_key=True, serialize=False, related_name='filer_image_file', parent_link=True, to='filer.File'),
        ),
    ]
| {
"content_hash": "d7c0b1bc790da5bd726819d0c3c69fea",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 142,
"avg_line_length": 26.055555555555557,
"alnum_prop": 0.6226012793176973,
"repo_name": "skirsdeda/django-filer",
"id": "f40c52a30927c922fdd2e6e4d2ff28d963d444b4",
"size": "493",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "filer/migrations/0010_auto_20180414_2058.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "91544"
},
{
"name": "HTML",
"bytes": "80942"
},
{
"name": "JavaScript",
"bytes": "58948"
},
{
"name": "Python",
"bytes": "346663"
},
{
"name": "Ruby",
"bytes": "1119"
},
{
"name": "Shell",
"bytes": "246"
}
],
"symlink_target": ""
} |
import requests
import smtplib

# Load e-mail configuration/helpers from the sibling script.
# BUG FIX: `exec(open(inemail.py))` raised NameError (unquoted module name)
# and, even quoted, would have passed a file object rather than source text
# to exec().
exec(open("inemail.py").read())
def is_website_working(url, string_in_page):
    """Return True iff *url* answers HTTP 200 and its body contains *string_in_page*."""
    try:
        r = requests.get(url)
    except Exception:
        # Any network/DNS/timeout failure counts as "down".  A bare `except:`
        # (as before) would also have swallowed KeyboardInterrupt/SystemExit.
        return False
    if r.status_code != 200 or string_in_page not in r.content:
        return False
    return True
def send_mail(url, email):
    """E-mail a "website down" notice for *url*, sent from and to *email*.

    Relies on an SMTP server listening on localhost; failures are reported
    on stdout rather than raised.
    """
    sender = email
    receiver = [email]
    # Raw RFC-822 message text; headers and body are built by concatenation.
    message = """From: <"""+email+""">
    To: <"""+email+""">
    Subject: WEBSITE MAIL
    Your website """+url+ """ is down!.
    """
    try:
        smtpObj = smtplib.SMTP('localhost')
        smtpObj.sendmail(sender, receiver, message)
        print "Successfully sent email"
    except:
        print "Error: unable to send email"
def send_yo(username, url):
    """Send a Yo (carrying *url* as the link) to *username*; return the HTTP status code."""
    payload = {
        'api_token': "8784db87-d95d-4fad-92d6-3e1a10bd4200",
        'username': username,
    }
    payload["link"] = url
    response = requests.post("http://api.justyo.co/yo/", payload)
    return response.status_code
def main():
f = open("/home/ec2-user/website_status/settings.txt", "r")
for line in f:
#print line
if line[0] != "#":
params = line.split()
#print(params)
if len(params) < 3:
print "There must be at least three parameters (yo_username is optional)"
print "url string email yo_username"
return False
url = params[0]
string = params[1]
email = params[2]
yo_username = ""
if len(params) == 4:
yo_username = params[3]
if not is_website_working(url, string):
if yo_username != "":
send_yo(yo_username, url)
send_mail(url, email)
print "Website "+url+" down :("
else:
print "Website "+url+" running status 200 :)"
if __name__ == '__main__':
main()
| {
"content_hash": "37f6504b19aaf18dd58ffb9bb314d3e0",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 89,
"avg_line_length": 26.89855072463768,
"alnum_prop": 0.5264008620689655,
"repo_name": "NicoHinderling/website_status",
"id": "8504d82b67a452b8c24c2ad61288bee773e8461e",
"size": "1899",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web_status.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2681"
},
{
"name": "Shell",
"bytes": "277"
}
],
"symlink_target": ""
} |
import projectors, computers, kipro
import pcpUtils as utils
def _printStartMenu(interfaces):
print "\n",
print "********************************\n" * 2
print "Welcome to the COL Production Control Panel!\n"
print "********************************\n" * 2
# list out devices
choices = {}
for i, interface in enumerate(interfaces):
id = i + 1
choices[id] = interface
print str(id) + ".\t" + choices[id]
print str(i + 2) + ".\tExit\n"
while True:
try:
choice = int(raw_input("Choose an interface: "))
except ValueError:
continue
if choice < 0 or choice > (len(interfaces) + 1): continue
if choice == (len(interfaces) + 1): exit()
break
# list out functions available to that device (use filter to only show options _ON)
options = utils.loadOptions(choices[choice]).keys()
if options == []:
print "No options available!"
exit()
enabled = {}
for j, option in enumerate(options):
id = j + 1
enabled[id] = option
print str(id) + ".\t" + enabled[id]
print str(j + 2) + ".\tExit\n"
while True:
try:
choice = int(raw_input("Choose an function: "))
except ValueError:
continue
if choice < 0 or choice > (len(interfaces) + 1): continue
if choice == (len(enabled) + 1): exit()
break
def _main():
    """Entry point: discover configured devices and show the start menu."""
    device_names = utils.getAllDeviceNames()
    _printStartMenu(device_names)
if __name__ == "__main__":
_main() | {
"content_hash": "937ea5ae727cec1e2c56b5b8ebfc362f",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 84,
"avg_line_length": 25.145454545454545,
"alnum_prop": 0.6167751265365148,
"repo_name": "redreceipt/pcp",
"id": "6b16a3165767fdb0d0a602a66d304f997824b4a8",
"size": "1422",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "startPCP.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12414"
}
],
"symlink_target": ""
} |
"""Beam DoFns for running Box Least Squares and processing the output."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import apache_beam as beam
from apache_beam.metrics import Metrics
import numpy as np
from box_least_squares import box_least_squares_pb2 as bls_pb2
from box_least_squares.python import box_least_squares
from experimental.beam.transit_search import bls_scorer
from light_curve import light_curve_pb2
def _max_duration(period, density_star):
return (period * 365.25**2 / (np.pi**3 * density_star * 215**3))**(1 / 3)
class GeneratePeriodogramDoFn(beam.DoFn):
  """Generates the BLS periodogram for a light curve."""

  def __init__(self, all_periods, all_nbins, weight_min_factor,
               duration_density_min, duration_min_days, duration_density_max,
               duration_min_fraction):
    """Initializes the DoFn.

    Args:
      all_periods: Iterable of trial periods to fit, parallel to all_nbins.
      all_nbins: Iterable of bin counts, one per trial period.
      weight_min_factor: Scale factor applied to width_min / nbins to obtain
        the minimum fractional transit weight.
      duration_density_min: Stellar density used to cap the maximum transit
        duration; falsy to cap at 25% of the period instead.
      duration_min_days: Absolute floor on transit duration, in days (may be
        falsy to disable).
      duration_density_max: Stellar density used to derive the minimum transit
        duration; falsy to skip that bound.
      duration_min_fraction: Fraction of the density-derived maximum duration
        used as the minimum duration.
    """
    self.all_periods = all_periods
    self.all_nbins = all_nbins
    # The largest bin count sizes the BLS working buffer below.
    self.max_nbins = max(self.all_nbins)
    self.weight_min_factor = weight_min_factor
    self.duration_density_min = duration_density_min
    self.duration_min_days = duration_min_days
    self.duration_density_max = duration_density_max
    self.duration_min_fraction = duration_min_fraction

  def process(self, inputs):
    """Generates the BLS periodogram for a light curve.

    Args:
      inputs: A dict with a "light_curve" entry (wrapping a
        light_curve_pb2.LightCurve) and a "kepler_id" entry.

    Yields:
      The same dict with a "periodogram" entry added
      (box_least_squares_pb2.Periodogram).
    """
    Metrics.counter(self.__class__.__name__, "inputs-seen").inc()

    # Unpack the light curve.
    # NOTE(review): assumes time, flux and norm_curve are equal-length
    # sequences — confirm against the upstream producer.
    lc = inputs["light_curve"]
    time = np.array(lc.light_curve.time, dtype=np.float)
    flux = np.array(lc.light_curve.flux, dtype=np.float)
    norm_curve = np.array(lc.light_curve.norm_curve, dtype=np.float)
    flux /= norm_curve  # Normalize flux.

    # Fit periodogram.
    bls = box_least_squares.BoxLeastSquares(time, flux, capacity=self.max_nbins)
    results = []
    # itertools.izip: this file targets Python 2.
    for period, nbins in itertools.izip(self.all_periods, self.all_nbins):
      bin_width = period / nbins

      # Compute the minimum number of bins for a transit.
      duration_min = 0
      if self.duration_density_max:
        duration_min = self.duration_min_fraction * _max_duration(
            period, density_star=self.duration_density_max)
      if self.duration_min_days:
        duration_min = max(self.duration_min_days, duration_min)
      # At least one bin wide, even when duration_min is 0.
      width_min = int(np.maximum(1, np.floor(duration_min / bin_width)))

      # Compute the maximum number of bins for a transit.
      if self.duration_density_min:
        duration_max = _max_duration(
            period, density_star=self.duration_density_min)
        width_max = int(np.ceil(duration_max / bin_width))
      else:
        width_max = int(np.ceil(0.25 * nbins))

      weight_min = self.weight_min_factor * width_min / nbins
      weight_max = 1
      options = bls_pb2.BlsOptions(
          width_min=width_min,
          width_max=width_max,
          weight_min=weight_min,
          weight_max=weight_max)
      try:
        result = bls.fit(period, nbins, options)
      except ValueError:
        # A failed fit drops this light curve entirely (no yield); the
        # per-Kepler-ID counter records which target failed.
        Metrics.counter(self.__class__.__name__,
                        "bls-error-{}".format(inputs["kepler_id"])).inc()
        return
      results.append(result)

    inputs["periodogram"] = bls_pb2.Periodogram(results=results)
    yield inputs
def score_method_args_str(name, args):
    """Serialize a score method and its kwargs as "name:k1=v1,k2=v2".

    Returns just `name` when `args` is empty. Keys are emitted in sorted
    order so the result is deterministic.
    """
    pieces = ["{}={}".format(key, args[key]) for key in sorted(args)]
    if not pieces:
        return name
    return "{}:{}".format(name, ",".join(pieces))
class TopResultsDoFn(beam.DoFn):
    """Computes the top scoring results of a BLS periodogram."""

    def __init__(self, score_methods, ignore_negative_depth):
        """Initializes the DoFn.

        Args:
          score_methods: Iterable of (name, kwargs) pairs accepted by
            BlsScorer.score().
          ignore_negative_depth: Whether the scorer should skip results with
            negative transit depth.
        """
        self.score_methods = score_methods
        self.ignore_negative_depth = ignore_negative_depth

    def process(self, inputs):
        """Scores the periodogram and attaches the winners.

        Args:
          inputs: A dict with a "periodogram" entry.

        Yields:
          The same dict with a "top_results" entry (bls_pb2.TopResults)
          containing one ScoredResult per score method.
        """
        results = list(inputs["periodogram"].results)
        scorer = bls_scorer.BlsScorer(
            results, ignore_negative_depth=self.ignore_negative_depth)
        top_results = bls_pb2.TopResults()
        for name, args in self.score_methods:
            score, result = scorer.score(name, **args)
            # Gather name and args into a single string.
            score_method = score_method_args_str(name, args)
            # add() constructs the ScoredResult in place on the repeated
            # field; equivalent to (and tidier than) building a message and
            # calling extend() with a one-element list.
            top_results.scored_results.add(
                result=result, score_method=score_method, score=score)
        inputs["top_results"] = top_results
        yield inputs
class GetTopResultDoFn(beam.DoFn):
    """Computes the top scoring results of a BLS periodogram."""

    def __init__(self, score_method):
        # Normalize the (name, kwargs) pair into the canonical string form
        # stored on ScoredResult.score_method.
        self.top_detection_score_method = score_method_args_str(*score_method)

    def process(self, inputs):
        # TODO(shallue): eventually stop outputting TopResults, and just do a
        # ScoredResult.
        # Collect every result scored with the configured method; if it
        # occurs more than once, the last occurrence wins (same semantics as
        # a full scan without break).
        matches = [
            candidate for candidate in inputs["top_results"].scored_results
            if candidate.score_method == self.top_detection_score_method
        ]
        if not matches:
            raise ValueError(
                "Score method {} not found".format(self.top_detection_score_method))
        inputs["top_result"] = matches[-1]
        yield inputs
class PostProcessForNextDetectionDoFn(beam.DoFn):
    """Post processes for the next detection."""

    def __init__(self, score_threshold=None):
        # Detections scoring below this (when set) are dropped entirely.
        self.score_threshold = score_threshold

    def process(self, inputs):
        top_result = inputs["top_result"]
        # Guard clause: exact negation of the original acceptance condition
        # (not threshold or score >= threshold).
        if self.score_threshold and not (top_result.score >= self.score_threshold):
            return

        # Append the newly detected event to the list of events already
        # removed from the light curve, so the next search skips it.
        detected_event = light_curve_pb2.PeriodicEvent(
            period=top_result.result.period,
            t0=top_result.result.epoch,
            duration=top_result.result.duration)
        events_to_remove = list(inputs["light_curve"].removed_events)
        events_to_remove.append(detected_event)

        outputs = {
            "kepler_id": inputs["kepler_id"],
            "raw_light_curve": inputs["raw_light_curve"],
            "events_to_remove": events_to_remove,
        }
        if "light_curve_for_predictions" in inputs:
            outputs["light_curve_for_predictions"] = inputs[
                "light_curve_for_predictions"]
        yield outputs
| {
"content_hash": "9bcbc31a8636bfa2bf9c7082bd61f5f3",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 80,
"avg_line_length": 34.486338797814206,
"alnum_prop": 0.6629694184756774,
"repo_name": "google-research/exoplanet-ml",
"id": "83b56ddf122bd824b580e110b282071eada578ae",
"size": "6901",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "exoplanet-ml/experimental/beam/transit_search/bls_fns.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "139752"
},
{
"name": "Python",
"bytes": "570659"
},
{
"name": "Starlark",
"bytes": "21880"
}
],
"symlink_target": ""
} |
import decimal
from datetime import date
from django.conf import settings
from django.db import models
from django.core import urlresolvers
from django.contrib.auth.models import User, Group
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
from shoppy.util.models import moneyfmt
from shoppy.util.unique import slugify
from shoppy import settings
from shoppy.shop.models import CartProduct, Order
class Product(models.Model):
    """A shop product with per-pricelist prices and translated option texts."""

    orderno = models.CharField(max_length=50, unique=True, help_text=_("Lager- bzw. Produktnummer."))
    taxClass = models.ForeignKey('TaxClass', verbose_name=_("Tax"))
    isActive = models.BooleanField()
    isStockable = models.BooleanField()
    stock = models.PositiveIntegerField(null=True, blank=True)
    picture = models.FileField(upload_to='images/', null=True, blank=True)
    added_date = models.DateTimeField(auto_now_add=True)
    # NOTE(review): default -1 presumably means "no limit" -- confirm.
    max_order_amount = models.IntegerField(default=-1)
    sort_no = models.IntegerField(default=0)
    sites = models.ManyToManyField(Site)

    def __unicode__(self):
        return u'%s' % self.orderno

    def get_price_str(self):
        """Return the default-pricelist price formatted as 'x,yy'."""
        # BUG FIX: the original passed the bound method `self.get_price` to
        # Decimal(), which raises TypeError; the method must be called.
        tmp = decimal.Decimal(self.get_price())
        price = moneyfmt(tmp, places=2, dp=',')
        if price == ',00':
            price = '0,00'
        return '%s' % (price)

    def get_price(self, pricelist_id=3128):
        """Return this product's price on the given pricelist, or 0 if absent."""
        try:
            return PricelistProduct.objects.get(
                pricelist__id=pricelist_id, product=self).price
        except PricelistProduct.DoesNotExist:
            return 0

    def get_price_notax(self):
        """Return the net price (gross price with this product's tax removed)."""
        # BUG FIX: the original called `get_price(self)`, an undefined
        # module-level name (NameError); the instance method must be used.
        price = (self.get_price() / (self.taxClass.percent + 100) * 100)
        return price

    def get_price_notax_str(self):
        """Return the net price formatted with a comma decimal separator."""
        price = moneyfmt(self.get_price_notax(), places=2, dp=',')
        return '%s' % price

    def get_picture_url(self):
        """URL of the full-size product image (derived from orderno)."""
        return "%simages/%s.jpg" % (settings.MEDIA_URL, self.orderno)

    def get_thumbnail_url(self):
        """URL of the thumbnail image (derived from orderno, 'T_' prefix)."""
        return "%simages/T_%s.jpg" % (settings.MEDIA_URL, self.orderno)

    def get_absolute_url(self, langcode=settings.SHOPPY_DEFAULT_LANGUAGE_CODE):
        """Return the canonical detail URL, slugified from option field 1."""
        tmp = slugify(self.get_option_desc(fieldnb=1))
        return urlresolvers.reverse('shoppy_products_detail',
            kwargs={'product_id': self.id, 'product_name': tmp})

    def get_option_desc(self, fieldnb, langcode=settings.SHOPPY_DEFAULT_LANGUAGE_CODE):
        """Return the option description for the given field number/language,
        falling back to the default language, then to ''."""
        try:
            desc = Productoption.objects.get(language_code=langcode, product=self, optionfield__number=fieldnb).description
        except Productoption.DoesNotExist:
            if not langcode == settings.SHOPPY_DEFAULT_LANGUAGE_CODE:
                try:
                    desc = Productoption.objects.get(language_code=settings.SHOPPY_DEFAULT_LANGUAGE_CODE, product=self, optionfield__number=fieldnb).description
                except Exception:
                    # Best-effort fallback; narrowed from a bare except so
                    # KeyboardInterrupt/SystemExit are no longer swallowed.
                    desc = ''
            else:
                desc = ''
        return desc

    def get_option_name(self, fieldnb, language_code=settings.SHOPPY_DEFAULT_LANGUAGE_CODE):
        """Return the option name for the given field number/language,
        falling back to the default language, then to ''."""
        try:
            name = Productoption.objects.get(language_code=language_code, product=self, optionfield__number=fieldnb).optionname
        except Productoption.DoesNotExist:
            if not language_code == settings.SHOPPY_DEFAULT_LANGUAGE_CODE:
                try:
                    # BUG FIX: the original used the kwarg `language__code`
                    # (no such field/relation on Productoption), so this
                    # fallback lookup always failed into the except branch.
                    name = Productoption.objects.get(language_code=settings.SHOPPY_DEFAULT_LANGUAGE_CODE, product=self, optionfield__number=fieldnb).optionname
                except Exception:
                    name = ''
            else:
                name = ''
        return name

    def orderamount(self, dfrom=date(2009, 1, 1), dto=None):
        """Return the total quantity of this product in non-cancelled orders
        between dfrom and dto (inclusive).

        BUG FIX: the original default `dto=date.today()` was evaluated once
        at import time, so the upper bound silently went stale in
        long-running processes; None now means "today".
        """
        if dto is None:
            dto = date.today()
        cp = CartProduct.objects.filter(product=self)
        cartids = []
        try:
            for c in cp:
                cartids.append(c.cart.id)
        except Exception:
            # Best effort: keep whatever cart ids were collected so far.
            pass
        o = Order.objects.filter(order_date__gte=dfrom, order_date__lte=dto, cart__id__in=cartids, cancel_date__isnull=True)
        menge = 0
        for order in o:
            menge += CartProduct.objects.get(cart=order.cart, product=self).amount
        return menge

    def rating(self):
        """Return the average rating rounded to int, or 0 with no ratings."""
        r = Rating.objects.all().filter(product=self).aggregate(models.Avg('rating'))
        if r['rating__avg'] is None:
            # Avg over an empty queryset yields None; the original crashed
            # here with a TypeError for products without ratings.
            return 0
        return int(round(r['rating__avg']))

    def rates(self):
        """Return the number of ratings for this product."""
        return Rating.objects.all().filter(product=self).count()
class Pricelist(models.Model):
    """A named price list; per-product prices attach via PricelistProduct."""
    name = models.CharField(max_length=255)
    sites = models.ManyToManyField(Site)
class PricelistProduct(models.Model):
    """The price of one Product on one Pricelist."""
    pricelist = models.ForeignKey('Pricelist')
    product = models.ForeignKey('Product')
    # Current sale price.
    price = models.DecimalField(max_digits=10,decimal_places=2, help_text=_("Aktueller Verkaufspreis"))
    # Previous sale price, e.g. for displaying a discount.
    price_old = models.DecimalField(max_digits=10,decimal_places=2, help_text=_("Alter Verkausoreis. Z.B. zur Darstellung von Rabatt."))
class AuthGroupPricelist(models.Model):
    """Associates an auth Group with sites.

    NOTE(review): despite the name there is no ForeignKey to Pricelist here;
    possibly an incomplete schema -- confirm intended design.
    """
    name = models.CharField(max_length=255)
    group = models.ForeignKey(Group)
    sites = models.ManyToManyField(Site)
class Optionfield(models.Model):
    """A numbered slot for product option texts (see Productoption)."""
    number = models.IntegerField()
    name = models.CharField(max_length=255, null=True, blank=True)

    def __unicode__(self):
        return u'%s - %s' % (self.number, self.name)
class Productoption(models.Model):
    """Translated option text for a product, keyed by language and Optionfield."""
    # ISO 639-1 language code, e.g. 'de' or 'en'.
    language_code = models.CharField(max_length=2,help_text=_("Sprachcode nach ISO 639-1."))
    product = models.ForeignKey('Product')
    optionfield = models.ForeignKey('Optionfield')
    optionname = models.CharField(max_length=255, null=True, blank=True)
    description = models.TextField()

    def __unicode__(self):
        return u'%s - %s' % (self.optionname, self.description)
class KeywordSection(models.Model):
    """
    Groups the individual keywords into sections; e.g. 'gender' would be the
    section for the keywords 'male' and 'female'.
    (Translated from the original German docstring.)
    """
    # ISO 639-1 language code.
    language_code = models.CharField(max_length=2,help_text=_("Sprachcode nach ISO 639-1."))
    name = models.CharField(max_length=200)
    sites = models.ManyToManyField(Site)
    # Display position of this section (low to high).
    sortno = models.IntegerField(help_text=_("An welcher Stelle soll diese Sektion angezeigt werden? von niedrig nach hoch."))

    def __unicode__(self):
        return u'%s - %s' % (self.language_code, self.name)
class Keyword(models.Model):
    """
    A keyword belonging to one KeywordSection (see the example there).
    (Translated from the original German docstring.)
    """
    # ISO 639-1 language code.
    language_code = models.CharField(max_length=2,help_text=_("Sprachcode nach ISO 639-1."))
    section = models.ForeignKey('KeywordSection')
    name = models.CharField(max_length=200)

    def __unicode__(self):
        return u'%s - %s' % (self.section, self.name)
class KeywordProduct(models.Model):
    """
    Assigns any number of keywords to a product.
    (Translated from the original German docstring.)
    """
    keyword = models.ManyToManyField('Keyword',related_name='keywords', help_text=_("Die mit dem Produkt zusammenhaengenden Keywords auswaehlen."))
    product = models.ForeignKey('Product')

    def __unicode__(self):
        # NOTE(review): `self.keyword` is the related manager of the M2M
        # field, so this renders the manager's repr, not the keywords --
        # probably not intended; confirm before relying on this output.
        return u'%s - %s' % (self.product, self.keyword)
class TaxClass(models.Model):
    """
    Defines the tax rates; usually 19 and 7 percent.
    (Translated from the original German docstring.)
    """
    # Tax rate in whole percent, e.g. 19.
    percent = models.PositiveIntegerField()
    description = models.CharField(max_length=255, null=True, blank=True)

    def __unicode__(self):
        return u'%s' % self.percent
class MixGroup(models.Model):
    """A category of mix ingredients.

    NOTE(review): `limit`/`minimum` presumably bound how many ingredients of
    this group may/must be chosen -- confirm against the selection logic.
    """
    name = models.CharField(max_length=50)
    limit = models.IntegerField()
    minimum = models.IntegerField()
    # Ordering position of the group (used e.g. by MixProduct.get_picture_name).
    order = models.IntegerField()

    def __unicode__(self):
        return u'%s' % (self.name)
class MixSize(models.Model):
    """An ingredient size: an amount plus its unit (e.g. '100 ml')."""
    unit = models.CharField(max_length=50)
    amount = models.PositiveIntegerField()

    def __unicode__(self):
        return u'%s %s' % (self.amount, self.unit)
class MixIng(models.Model):
    """A single mixable ingredient, offered in one size within a MixGroup."""
    group = models.ForeignKey('MixGroup')
    size = models.ForeignKey('MixSize')
    name = models.CharField(max_length=255)
    price = models.DecimalField(max_digits=10,decimal_places=2)
    description = models.TextField(null=True, blank=True)

    def __unicode__(self):
        return u'%s %s' % (self.name, self.size)

    def get_price_str(self):
        # Price formatted with a comma decimal separator, e.g. '1,50 EUR'.
        return '%s EUR' % moneyfmt(decimal.Decimal(self.price), places=2, dp=',')

    def get_slug(self):
        # URL-safe version of the ingredient name.
        return slugify(self.name)
class MixProduct(models.Model):
    """A user-composed mix of MixIng ingredients."""

    user = models.ForeignKey(User, null=True, blank=True)
    creat_date = models.DateTimeField(auto_now_add=True)
    name = models.CharField(max_length=255, null=True, blank=True)
    isOrdered = models.BooleanField()
    isActive = models.BooleanField()

    def __unicode__(self):
        if self.name and not self.name == '':
            return u'%s' % self.name
        return 'MIX%s' % self.id

    def get_price(self):
        """Return the total price: a fixed 2.50 base fee plus the price of
        every ingredient in this mix."""
        # NOTE(review): 2.5 base fee is a magic constant -- presumably a
        # packaging/handling charge; confirm.
        total = decimal.Decimal('2.5')
        for mpi in MixProductIng.objects.filter(product=self):
            total += mpi.ing.price
        return total

    def get_price_str(self):
        """Return the total price formatted as 'x,yy EUR'."""
        return '%s EUR' % moneyfmt(decimal.Decimal(self.get_price()), places=2, dp=',')

    def get_orderno(self):
        """Return the synthetic order number ('M100' + primary key)."""
        return 'M100%s' % self.id

    def get_ml(self):
        """Return the largest ingredient size amount in this mix (0 if empty).

        NOTE(review): named get_ml, but units come from MixSize.unit --
        confirm all sizes are really in ml.
        """
        # Renamed the local accumulator from `max`, which shadowed the
        # builtin of the same name.
        largest = 0
        for mpi in MixProductIng.objects.filter(product=self):
            if mpi.ing.size.amount > largest:
                largest = mpi.ing.size.amount
        return largest

    def get_ml100price(self):
        """Return the price per 100 units of volume."""
        return decimal.Decimal(self.get_price()) / self.get_ml() * 100

    def get_ml100price_str(self):
        """Return the price per 100 units formatted as 'x,yy'."""
        return moneyfmt(decimal.Decimal(self.get_price()) / self.get_ml() * 100, places=2, dp=',')

    def get_absolute_url(self, langcode=settings.SHOPPY_DEFAULT_LANGUAGE_CODE):
        """Return the canonical detail URL for this mix."""
        return urlresolvers.reverse('shoppy_products_mixdetail',
            kwargs={'mixproduct_id': self.id})

    def get_picture_name(self):
        """Return the slug of the ingredient in the group with order == 2,
        or 'nopic' if none exists.

        NOTE(review): the meaning of group order 2 is not visible here --
        confirm which group drives the picture.
        """
        name = 'nopic'
        for mpi in MixProductIng.objects.filter(product=self):
            if mpi.ing.group.order == 2:
                name = mpi.ing.get_slug()
        return name
class MixProductIng(models.Model):
    """Join table: one ingredient contained in a MixProduct."""
    product = models.ForeignKey('MixProduct')
    ing = models.ForeignKey('MixIng')
class Rating(models.Model):
    """
    A rating given to a product. Ratings can only be submitted by
    registered users.
    (Translated from the original German docstring.)
    """
    product = models.ForeignKey('Product',help_text=_(u"Welches Produkt wird bewertet?"))
    user = models.ForeignKey(User,help_text=_(u"Eine Bewertung muss von einem Mitglied abgegeben worden sein."))
    # Rating value, usually 1-5 (1 = bad, 5 = very good) per the help text.
    rating = models.IntegerField(help_text=_(u"Bewertung für das Produkt. idr 1-5, 1=schlecht 5=sehr gut."))
    text = models.TextField(help_text=_(u"Jede Bewertung sollte auch entsprechend begründet sein."))
    added_date = models.DateTimeField(auto_now_add=True)
    # A rating can be deactivated (e.g. by moderation).
    is_active = models.BooleanField(help_text=_(u"Eine Bewertung kann deaktiviert sein/werden."))

    def __unicode__(self):
        return u'%s von %s' % (self.product, self.user.username)
class LinkSection(models.Model):
    """
    Product links can be assigned to a category, e.g. 'matching' or
    'also bought'.
    (Translated from the original German docstring.)
    """
    name = models.CharField(max_length=255)
class Link(models.Model):
    """
    Products can be linked to each other, e.g. because they are similar or
    go well together.
    (Translated from the original German docstring.)
    """
    section = models.ForeignKey('LinkSection')
    # The main product of the link.
    productmain = models.ForeignKey('Product',
        help_text=_(u"Hier steht das Hauptprodukt."))
    # BUG FIX: the original related_name 'Product Links' contains a space and
    # is not a valid Python identifier; Django rejects it in system checks
    # and it could never be used as a reverse accessor anyway.
    productlinks = models.ManyToManyField('Product',
        related_name='product_links',
        help_text=_(u"Die mit dem Hauptprodukt zusammenhaengenden Produkte auswaehlen."))
| {
"content_hash": "a204318341698eb8247958786e145273",
"timestamp": "",
"source": "github",
"line_count": 274,
"max_line_length": 158,
"avg_line_length": 42.24817518248175,
"alnum_prop": 0.6609364201796821,
"repo_name": "pocketone/django-shoppy",
"id": "888108b2fed20c929322cfd62cd1b4ad49091408",
"size": "11624",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shoppy/product/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import itertools
from ..language.ast import Document
def concat_ast(asts):
    """Merge several parsed documents into a single Document.

    The returned Document's definitions are the definitions of every input
    document, concatenated in order.
    """
    merged_definitions = []
    for document in asts:
        merged_definitions.extend(document.definitions)
    return Document(definitions=merged_definitions)
| {
"content_hash": "a5e9250057b4ce3ce0238fb1580186cd",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 67,
"avg_line_length": 22.666666666666668,
"alnum_prop": 0.7401960784313726,
"repo_name": "wandb/client",
"id": "9abebe9245f4d34ef5af35d5cf47d412d0d29398",
"size": "204",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "wandb/vendor/graphql-core-1.1/wandb_graphql/utils/concat_ast.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "4902"
},
{
"name": "Dockerfile",
"bytes": "3491"
},
{
"name": "Jupyter Notebook",
"bytes": "7751"
},
{
"name": "Makefile",
"bytes": "1863"
},
{
"name": "Objective-C",
"bytes": "80764"
},
{
"name": "Python",
"bytes": "3634228"
},
{
"name": "Shell",
"bytes": "4662"
}
],
"symlink_target": ""
} |
"""
Python library for the AR.Drone.
This module was tested with Python 2.6.6 and AR.Drone vanilla firmware 1.5.1.
"""
import socket
import struct
import sys
import threading
import multiprocessing
import arnetwork
__author__ = "Bastian Venthur"
ARDRONE_NAVDATA_PORT = 5554
ARDRONE_VIDEO_PORT = 5555
ARDRONE_COMMAND_PORT = 5556
class ARDrone(object):
    """ARDrone Class.

    Instanciate this class to control your drone and receive decoded video and
    navdata.
    """

    def __init__(self):
        # Sequence number of the next AT command; the drone expects strictly
        # increasing numbers.
        self.seq_nr = 1
        # Interval (seconds) for the communication watchdog timer.
        self.timer_t = 0.2
        self.com_watchdog_timer = threading.Timer(self.timer_t, self.commwdg)
        # Serializes at()/halt() access to seq_nr and the watchdog timer.
        self.lock = threading.Lock()
        # Default movement speed as a fraction of maximum (see set_speed).
        self.speed = 0.2
        self.at(at_config, "general:navdata_demo", "TRUE")
        # Pipes to the network process: video frames, navdata and a control
        # channel used by halt() for shutdown.
        self.video_pipe, video_pipe_other = multiprocessing.Pipe()
        self.nav_pipe, nav_pipe_other = multiprocessing.Pipe()
        self.com_pipe, com_pipe_other = multiprocessing.Pipe()
        self.network_process = arnetwork.ARDroneNetworkProcess(nav_pipe_other, video_pipe_other, com_pipe_other)
        self.network_process.start()
        # IPC thread copies decoded data from the pipes into this object.
        self.ipc_thread = arnetwork.IPCThread(self)
        self.ipc_thread.start()
        # Latest decoded video frame and navdata, filled in by the IPC thread.
        self.image = ""
        self.navdata = dict()
        self.time = 0

    def takeoff(self):
        """Make the drone takeoff."""
        # Flat trim first, then raise the altitude ceiling before takeoff.
        self.at(at_ftrim)
        self.at(at_config, "control:altitude_max", "20000")
        self.at(at_ref, True)

    def land(self):
        """Make the drone land."""
        self.at(at_ref, False)

    def hover(self):
        """Make the drone hover."""
        self.at(at_pcmd, False, 0, 0, 0, 0)

    def move_left(self):
        """Make the drone move left."""
        self.at(at_pcmd, True, -self.speed, 0, 0, 0)

    def move_right(self):
        """Make the drone move right."""
        self.at(at_pcmd, True, self.speed, 0, 0, 0)

    def move_up(self):
        """Make the drone rise upwards."""
        self.at(at_pcmd, True, 0, 0, self.speed, 0)

    def move_down(self):
        """Make the drone decent downwards."""
        self.at(at_pcmd, True, 0, 0, -self.speed, 0)

    def move_forward(self):
        """Make the drone move forward."""
        self.at(at_pcmd, True, 0, -self.speed, 0, 0)

    def move_backward(self):
        """Make the drone move backwards."""
        self.at(at_pcmd, True, 0, self.speed, 0, 0)

    def turn_left(self):
        """Make the drone rotate left."""
        self.at(at_pcmd, True, 0, 0, 0, -self.speed)

    def turn_right(self):
        """Make the drone rotate right."""
        self.at(at_pcmd, True, 0, 0, 0, self.speed)

    def perform_op(self, mlmr, mfmb, mumd, rlrr):
        """mlmr, -mfmb, mumd, rlrr preforms the set operation positive is right, forward, and rotate right speeds should be in the range of -1 to 1"""
        # Note the sign flip on mfmb: PCMD uses negative for forward.
        self.at(at_pcmd, True, mlmr, -mfmb, mumd, rlrr)

    def reset(self):
        """Toggle the drone's emergency state."""
        self.at(at_ref, False, True)
        self.at(at_ref, False, False)

    def trim(self):
        """Flat trim the drone."""
        self.at(at_ftrim)

    def set_speed(self, speed):
        """Set the drone's speed.

        Valid values are floats from [0..1]
        """
        self.speed = speed

    def at(self, cmd, *args, **kwargs):
        """Wrapper for the low level at commands.

        This method takes care that the sequence number is increased after each
        at command and the watchdog timer is started to make sure the drone
        receives a command at least every second.
        """
        self.lock.acquire()
        # Stop the pending watchdog, send the command, then re-arm it so the
        # link stays alive even when the caller goes idle.
        self.com_watchdog_timer.cancel()
        cmd(self.seq_nr, *args, **kwargs)
        self.seq_nr += 1
        self.com_watchdog_timer = threading.Timer(self.timer_t, self.commwdg)
        self.com_watchdog_timer.start()
        self.lock.release()

    def commwdg(self):
        """Communication watchdog signal.

        This needs to be send regulary to keep the communication w/ the drone
        alive.
        """
        self.at(at_comwdg)

    def halt(self):
        """Shutdown the drone.

        This method does not land or halt the actual drone, but the
        communication with the drone. You should call it at the end of your
        application to close all sockets, pipes, processes and threads related
        with this object.
        """
        self.lock.acquire()
        self.com_watchdog_timer.cancel()
        # Ask the network process to exit, then force-terminate and reap it.
        self.com_pipe.send('die!')
        self.network_process.terminate()
        self.network_process.join()
        self.ipc_thread.stop()
        self.ipc_thread.join()
        self.lock.release()
###############################################################################
### Low level AT Commands
###############################################################################
def at_ref(seq, takeoff, emergency=False):
    """
    Basic behaviour of the drone: take-off/landing, emergency stop/reset.

    Parameters:
    seq -- sequence number
    takeoff -- True: takeoff / False: land
    emergency -- True: turn off the engines
    """
    # Base bit pattern required by the firmware; bits 8 and 9 are clear in
    # it, so OR-ing the flags in is equivalent to the additive form.
    flags = 0b10001010101000000000000000000
    if takeoff:
        flags |= 1 << 9
    if emergency:
        flags |= 1 << 8
    at("REF", seq, [flags])
def at_pcmd(seq, progressive, lr, fb, vv, va):
    """
    Makes the drone move (translate/rotate).

    Parameters:
    seq -- sequence number
    progressive -- True: enable progressive commands, False: disable (i.e.
                   enable hovering mode)
    lr -- left-right tilt: float [-1..1] negative: left, positive: right
    fb -- front-back tilt: float [-1..1] negative: forwards, positive:
          backwards
    vv -- vertical speed: float [-1..1] negative: go down, positive: rise
    va -- angular speed: float [-1..1] negative: spin left, positive: spin
          right

    The above float values are a percentage of the maximum speed.
    """
    # Flag field: 1 enables progressive commands, 0 selects hover mode.
    mode = int(bool(progressive))
    at("PCMD", seq, [mode, float(lr), float(fb), float(vv), float(va)])
def at_ftrim(seq):
    """
    Tell the drone it's lying horizontally.

    Parameters:
    seq -- sequence number
    """
    # FTRIM carries no payload beyond the sequence number.
    at("FTRIM", seq, [])
def at_zap(seq, stream):
    """
    Selects which video stream to send on the video UDP port.

    Parameters:
    seq -- sequence number
    stream -- Integer: video stream to broadcast
    """
    # FIXME: improve parameters to select the modes directly
    at("ZAP", seq, [stream])
def at_config(seq, option, value):
    """Set configuration parameters of the drone.

    Parameters:
    seq -- sequence number
    option -- configuration key, e.g. "general:navdata_demo"
    value -- value to set (sent as a string)
    """
    at("CONFIG", seq, [str(option), str(value)])
def at_comwdg(seq):
    """
    Reset communication watchdog.

    Parameters:
    seq -- sequence number
    """
    # FIXME: no sequence number
    at("COMWDG", seq, [])
def at_aflight(seq, flag):
    """
    Makes the drone fly autonomously.

    Parameters:
    seq -- sequence number
    flag -- Integer: 1: start flight, 0: stop flight
    """
    at("AFLIGHT", seq, [flag])
def at_pwm(seq, m1, m2, m3, m4):
    """
    Sends control values directly to the engines, overriding control loops.

    Parameters:
    seq -- sequence number
    m1 -- front left command
    m2 -- fright right command
    m3 -- back right command
    m4 -- back left command

    NOTE: not implemented -- the body is a stub and sends nothing.
    """
    # FIXME: what type do mx have?
    pass
def at_led(seq, anim, f, d):
    """
    Control the drones LED.

    Parameters:
    seq -- sequence number
    anim -- Integer: animation to play
    f -- ?: frequence in HZ of the animation
    d -- Integer: total duration in seconds of the animation

    NOTE: not implemented -- the body is a stub and sends nothing.
    """
    pass
def at_anim(seq, anim, d):
    """
    Makes the drone execute a predefined movement (animation).

    Parameters:
    seq -- sequcence number
    anim -- Integer: animation to play
    d -- Integer: total duration in sections of the animation
    """
    at("ANIM", seq, [anim, d])
def at(command, seq, params):
    """Format a single AT command and send it to the drone over UDP.

    Parameters:
    command -- the command
    seq -- the sequence number
    params -- a list of elements which can be either int, float or string

    Parameters of any other type are silently dropped (note that `type()` is
    compared exactly, so bool values are dropped too -- matching the
    original behavior relied on by the at_* callers above).
    """
    param_str = ''
    for p in params:
        if type(p) == int:
            param_str += ",%d" % p
        elif type(p) == float:
            # Floats are transmitted as the signed-integer reinterpretation
            # of their IEEE-754 bit pattern (see f2i below).
            param_str += ",%d" % f2i(p)
        elif type(p) == str:
            param_str += ',"' + p + '"'
    msg = "AT*%s=%i%s\r" % (command, seq, param_str)
    # BUG FIX: the socket was created on every call and never closed, leaking
    # a file descriptor until garbage collection; close it deterministically.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        sock.sendto(msg, ("192.168.1.1", ARDRONE_COMMAND_PORT))
    finally:
        sock.close()
def f2i(f):
    """Interpret IEEE-754 floating-point value as signed integer.

    Arguments:
    f -- floating point value
    """
    packed = struct.pack('f', f)
    (as_int,) = struct.unpack('i', packed)
    return as_int
###############################################################################
### navdata
###############################################################################
def decode_navdata(packet):
    """Decode a navdata packet.

    Returns a dict with 'drone_state' (decoded status bit flags), 'header',
    'seq_nr', 'vision_flag', plus one entry per navdata option block keyed by
    its numeric id. Only option id 0 (the demo block) is decoded into named
    fields; other blocks keep their raw byte list.

    NOTE(review): written for Python 2 -- the per-byte "".join(values) below
    relies on str/bytes being the same type.
    """
    offset = 0
    # 16-byte header: (magic, state bitfield, sequence number, vision flag).
    _ = struct.unpack_from("IIII", packet, offset)
    drone_state = dict()
    # Decode the state bitfield into named boolean flags (comments are from
    # the official SDK's config.h descriptions).
    drone_state['fly_mask'] = _[1] & 1 # FLY MASK : (0) ardrone is landed, (1) ardrone is flying
    drone_state['video_mask'] = _[1] >> 1 & 1 # VIDEO MASK : (0) video disable, (1) video enable
    drone_state['vision_mask'] = _[1] >> 2 & 1 # VISION MASK : (0) vision disable, (1) vision enable */
    drone_state['control_mask'] = _[1] >> 3 & 1 # CONTROL ALGO (0) euler angles control, (1) angular speed control */
    drone_state['altitude_mask'] = _[1] >> 4 & 1 # ALTITUDE CONTROL ALGO : (0) altitude control inactive (1) altitude control active */
    drone_state['user_feedback_start'] = _[1] >> 5 & 1 # USER feedback : Start button state */
    drone_state['command_mask'] = _[1] >> 6 & 1 # Control command ACK : (0) None, (1) one received */
    drone_state['fw_file_mask'] = _[1] >> 7 & 1 # Firmware file is good (1) */
    drone_state['fw_ver_mask'] = _[1] >> 8 & 1 # Firmware update is newer (1) */
    drone_state['fw_upd_mask'] = _[1] >> 9 & 1 # Firmware update is ongoing (1) */
    drone_state['navdata_demo_mask'] = _[1] >> 10 & 1 # Navdata demo : (0) All navdata, (1) only navdata demo */
    drone_state['navdata_bootstrap'] = _[1] >> 11 & 1 # Navdata bootstrap : (0) options sent in all or demo mode, (1) no navdata options sent */
    drone_state['motors_mask'] = _[1] >> 12 & 1 # Motor status : (0) Ok, (1) Motors problem */
    drone_state['com_lost_mask'] = _[1] >> 13 & 1 # Communication lost : (1) com problem, (0) Com is ok */
    drone_state['vbat_low'] = _[1] >> 15 & 1 # VBat low : (1) too low, (0) Ok */
    drone_state['user_el'] = _[1] >> 16 & 1 # User Emergency Landing : (1) User EL is ON, (0) User EL is OFF*/
    drone_state['timer_elapsed'] = _[1] >> 17 & 1 # Timer elapsed : (1) elapsed, (0) not elapsed */
    drone_state['angles_out_of_range'] = _[1] >> 19 & 1 # Angles : (0) Ok, (1) out of range */
    drone_state['ultrasound_mask'] = _[1] >> 21 & 1 # Ultrasonic sensor : (0) Ok, (1) deaf */
    drone_state['cutout_mask'] = _[1] >> 22 & 1 # Cutout system detection : (0) Not detected, (1) detected */
    drone_state['pic_version_mask'] = _[1] >> 23 & 1 # PIC Version number OK : (0) a bad version number, (1) version number is OK */
    drone_state['atcodec_thread_on'] = _[1] >> 24 & 1 # ATCodec thread ON : (0) thread OFF (1) thread ON */
    drone_state['navdata_thread_on'] = _[1] >> 25 & 1 # Navdata thread ON : (0) thread OFF (1) thread ON */
    drone_state['video_thread_on'] = _[1] >> 26 & 1 # Video thread ON : (0) thread OFF (1) thread ON */
    drone_state['acq_thread_on'] = _[1] >> 27 & 1 # Acquisition thread ON : (0) thread OFF (1) thread ON */
    drone_state['ctrl_watchdog_mask'] = _[1] >> 28 & 1 # CTRL watchdog : (1) delay in control execution (> 5ms), (0) control is well scheduled */
    drone_state['adc_watchdog_mask'] = _[1] >> 29 & 1 # ADC Watchdog : (1) delay in uart2 dsr (> 5ms), (0) uart2 is good */
    drone_state['com_watchdog_mask'] = _[1] >> 30 & 1 # Communication Watchdog : (1) com problem, (0) Com is ok */
    drone_state['emergency_mask'] = _[1] >> 31 & 1 # Emergency landing : (0) no emergency, (1) emergency */
    data = dict()
    data['drone_state'] = drone_state
    data['header'] = _[0]
    data['seq_nr'] = _[2]
    data['vision_flag'] = _[3]
    offset += struct.calcsize("IIII")
    # Walk the option blocks: each starts with (id, size) as two uint16,
    # where size covers the header itself; stop at the end of the packet.
    while 1:
        try:
            id_nr, size = struct.unpack_from("HH", packet, offset)
            offset += struct.calcsize("HH")
        except struct.error:
            break
        values = []
        for i in range(size-struct.calcsize("HH")):
            values.append(struct.unpack_from("c", packet, offset)[0])
            offset += struct.calcsize("c")
        # navdata_tag_t in navdata-common.h
        if id_nr == 0:
            values = struct.unpack_from("IIfffIfffI", "".join(values))
            values = dict(zip(['ctrl_state', 'battery', 'theta', 'phi', 'psi', 'altitude', 'vx', 'vy', 'vz', 'num_frames'], values))
            # convert the millidegrees into degrees and round to int, as they
            # are not so precise anyways
            for i in 'theta', 'phi', 'psi':
                values[i] = int(values[i] / 1000)
                #values[i] /= 1000
        data[id_nr] = values
    return data
if __name__ == "__main__":
    import termios
    import fcntl
    import os

    # Put the terminal into raw-ish, non-blocking mode so single key presses
    # are delivered immediately without echo.
    fd = sys.stdin.fileno()
    oldterm = termios.tcgetattr(fd)
    newattr = termios.tcgetattr(fd)
    newattr[3] = newattr[3] & ~termios.ICANON & ~termios.ECHO
    termios.tcsetattr(fd, termios.TCSANOW, newattr)

    oldflags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, oldflags | os.O_NONBLOCK)

    drone = ARDrone()

    try:
        # Keyboard control loop. IOError from the non-blocking read simply
        # means "no key pressed yet" and is ignored.
        # Keys: w/a/s/d move, q/e rotate, 1/3 up/down, 2/x hover,
        # Enter takeoff, Space land, t reset, y trim.
        while 1:
            try:
                c = sys.stdin.read(1)
                c = c.lower()
                print "Got character", c
                if c == 'a':
                    drone.move_left()
                if c == 'd':
                    drone.move_right()
                if c == 'w':
                    drone.move_forward()
                if c == 's':
                    drone.move_backward()
                if c == ' ':
                    drone.land()
                if c == '\n':
                    drone.takeoff()
                if c == 'q':
                    drone.turn_left()
                if c == 'e':
                    drone.turn_right()
                if c == '1':
                    drone.move_up()
                if c == '2':
                    drone.hover()
                if c == '3':
                    drone.move_down()
                if c == 't':
                    drone.reset()
                if c == 'x':
                    drone.hover()
                if c == 'y':
                    drone.trim()
            except IOError:
                pass
    finally:
        # Restore the terminal settings and shut down the drone connection.
        termios.tcsetattr(fd, termios.TCSAFLUSH, oldterm)
        fcntl.fcntl(fd, fcntl.F_SETFL, oldflags)
        drone.halt()
| {
"content_hash": "6909c3acd20bf5a22565708977a306aa",
"timestamp": "",
"source": "github",
"line_count": 426,
"max_line_length": 150,
"avg_line_length": 35.21830985915493,
"alnum_prop": 0.5465573551956275,
"repo_name": "kfreedland/quadrotor",
"id": "95ac214b27c25052f8ebffa04480e6e92e5fa44e",
"size": "16099",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libardrone.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20839"
}
],
"symlink_target": ""
} |
from typing import Optional
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import (
UniformFloatHyperparameter,
UnParametrizedHyperparameter,
)
from autosklearn.askl_typing import FEAT_TYPE_TYPE
from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
from autosklearn.pipeline.constants import DENSE, PREDICTIONS, UNSIGNED_DATA
from autosklearn.util.common import check_for_bool
class ARDRegression(AutoSklearnRegressionAlgorithm):
    """Auto-sklearn wrapper around sklearn.linear_model.ARDRegression."""

    def __init__(
        self,
        n_iter,
        tol,
        alpha_1,
        alpha_2,
        lambda_1,
        lambda_2,
        threshold_lambda,
        fit_intercept,
        random_state=None,
    ):
        # Hyperparameters may arrive as strings from the configuration
        # space; they are coerced to their proper types in fit().
        self.random_state = random_state
        self.estimator = None
        self.n_iter = n_iter
        self.tol = tol
        self.alpha_1 = alpha_1
        self.alpha_2 = alpha_2
        self.lambda_1 = lambda_1
        self.lambda_2 = lambda_2
        self.threshold_lambda = threshold_lambda
        self.fit_intercept = fit_intercept

    def fit(self, X, y):
        """Fit an ARD regression model on X, y and return self."""
        from sklearn.linear_model import ARDRegression

        # Coerce string-typed hyperparameters to their numeric/bool types.
        self.n_iter = int(self.n_iter)
        self.tol = float(self.tol)
        self.alpha_1 = float(self.alpha_1)
        self.alpha_2 = float(self.alpha_2)
        self.lambda_1 = float(self.lambda_1)
        self.lambda_2 = float(self.lambda_2)
        self.threshold_lambda = float(self.threshold_lambda)
        self.fit_intercept = check_for_bool(self.fit_intercept)

        self.estimator = ARDRegression(
            n_iter=self.n_iter,
            tol=self.tol,
            alpha_1=self.alpha_1,
            alpha_2=self.alpha_2,
            lambda_1=self.lambda_1,
            lambda_2=self.lambda_2,
            compute_score=False,
            threshold_lambda=self.threshold_lambda,
            # BUG FIX: the original hard-coded fit_intercept=True, silently
            # ignoring the hyperparameter parsed above. The search space
            # only ever emits "True" today, so current behavior is unchanged.
            fit_intercept=self.fit_intercept,
            # NOTE(review): `normalize` was removed in scikit-learn 1.2;
            # False matches the old default, so this only matters for the
            # sklearn version this file targets -- confirm the pin.
            normalize=False,
            copy_X=False,
            verbose=False,
        )
        # sklearn expects 1-d targets; flatten a single-column 2-d y.
        if y.ndim == 2 and y.shape[1] == 1:
            y = y.flatten()
        self.estimator.fit(X, y)
        return self

    def predict(self, X):
        """Predict targets for X with the fitted estimator.

        Raises NotImplementedError if fit() has not been called.
        """
        if self.estimator is None:
            raise NotImplementedError
        return self.estimator.predict(X)

    @staticmethod
    def get_properties(dataset_properties=None):
        """Describe the component's capabilities for the pipeline builder."""
        return {
            "shortname": "ARD",
            "name": "ARD Regression",
            "handles_regression": True,
            "handles_classification": False,
            "handles_multiclass": False,
            "handles_multilabel": False,
            "handles_multioutput": False,
            "prefers_data_normalized": True,
            "is_deterministic": True,
            "input": (DENSE, UNSIGNED_DATA),
            "output": (PREDICTIONS,),
        }

    @staticmethod
    def get_hyperparameter_search_space(
        feat_type: Optional[FEAT_TYPE_TYPE] = None, dataset_properties=None
    ):
        """Build the ConfigSpace searched for this component."""
        cs = ConfigurationSpace()
        n_iter = UnParametrizedHyperparameter("n_iter", value=300)
        tol = UniformFloatHyperparameter(
            "tol", 10**-5, 10**-1, default_value=10**-3, log=True
        )
        # NOTE(review): alpha_1 is the only precision hyperparameter sampled
        # without log=True -- possibly an oversight, but changing it would
        # alter the search space, so it is kept as-is.
        alpha_1 = UniformFloatHyperparameter(
            name="alpha_1", lower=10**-10, upper=10**-3, default_value=10**-6
        )
        alpha_2 = UniformFloatHyperparameter(
            name="alpha_2",
            log=True,
            lower=10**-10,
            upper=10**-3,
            default_value=10**-6,
        )
        lambda_1 = UniformFloatHyperparameter(
            name="lambda_1",
            log=True,
            lower=10**-10,
            upper=10**-3,
            default_value=10**-6,
        )
        lambda_2 = UniformFloatHyperparameter(
            name="lambda_2",
            log=True,
            lower=10**-10,
            upper=10**-3,
            default_value=10**-6,
        )
        threshold_lambda = UniformFloatHyperparameter(
            name="threshold_lambda",
            log=True,
            lower=10**3,
            upper=10**5,
            default_value=10**4,
        )
        fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True")
        cs.add_hyperparameters(
            [
                n_iter,
                tol,
                alpha_1,
                alpha_2,
                lambda_1,
                lambda_2,
                threshold_lambda,
                fit_intercept,
            ]
        )
        return cs
| {
"content_hash": "db5e946dd1102f198ba60f6482b0ea6e",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 79,
"avg_line_length": 29.892617449664428,
"alnum_prop": 0.5484957341715312,
"repo_name": "automl/auto-sklearn",
"id": "758c4b04d73ed54064b4e3f5f028777062e022d3",
"size": "4454",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "autosklearn/pipeline/components/regression/ard_regression.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "950"
},
{
"name": "Makefile",
"bytes": "3513"
},
{
"name": "Python",
"bytes": "2008151"
},
{
"name": "Shell",
"bytes": "4744"
}
],
"symlink_target": ""
} |
import pika
import uuid
import tasks.task_pb2 as pb
import requests
import json
import re
# from tasks.task_pb2 import Response  (superseded by the tasks.task_pb2 import above)
import sys
class imageRpcClient(object):
    """Blocking RPC-over-RabbitMQ client for submitting transcoding tasks.

    Publishes a serialized protobuf Task to the 'rpc_queue2' queue and
    waits for the worker's reply on an exclusive callback queue.
    NOTE(review): uses the pre-1.0 pika API (callback-first basic_consume,
    no_ack keyword) -- confirm the installed pika version matches.
    """
    def __init__(self):
        # Connect to the broker on host 'master' and declare an exclusive,
        # server-named queue to receive this client's replies.
        self.connection = pika.BlockingConnection(pika.ConnectionParameters(
                host='master'))
        self.channel = self.connection.channel()
        result = self.channel.queue_declare(exclusive=True)
        self.callback_queue = result.method.queue
        self.channel.basic_consume(self.on_response, no_ack=True,
                                   queue=self.callback_queue)
    def on_response(self, ch, method, props, body):
        # Accept only the reply whose correlation id matches our request;
        # any other message on the callback queue is ignored.
        if self.corr_id == props.correlation_id:
            self.response = body
    def call(self, n):
        # Publish protobuf task *n* and block until the reply body arrives.
        # The task's priority field doubles as the AMQP message priority.
        self.response = None
        self.corr_id = str(uuid.uuid4())
        self.channel.basic_publish(exchange='',
                                   routing_key='rpc_queue2',
                                   properties=pika.BasicProperties(
                                         reply_to = self.callback_queue,
                                         correlation_id = self.corr_id,
                                         priority=n.priority,
                                         ),
                                   body=n.SerializeToString()
                                   )
        # Pump the connection's I/O loop until on_response stores the reply.
        while self.response is None:
            self.connection.process_data_events()
        return self.response
# REST endpoint of the file-upload service. The upload calls below are all
# commented out, so this is currently unused.
url = "http://localhost:3000/files"
#file_path = "sender.py" # the path of the file to send
task= pb.Task()
# Usage: rpc_clientpy.py <input> <output> <ext> [priority]
# With exactly 3 arguments the priority defaults to 0; with 4 or more the
# 4th argument is the priority.
if len(sys.argv) < 4:
    print ("Please supply an input and output filename e.g. go run rpc_client.go input.jpg output.jpg jpg 1")
    sys.exit(-1)
elif len(sys.argv) == 4:
    # HTTP upload (and the FileId it would produce) is disabled.
    #files = {'file': open(sys.argv[1], 'rb')}
    #r = requests.post(url, files=files)
    #json_data = json.loads(r.text)
    #file_id = json_data["_id"]
    task.priority=0
    task.filename = sys.argv[1] #file_path
    task.new_name = sys.argv[2]
    #task.FileId = sys.argv[3]
else:
    # Priority explicitly supplied as the 4th CLI argument.
    #files = {'file': open(sys.argv[1], 'rb')}
    #r = requests.post(url, files=files)
    #json_data = json.loads(r.text)
    #file_id = json_data["_id"]
    task.filename = sys.argv[1]
    task.new_name = sys.argv[2]
    task.priority = int(sys.argv[4])
    #task.FileId = file_id
# Fire the RPC and report where the processed image landed.
image_rpc = imageRpcClient()
print(" [x] Requesting image")
response = image_rpc.call(task)
res=pb.Response()
res.ParseFromString(response)
file_loc=res.file_location
print(" [.] Image processed found in %s" % file_loc)
| {
"content_hash": "df941eb97a1945678ed8800deb513406",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 107,
"avg_line_length": 31.580246913580247,
"alnum_prop": 0.5684128225175918,
"repo_name": "fercamp09/elastic-transcoder",
"id": "9e72e930daa211faba484a8c44b62b9929c331eb",
"size": "2580",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rpc_clientpy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "111"
},
{
"name": "Go",
"bytes": "13388"
},
{
"name": "HTML",
"bytes": "442"
},
{
"name": "JavaScript",
"bytes": "10354"
},
{
"name": "Makefile",
"bytes": "413"
},
{
"name": "Protocol Buffer",
"bytes": "1008"
},
{
"name": "Python",
"bytes": "3305"
},
{
"name": "Ruby",
"bytes": "4019"
}
],
"symlink_target": ""
} |
import os
import re
import sys
import subprocess
import StringIO
from glob import glob
# Directory scanned for *.lua test cases.
test_dir = "tests"
# Reference luajit executable (must be reachable on PATH).
luajit_exec = "luajit"
# Locally built luajit under test.
luajit_x = "./src/luajit-x"
# External diff tool used to report listing mismatches.
diff_exec = "diff"
def lua_files(test_dir):
    """Yield (directory, basename) pairs for every *.lua file under test_dir.

    Filenames within each directory are visited in sorted order.
    """
    lua_pattern = re.compile(r'([^.]+)\.lua$')
    for dirpath, _dirnames, filenames in os.walk(test_dir):
        for filename in sorted(filenames):
            match = lua_pattern.match(filename)
            if match:
                yield dirpath, match.group(1)
class LabelSource:
    """Map arbitrary label strings to stable sequential names like X001."""

    def __init__(self):
        self.index = 1
        self.defs = {}

    def get(self, lab):
        # First request for a label allocates the next sequential name;
        # later requests return the same name.
        if lab not in self.defs:
            self.defs[lab] = "X%03d" % self.index
            self.index += 1
        return self.defs[lab]
def proto_lines(bcfile):
    """Yield one prototype's lines, stopping at the first blank line.

    A line consisting only of whitespace terminates the prototype and is
    consumed but not yielded.
    """
    blank = re.compile(r'\s*$')
    for line in bcfile:
        if blank.match(line):
            break
        yield line
def normalize(source, outfile):
    """Rewrite one prototype's listing so jump targets use stable labels.

    Each input line looks like 'NNNN <ref> <rest>' where <ref> is either
    spaces or '=>' (marking the line as a jump target). Numeric jump
    destinations inside instructions ('INS reg => NNNN') and target line
    numbers are replaced with sequential X-names so listings produced by
    different builds compare equal line-for-line.
    """
    labels = LabelSource()
    for line in source:
        rline = None
        # Split the fixed-width prefix: 4-digit line number, target marker,
        # remainder. Lines not matching this shape raise AttributeError.
        m = re.match(r'(\d{4}) ( |=>) (.*)', line)
        lab, ref, rem = m.groups()
        # Strip stray carriage returns (Windows tool output).
        rem = re.sub(r'\r+', r'', rem)
        mr = re.match(r'([A-Z0-9]+\s+)(\d+) => (\d+)(.*)', rem)
        if mr:
            ins, reg, jmp, xrem = mr.groups()
            # Rename the numeric jump destination to its stable label.
            jmp = labels.get(jmp)
            rem = "%s%s => %s%s" % (ins, reg, jmp, xrem)
        if ref == '=>':
            # This line is a jump target: emit its stable label instead of
            # the raw line number.
            lab = labels.get(lab)
        else:
            lab = "    "
        rline = "%4s %s %s\n" % (lab, ref, rem)
        outfile.write(rline)
def parse(bcfile, outfile):
    """Copy each '-- BYTECODE --' header and its normalized prototype.

    Scans the listing for bytecode section headers; every header line is
    written through, followed by the normalized body of that prototype.
    """
    for line in bcfile:
        if re.match(r'-- BYTECODE -- ', line):
            outfile.write(line)
            normalize(proto_lines(bcfile), outfile)
def do_process(cmd, dst):
    """Run *cmd* and stream its normalized bytecode listing into *dst*."""
    pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE).stdout
    parse(pipe, dst)
def do_process_output(cmd):
    """Run *cmd* and return its normalized bytecode listing as a string."""
    buf = StringIO.StringIO()
    try:
        do_process(cmd, buf)
        return buf.getvalue()
    finally:
        buf.close()
def expected_bytecode(name, fullname):
    """Yield (normalized_listing, source_tag) pairs for one test case.

    The first pair comes from the reference luajit executable; further
    pairs come from any tests/expect/<name>.expectN.txt files.
    """
    yield do_process_output([luajit_exec, "-bl", fullname]), "luajit"
    expect_dir = os.path.join("tests", "expect")
    expect_pattern = re.compile(r'([^.]+)\.(expect\d+)\.txt$')
    for expect_filename in glob(os.path.join(expect_dir, "*.txt")):
        m = expect_pattern.match(os.path.basename(expect_filename))
        if not (m and m.group(1) == name):
            continue
        ef = open(expect_filename, "r")
        sf = StringIO.StringIO()
        try:
            parse(ef, sf)
            yield sf.getvalue(), m.group(2)
        finally:
            ef.close()
            sf.close()
def write_diff(a, b, a_name, b_name):
    """Dump both listings plus their unified diff under tests/log/."""
    path_a = "tests/log/%s.txt" % a_name
    path_b = "tests/log/%s.%s.txt" % (a_name, b_name)
    for path, text in ((path_a, a), (path_b, b)):
        handle = open(path, "w")
        handle.write(text)
        handle.close()
    # 4 lines of context makes the surrounding instructions visible.
    diff_output = subprocess.Popen([diff_exec, "-U", "4", path_a, path_b],
                                   stdout=subprocess.PIPE).communicate()[0]
    diff_file = open("tests/log/%s.%s.diff" % (a_name, b_name), "w")
    diff_file.write(diff_output)
    diff_file.close()
def compare_to_ref(name, fullname, output_test):
    """Return ('pass', tag) when the output matches any reference.

    On each mismatch a diff is written; if no reference matches at all the
    result is ('fail', None).
    """
    for reference, tag in expected_bytecode(name, fullname):
        if reference == output_test:
            return "pass", tag
        write_diff(output_test, reference, name, tag)
    return "fail", None
# Ensure the tests/log output directory exists.
if not os.path.isdir("tests/log"):
    try:
        print "Creating directory tests/log..."
        os.mkdir("tests/log")
    except:
        print "Error creating directory tests/log."
        sys.exit(1)
# Bail out early when the reference interpreter is unavailable.
try:
    subprocess.check_call([luajit_exec, "-e", ""])
except:
    print "Error calling luajit."
    print "Please make sure that luajit executable is in the current PATH."
    sys.exit(1)
# Start each run from a clean log directory.
for filename in glob("tests/log/*"):
    os.remove(filename)
# Compare the bytecode of every test case against the reference(s); a '*'
# marker flags failures, and the matching reference tag is shown when it is
# not the default luajit listing.
for dirpath, name in lua_files(test_dir):
    fullname = os.path.join(dirpath, name + ".lua")
    output_test = do_process_output([luajit_x, "-bl", fullname])
    msg, source = compare_to_ref(name, fullname, output_test)
    led = " " if msg == "pass" else "*"
    msg_ext = "%s / %s" % (msg, source) if source and source != "luajit" else msg
    print("%s %-24s%s" % (led, name, msg_ext))
| {
"content_hash": "d2d22ade52d6c9513370336576964ddd",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 106,
"avg_line_length": 25.510791366906474,
"alnum_prop": 0.6291596164692611,
"repo_name": "gaoxiaojun/symphony",
"id": "f9d9b5546da59099e3b0e44053614e5be2e9adec",
"size": "3546",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ref/luajit-lang-toolkit/scripts/test-bytecode.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "139072"
},
{
"name": "Batchfile",
"bytes": "73246"
},
{
"name": "C",
"bytes": "30921714"
},
{
"name": "C#",
"bytes": "15495"
},
{
"name": "C++",
"bytes": "258761"
},
{
"name": "CMake",
"bytes": "20197"
},
{
"name": "CSS",
"bytes": "19482"
},
{
"name": "Groff",
"bytes": "24272"
},
{
"name": "HTML",
"bytes": "957694"
},
{
"name": "Haskell",
"bytes": "13352"
},
{
"name": "Lex",
"bytes": "5491"
},
{
"name": "Lua",
"bytes": "1385572"
},
{
"name": "Makefile",
"bytes": "173584"
},
{
"name": "Objective-C",
"bytes": "22731"
},
{
"name": "Perl",
"bytes": "176777"
},
{
"name": "Protocol Buffer",
"bytes": "35719"
},
{
"name": "Python",
"bytes": "9610"
},
{
"name": "QMake",
"bytes": "20054"
},
{
"name": "Rust",
"bytes": "1494"
},
{
"name": "Shell",
"bytes": "643419"
},
{
"name": "Tcl",
"bytes": "721239"
},
{
"name": "TeX",
"bytes": "2454"
},
{
"name": "XSLT",
"bytes": "303"
},
{
"name": "Yacc",
"bytes": "74504"
}
],
"symlink_target": ""
} |
import argparse
import pytest
from paasta_tools.cli.cli import get_argparser
from paasta_tools.cli.cli import main
def each_command():
    """Collect every real paasta subcommand name from the argparser."""
    parser = get_argparser()
    # We're doing some wacky inspection here, let's make sure things are sane:
    # argparse should expose exactly one _SubParsersAction.
    [subparsers] = [
        action
        for action in parser._actions
        if isinstance(action, argparse._SubParsersAction)
    ]
    # Drop the dummy help command; 'paasta help --help' is nonsense.
    commands = tuple(set(subparsers.choices) - {'help'})
    assert commands
    assert 'local-run' in commands
    return commands
@pytest.mark.parametrize('cmd', each_command())
def test_help(cmd, capfd):
    """Every subcommand must exit 0 on --help and mention itself on stdout."""
    with pytest.raises(SystemExit) as exc_info:
        main((cmd, '--help'))
    assert exc_info.value.code == 0
    stdout, _stderr = capfd.readouterr()
    assert cmd in stdout
def test_invalid_arguments_returns_non_zero():
    """An unknown flag must make the CLI exit with status 1."""
    with pytest.raises(SystemExit) as exc_info:
        main(('get-latest-deployment', '--herp'))
    assert exc_info.value.code == 1
| {
"content_hash": "c52183de429c66d066fb95b490130bd8",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 77,
"avg_line_length": 28.583333333333332,
"alnum_prop": 0.6822157434402333,
"repo_name": "somic/paasta",
"id": "79d1a4e7945fb1c9dccbd58a63c16688c66594c3",
"size": "1607",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/cli/test_cmds_help.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "71885"
},
{
"name": "Makefile",
"bytes": "6598"
},
{
"name": "Python",
"bytes": "3231060"
},
{
"name": "Shell",
"bytes": "16324"
}
],
"symlink_target": ""
} |
from django import forms
from django.db.models import get_model
from django.forms.util import ErrorList
# save this file as: forms.py
# type "adminform" and press Tab to expand the snippet and create your items!
"content_hash": "707f261253cff4ca1ae6e6538bc634d3",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 42,
"avg_line_length": 28.833333333333332,
"alnum_prop": 0.7861271676300579,
"repo_name": "vigo/my-custom-textmate1-bundle",
"id": "c0bbdb72f1c2176b963a620167937421e5b0c6b1",
"size": "310",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Templates/DJANGO : forms_py.tmTemplate/forms.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "AppleScript",
"bytes": "2104"
},
{
"name": "CSS",
"bytes": "116078"
},
{
"name": "HTML",
"bytes": "34320"
},
{
"name": "JavaScript",
"bytes": "5391"
},
{
"name": "Python",
"bytes": "11266"
},
{
"name": "Ruby",
"bytes": "724"
},
{
"name": "Shell",
"bytes": "187"
}
],
"symlink_target": ""
} |
import os, time, inspect
import cPickle as pickle
# Internal modules #
from plumbing.autopaths import FilePath
# Third party modules #
from decorator import decorator
################################################################################
def property_cached(f):
    """Decorator turning a method into a lazily-evaluated, cached property.

    The first access stores the result in the instance's ``__cache__``
    dict; later accesses return the stored value without re-running *f*.
    Assigning to the property overwrites the cached value. Generator
    functions are materialized into a tuple before being cached.
    """
    name = f.__name__

    def _cache_of(obj):
        # Per-instance cache dict, created on first use.
        if '__cache__' not in obj.__dict__:
            obj.__cache__ = {}
        return obj.__cache__

    def getter(self):
        cache = _cache_of(self)
        if name not in cache:
            if inspect.isgeneratorfunction(f):
                value = tuple(f(self))
            else:
                value = f(self)
            cache[name] = value
        return cache[name]

    def setter(self, value):
        _cache_of(self)[name] = value

    getter.__doc__ = f.__doc__
    return property(getter, setter)
################################################################################
def property_pickled(f):
    """Same thing as above but the result will be stored on disk.

    The path of the pickle file will be determined by looking for the
    `cache_dir` attribute of the instance containing the cached property.
    If no `cache_dir` attribute exists the `p` attribute will be accessed
    with the name of the property being cached.

    NOTE(review): Python 2 only -- uses ``f.func_name`` and text-mode
    pickle files; porting to Python 3 needs ``f.__name__`` and binary mode.
    """
    # Called when you access the property #
    def retrieve_from_cache(self):
        # Is it in the cache ? #
        if '__cache__' not in self.__dict__: self.__cache__ = {}
        if f.__name__ in self.__cache__: return self.__cache__[f.__name__]
        # Where should we look in the file system ? #
        if 'cache_dir' in self.__dict__:
            path = FilePath(self.__dict__['cache_dir'] + f.func_name + '.pickle')
        else:
            path = getattr(self.p, f.func_name)
        # Is it on disk ? #
        if path.exists:
            with open(path) as handle: result = pickle.load(handle)
            self.__cache__[f.__name__] = result
            return result
        # Otherwise let's compute it #
        result = f(self)
        with open(path, 'w') as handle: pickle.dump(result, handle)
        self.__cache__[f.__name__] = result
        return result
    # Called when you set the property #
    def overwrite_cache(self, value):
        # Assigning None deletes the pickle from disk; any other assignment
        # is rejected. The in-memory __cache__ entry is NOT cleared here --
        # presumably intentional, but worth confirming.
        path = getattr(self.p, f.func_name)
        if value is None: os.remove(path)
        else: raise Exception("You can't set a pickled property, you can only delete it")
    # Return a wrapper #
    retrieve_from_cache.__doc__ = f.__doc__
    return property(retrieve_from_cache, overwrite_cache)
################################################################################
def expiry_every(seconds=0):
    """Decorator factory: memoize a function's result for *seconds* seconds.

    A single (result, timestamp) slot is stored on the wrapped function, so
    every call signature shares one cached value; after the expiry window a
    fresh call replaces it.
    """
    def memoize_with_expiry(func, *args, **kwargs):
        # Lazily attach the one-slot cache to the wrapped function.
        if not hasattr(func, '__cache__'):
            func.__cache__ = [(0, 0)]
        slot = func.__cache__
        if slot:
            cached_result, stamp = slot[0]
            if time.time() - stamp < seconds:
                return cached_result
        # Expired (or first call): recompute and refresh the slot.
        fresh = func(*args, **kwargs)
        slot[0] = (fresh, time.time())
        return fresh
    return decorator(memoize_with_expiry)
###############################################################################
class LazyString(object):
    """A string-like object that computes its value once, on first access.

    NOTE(review): a function that returns None is re-invoked on every
    access, because None doubles as the "not yet computed" marker; this
    matches the original behavior and is kept for compatibility.
    """

    def __str__(self):
        return self.value

    def __init__(self, function):
        # _value stays None until the first access of .value.
        self._value = None
        self.function = function

    @property
    def value(self):
        # BUG FIX: compare with 'is', not '=='. Equality would invoke an
        # arbitrary __eq__ on the cached value, which can wrongly trigger
        # recomputation (or crash) for objects with unusual comparisons.
        if self._value is None:
            self._value = self.function()
        return self._value
"content_hash": "ad3845c690489c11cc1f09e1be1fe963",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 89,
"avg_line_length": 39.23364485981308,
"alnum_prop": 0.5409718913768461,
"repo_name": "DC23/plumbing",
"id": "a014fdb58f8eb0392f5baf8714c79bef0cac34d4",
"size": "4219",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plumbing/cache.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "121403"
}
],
"symlink_target": ""
} |
"""Unit tests for the DB API."""
import copy
import datetime
import iso8601
import types
import uuid as stdlib_uuid
import mox
import netaddr
from oslo.config import cfg
from sqlalchemy.dialects import sqlite
from sqlalchemy import exc
from sqlalchemy.exc import IntegrityError
from sqlalchemy import MetaData
from sqlalchemy.orm import exc as sqlalchemy_orm_exc
from sqlalchemy.orm import query
from sqlalchemy.sql.expression import select
from nova import block_device
from nova.compute import vm_states
from nova import context
from nova import db
from nova.db.sqlalchemy import api as sqlalchemy_api
from nova.db.sqlalchemy import models
from nova.db.sqlalchemy import utils as db_utils
from nova import exception
from nova.openstack.common.db import exception as db_exc
from nova.openstack.common.db.sqlalchemy import session as db_session
from nova.openstack.common import timeutils
from nova.openstack.common import uuidutils
from nova.quota import ReservableResource
from nova.quota import resources
from nova import test
from nova.tests import matchers
from nova import utils
CONF = cfg.CONF
# Pull in the resource-tracker reservation options so tests can read them.
CONF.import_opt('reserved_host_memory_mb', 'nova.compute.resource_tracker')
CONF.import_opt('reserved_host_disk_mb', 'nova.compute.resource_tracker')

# Convenience aliases for the session module's factory functions.
get_engine = db_session.get_engine
get_session = db_session.get_session
def _quota_reserve(context, project_id, user_id):
    """Create sample Quota, QuotaUsage and Reservation objects.

    There is no method db.quota_usage_create(), so we have to use
    db.quota_reserve() for creating QuotaUsage objects.

    Returns reservations uuids.
    """
    def get_sync(resource, usage):
        # Build a fake quota-sync callable that always reports *usage*
        # for *resource*.
        def sync(elevated, project_id, user_id, session):
            return {resource: usage}
        return sync
    quotas = {}
    user_quotas = {}
    resources = {}
    deltas = {}
    # Create three resources (resource0..resource2), each with limit and
    # delta equal to its index.
    for i in range(3):
        resource = 'resource%d' % i
        sync_name = '_sync_%s' % resource
        quotas[resource] = db.quota_create(context, project_id, resource, i)
        user_quotas[resource] = db.quota_create(context, project_id,
                                                resource, i, user_id=user_id)
        resources[resource] = ReservableResource(
            resource, sync_name, 'quota_res_%d' % i)
        deltas[resource] = i
        # NOTE: the sync function is monkey-patched onto the sqlalchemy_api
        # module and registered in QUOTA_SYNC_FUNCTIONS so quota_reserve()
        # can look it up by name at runtime.
        setattr(sqlalchemy_api, sync_name, get_sync(resource, i))
        sqlalchemy_api.QUOTA_SYNC_FUNCTIONS[sync_name] = getattr(
            sqlalchemy_api, sync_name)
    return db.quota_reserve(context, resources, quotas, user_quotas, deltas,
                            timeutils.utcnow(), timeutils.utcnow(),
                            datetime.timedelta(days=1), project_id, user_id)
class DbTestCase(test.TestCase):
    """Common fixture: a request context plus instance/metadata helpers."""

    def setUp(self):
        super(DbTestCase, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id, self.project_id)

    def create_instance_with_args(self, **kwargs):
        """Create an instance, merging *kwargs* over sensible defaults.

        A 'context' kwarg, when given, supplies both the request context
        and the default project id.
        """
        values = {'reservation_id': 'a', 'image_ref': 1, 'host': 'host1',
                  'node': 'node1', 'project_id': self.project_id,
                  'vm_state': 'fake'}
        if 'context' in kwargs:
            ctxt = kwargs.pop('context')
            values['project_id'] = ctxt.project_id
        else:
            ctxt = self.context
        values.update(kwargs)
        return db.instance_create(ctxt, values)

    def fake_metadata(self, content):
        """Return ten deterministic key/value pairs mentioning *content*."""
        return dict(("foo%i" % i, "this is %s item %i" % (content, i))
                    for i in range(10))

    def create_metadata_for_instance(self, instance_uuid):
        """Attach fake user and system metadata to an instance."""
        user_meta = self.fake_metadata('metadata')
        db.instance_metadata_update(self.context, instance_uuid,
                                    user_meta, False)
        system_meta = self.fake_metadata('system_metadata')
        db.instance_system_metadata_update(self.context, instance_uuid,
                                           system_meta, False)
        return user_meta, system_meta
class DecoratorTestCase(test.TestCase):
    """Verify the DB API decorators keep the wrapped function's metadata."""

    def _test_decorator_wraps_helper(self, decorator):
        def test_func():
            """Test docstring."""

        wrapped = decorator(test_func)
        # Name, docstring and module must all survive the wrapping.
        for attr in ('func_name', '__doc__', '__module__'):
            self.assertEquals(getattr(test_func, attr),
                              getattr(wrapped, attr))

    def test_require_context_decorator_wraps_functions_properly(self):
        self._test_decorator_wraps_helper(sqlalchemy_api.require_context)

    def test_require_admin_context_decorator_wraps_functions_properly(self):
        self._test_decorator_wraps_helper(sqlalchemy_api.require_admin_context)
def _get_fake_aggr_values():
return {'name': 'fake_aggregate'}
def _get_fake_aggr_metadata():
return {'fake_key1': 'fake_value1',
'fake_key2': 'fake_value2',
'availability_zone': 'fake_avail_zone'}
def _get_fake_aggr_hosts():
return ['foo.openstack.org']
# Sentinel distinguishing "metadata not given" from an explicit metadata=None
# (which callers use to mean "create without metadata").
_CREATE_AGGR_UNSET = object()


def _create_aggregate(context=None, values=None,
                      metadata=_CREATE_AGGR_UNSET):
    """Create and return a test aggregate.

    Defaults are evaluated per call rather than once at import time: the
    old signature called context.get_admin_context() and built the default
    dicts in the def line, sharing one admin context and mutable dicts
    across every test that relied on the defaults.
    """
    from nova import context as context_module  # param shadows the module name
    if context is None:
        context = context_module.get_admin_context()
    if values is None:
        values = _get_fake_aggr_values()
    if metadata is _CREATE_AGGR_UNSET:
        metadata = _get_fake_aggr_metadata()
    return db.aggregate_create(context, values, metadata)
# Sentinel distinguishing "metadata not given" from an explicit metadata=None.
_CREATE_AGGR_HOSTS_UNSET = object()


def _create_aggregate_with_hosts(context=None, values=None,
                                 metadata=_CREATE_AGGR_HOSTS_UNSET,
                                 hosts=None):
    """Create a test aggregate and register *hosts* against it.

    Defaults are evaluated per call rather than once at import time (the
    old signature called context.get_admin_context() and built the default
    dicts/list in the def line, sharing mutable state across tests).
    """
    from nova import context as context_module  # param shadows the module name
    if context is None:
        context = context_module.get_admin_context()
    if values is None:
        values = _get_fake_aggr_values()
    if metadata is _CREATE_AGGR_HOSTS_UNSET:
        metadata = _get_fake_aggr_metadata()
    if hosts is None:
        hosts = _get_fake_aggr_hosts()
    result = _create_aggregate(context=context,
                               values=values, metadata=metadata)
    for host in hosts:
        db.aggregate_host_add(context, result['id'], host)
    return result
class NotDbApiTestCase(DbTestCase):
    """DB API behaviour against a fake, non-SQL backend ('notdb://')."""

    def setUp(self):
        super(NotDbApiTestCase, self).setUp()
        self.flags(connection='notdb://', group='database')

    def test_instance_get_all_by_filters_regex_unsupported_db(self):
        # Ensure that the 'LIKE' operator is used for unsupported dbs.
        for name in ('test1', 'test.*', 'diff'):
            self.create_instance_with_args(display_name=name)
        matches = db.instance_get_all_by_filters(self.context,
                                                 {'display_name': 'test.*'})
        self.assertEqual(1, len(matches))
        matches = db.instance_get_all_by_filters(self.context,
                                                 {'display_name': '%test%'})
        self.assertEqual(2, len(matches))

    def test_instance_get_all_by_filters_paginate(self):
        created = [self.create_instance_with_args(display_name='test%d' % n)
                   for n in (1, 2, 3)]
        result = db.instance_get_all_by_filters(self.context,
                                                {'display_name': '%test%'},
                                                marker=None)
        self.assertEqual(3, len(result))
        # Walking the markers in sort order leaves one fewer row each time.
        for expected, inst in zip((2, 1, 0), created):
            result = db.instance_get_all_by_filters(self.context,
                                                    {'display_name': '%test%'},
                                                    sort_dir="asc",
                                                    marker=inst['uuid'])
            self.assertEqual(expected, len(result))
        # An unknown marker must raise MarkerNotFound.
        self.assertRaises(exception.MarkerNotFound,
                          db.instance_get_all_by_filters,
                          self.context, {'display_name': '%test%'},
                          marker=str(stdlib_uuid.uuid4()))
class AggregateDBApiTestCase(test.TestCase):
    """Exercise the aggregate CRUD and metadata helpers of the DB API."""

    def setUp(self):
        super(AggregateDBApiTestCase, self).setUp()
        # A plain (non-admin) request context; individual tests build their
        # own admin context where one is needed.
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id, self.project_id)
    def test_aggregate_create_no_metadata(self):
        """Creating without metadata still records the aggregate name."""
        result = _create_aggregate(metadata=None)
        self.assertEquals(result['name'], 'fake_aggregate')

    def test_aggregate_create_avoid_name_conflict(self):
        """A deleted aggregate's name can be reused with new metadata."""
        r1 = _create_aggregate(metadata=None)
        db.aggregate_delete(context.get_admin_context(), r1['id'])
        values = {'name': r1['name']}
        metadata = {'availability_zone': 'new_zone'}
        r2 = _create_aggregate(values=values, metadata=metadata)
        self.assertEqual(r2['name'], values['name'])
        self.assertEqual(r2['availability_zone'],
                         metadata['availability_zone'])

    def test_aggregate_create_raise_exist_exc(self):
        """Two live aggregates may not share a name."""
        _create_aggregate(metadata=None)
        self.assertRaises(exception.AggregateNameExists,
                          _create_aggregate, metadata=None)
    def test_aggregate_get_raise_not_found(self):
        """Fetching a nonexistent aggregate raises AggregateNotFound."""
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_get,
                          ctxt, aggregate_id)

    def test_aggregate_metadata_get_raise_not_found(self):
        """Fetching metadata of a nonexistent aggregate raises."""
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_metadata_get,
                          ctxt, aggregate_id)
    def test_aggregate_create_with_metadata(self):
        """Metadata passed at create time is stored verbatim."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(expected_metadata,
                        matchers.DictMatches(_get_fake_aggr_metadata()))

    def test_aggregate_create_delete_create_with_metadata(self):
        """Re-creating after delete starts from only the new metadata."""
        #test for bug 1052479
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(expected_metadata,
                        matchers.DictMatches(_get_fake_aggr_metadata()))
        db.aggregate_delete(ctxt, result['id'])
        result = _create_aggregate(metadata={'availability_zone':
                                             'fake_avail_zone'})
        expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertEqual(expected_metadata, {'availability_zone':
                                             'fake_avail_zone'})

    def test_aggregate_create_low_privi_context(self):
        """Non-admin contexts may not create aggregates."""
        self.assertRaises(exception.AdminRequired,
                          db.aggregate_create,
                          self.context, _get_fake_aggr_values())
    def test_aggregate_get(self):
        """aggregate_get exposes both hosts and metadetails."""
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt)
        expected = db.aggregate_get(ctxt, result['id'])
        self.assertEqual(_get_fake_aggr_hosts(), expected['hosts'])
        self.assertEqual(_get_fake_aggr_metadata(), expected['metadetails'])

    def test_aggregate_get_by_host(self):
        """Only aggregates currently containing the host are returned."""
        ctxt = context.get_admin_context()
        values2 = {'name': 'fake_aggregate2'}
        values3 = {'name': 'fake_aggregate3'}
        values4 = {'name': 'fake_aggregate4'}
        values5 = {'name': 'fake_aggregate5'}
        a1 = _create_aggregate_with_hosts(context=ctxt)
        a2 = _create_aggregate_with_hosts(context=ctxt, values=values2)
        # a3 has no hosts and should not be in the results.
        a3 = _create_aggregate(context=ctxt, values=values3)
        # a4 has no matching hosts.
        a4 = _create_aggregate_with_hosts(context=ctxt, values=values4,
                hosts=['foo4.openstack.org'])
        # a5 has no matching hosts after deleting the only matching host.
        a5 = _create_aggregate_with_hosts(context=ctxt, values=values5,
                hosts=['foo5.openstack.org', 'foo.openstack.org'])
        db.aggregate_host_delete(ctxt, a5['id'],
                                 'foo.openstack.org')
        r1 = db.aggregate_get_by_host(ctxt, 'foo.openstack.org')
        self.assertEqual([a1['id'], a2['id']], [x['id'] for x in r1])
    def test_aggregate_get_by_host_with_key(self):
        """key= narrows results to aggregates carrying that metadata key."""
        ctxt = context.get_admin_context()
        values2 = {'name': 'fake_aggregate2'}
        values3 = {'name': 'fake_aggregate3'}
        values4 = {'name': 'fake_aggregate4'}
        a1 = _create_aggregate_with_hosts(context=ctxt,
                                          metadata={'goodkey': 'good'})
        _create_aggregate_with_hosts(context=ctxt, values=values2)
        _create_aggregate(context=ctxt, values=values3)
        _create_aggregate_with_hosts(context=ctxt, values=values4,
                hosts=['foo4.openstack.org'], metadata={'goodkey': 'bad'})
        # filter result by key
        r1 = db.aggregate_get_by_host(ctxt, 'foo.openstack.org', key='goodkey')
        self.assertEqual([a1['id']], [x['id'] for x in r1])

    def test_aggregate_metadata_get_by_host(self):
        """Metadata is merged across the host's aggregates; others excluded."""
        ctxt = context.get_admin_context()
        values = {'name': 'fake_aggregate2'}
        values2 = {'name': 'fake_aggregate3'}
        _create_aggregate_with_hosts(context=ctxt)
        _create_aggregate_with_hosts(context=ctxt, values=values)
        _create_aggregate_with_hosts(context=ctxt, values=values2,
                hosts=['bar.openstack.org'], metadata={'badkey': 'bad'})
        r1 = db.aggregate_metadata_get_by_host(ctxt, 'foo.openstack.org')
        self.assertEqual(r1['fake_key1'], set(['fake_value1']))
        self.assertFalse('badkey' in r1)
    def test_aggregate_metadata_get_by_metadata_key(self):
        """Lookup by metadata key returns only that key's values."""
        ctxt = context.get_admin_context()
        values = {'aggregate_id': 'fake_id',
                  'name': 'fake_aggregate'}
        aggr = _create_aggregate_with_hosts(context=ctxt, values=values,
                                            hosts=['bar.openstack.org'],
                                            metadata={'availability_zone':
                                                      'az1'})
        r1 = db.aggregate_metadata_get_by_metadata_key(ctxt, aggr['id'],
                                                       'availability_zone')
        self.assertEqual(r1['availability_zone'], set(['az1']))
        self.assertTrue('availability_zone' in r1)
        self.assertFalse('name' in r1)

    def test_aggregate_metadata_get_by_host_with_key(self):
        """key= merges values across aggregates; deletion removes them."""
        ctxt = context.get_admin_context()
        values2 = {'name': 'fake_aggregate12'}
        values3 = {'name': 'fake_aggregate23'}
        a2_hosts = ['foo1.openstack.org', 'foo2.openstack.org']
        a2_metadata = {'good': 'value12', 'bad': 'badvalue12'}
        a3_hosts = ['foo2.openstack.org', 'foo3.openstack.org']
        a3_metadata = {'good': 'value23', 'bad': 'badvalue23'}
        a1 = _create_aggregate_with_hosts(context=ctxt)
        a2 = _create_aggregate_with_hosts(context=ctxt, values=values2,
                hosts=a2_hosts, metadata=a2_metadata)
        a3 = _create_aggregate_with_hosts(context=ctxt, values=values3,
                hosts=a3_hosts, metadata=a3_metadata)
        r1 = db.aggregate_metadata_get_by_host(ctxt, 'foo2.openstack.org',
                                               key='good')
        self.assertEqual(r1['good'], set(['value12', 'value23']))
        self.assertFalse('fake_key1' in r1)
        self.assertFalse('bad' in r1)
        # Delete metadata
        db.aggregate_metadata_delete(ctxt, a3['id'], 'good')
        r2 = db.aggregate_metadata_get_by_host(ctxt, 'foo.openstack.org',
                                               key='good')
        self.assertFalse('good' in r2)
    def test_aggregate_host_get_by_metadata_key(self):
        """Each host maps to the set of values it carries for the key."""
        ctxt = context.get_admin_context()
        values2 = {'name': 'fake_aggregate12'}
        values3 = {'name': 'fake_aggregate23'}
        a2_hosts = ['foo1.openstack.org', 'foo2.openstack.org']
        a2_metadata = {'good': 'value12', 'bad': 'badvalue12'}
        a3_hosts = ['foo2.openstack.org', 'foo3.openstack.org']
        a3_metadata = {'good': 'value23', 'bad': 'badvalue23'}
        a1 = _create_aggregate_with_hosts(context=ctxt)
        a2 = _create_aggregate_with_hosts(context=ctxt, values=values2,
                hosts=a2_hosts, metadata=a2_metadata)
        a3 = _create_aggregate_with_hosts(context=ctxt, values=values3,
                hosts=a3_hosts, metadata=a3_metadata)
        r1 = db.aggregate_host_get_by_metadata_key(ctxt, key='good')
        # foo2 belongs to both aggregates, so it collects both values.
        self.assertEqual({
            'foo1.openstack.org': set(['value12']),
            'foo2.openstack.org': set(['value12', 'value23']),
            'foo3.openstack.org': set(['value23']),
        }, r1)
        self.assertFalse('fake_key1' in r1)

    def test_aggregate_get_by_host_not_found(self):
        """An unknown host yields an empty list, not an error."""
        ctxt = context.get_admin_context()
        _create_aggregate_with_hosts(context=ctxt)
        self.assertEqual([], db.aggregate_get_by_host(ctxt, 'unknown_host'))
    def test_aggregate_delete_raise_not_found(self):
        """Deleting a nonexistent aggregate raises AggregateNotFound."""
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_delete,
                          ctxt, aggregate_id)

    def test_aggregate_delete(self):
        """Delete is soft: hidden normally, visible with read_deleted."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        db.aggregate_delete(ctxt, result['id'])
        expected = db.aggregate_get_all(ctxt)
        self.assertEqual(0, len(expected))
        aggregate = db.aggregate_get(ctxt.elevated(read_deleted='yes'),
                                     result['id'])
        self.assertEqual(aggregate['deleted'], True)
    def test_aggregate_update(self):
        """Updating an aggregate can change its availability zone."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata={'availability_zone':
                                                           'fake_avail_zone'})
        self.assertEqual(result['availability_zone'], 'fake_avail_zone')
        new_values = _get_fake_aggr_values()
        new_values['availability_zone'] = 'different_avail_zone'
        updated = db.aggregate_update(ctxt, 1, new_values)
        self.assertNotEqual(result['availability_zone'],
                            updated['availability_zone'])

    def test_aggregate_update_with_metadata(self):
        """An update carrying 'metadata' stores the supplied metadata."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        values = _get_fake_aggr_values()
        values['metadata'] = _get_fake_aggr_metadata()
        values['availability_zone'] = 'different_avail_zone'
        db.aggregate_update(ctxt, 1, values)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        updated = db.aggregate_get(ctxt, result['id'])
        self.assertThat(values['metadata'],
                        matchers.DictMatches(expected))
        self.assertNotEqual(result['availability_zone'],
                            updated['availability_zone'])
    def test_aggregate_update_with_existing_metadata(self):
        """Updated metadata overwrites existing keys."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        values = _get_fake_aggr_values()
        values['metadata'] = _get_fake_aggr_metadata()
        values['metadata']['fake_key1'] = 'foo'
        db.aggregate_update(ctxt, 1, values)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(values['metadata'], matchers.DictMatches(expected))

    def test_aggregate_update_zone_with_existing_metadata(self):
        """Changing only the AZ leaves the other metadata intact."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        new_zone = {'availability_zone': 'fake_avail_zone_2'}
        metadata = _get_fake_aggr_metadata()
        metadata.update(new_zone)
        db.aggregate_update(ctxt, result['id'], new_zone)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))

    def test_aggregate_update_raise_not_found(self):
        """Updating a nonexistent aggregate raises AggregateNotFound."""
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        new_values = _get_fake_aggr_values()
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_update, ctxt, aggregate_id, new_values)
def test_aggregate_get_all(self):
ctxt = context.get_admin_context()
counter = 3
for c in range(counter):
_create_aggregate(context=ctxt,
values={'name': 'fake_aggregate_%d' % c},
metadata=None)
results = db.aggregate_get_all(ctxt)
self.assertEqual(len(results), counter)
def test_aggregate_get_all_non_deleted(self):
ctxt = context.get_admin_context()
add_counter = 5
remove_counter = 2
aggregates = []
for c in range(1, add_counter):
values = {'name': 'fake_aggregate_%d' % c}
aggregates.append(_create_aggregate(context=ctxt,
values=values, metadata=None))
for c in range(1, remove_counter):
db.aggregate_delete(ctxt, aggregates[c - 1]['id'])
results = db.aggregate_get_all(ctxt)
self.assertEqual(len(results), add_counter - remove_counter)
    def test_aggregate_metadata_add(self):
        """aggregate_metadata_add() stores all given key/value pairs."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        metadata = _get_fake_aggr_metadata()
        db.aggregate_metadata_add(ctxt, result['id'], metadata)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_metadata_update(self):
        """Re-adding a previously deleted key updates its value."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        metadata = _get_fake_aggr_metadata()
        key = metadata.keys()[0]  # Python 2: keys() returns a list
        db.aggregate_metadata_delete(ctxt, result['id'], key)
        new_metadata = {key: 'foo'}
        db.aggregate_metadata_add(ctxt, result['id'], new_metadata)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        metadata[key] = 'foo'
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_metadata_delete(self):
        """Deleting one metadata key leaves the other keys intact."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        metadata = _get_fake_aggr_metadata()
        db.aggregate_metadata_add(ctxt, result['id'], metadata)
        # metadata is not mutated between the two keys()[0] calls, so both
        # refer to the same key (Python 2 list-returning keys()).
        db.aggregate_metadata_delete(ctxt, result['id'], metadata.keys()[0])
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        del metadata[metadata.keys()[0]]
        self.assertThat(metadata, matchers.DictMatches(expected))
def test_aggregate_remove_availability_zone(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt, metadata={'availability_zone':
'fake_avail_zone'})
db.aggregate_metadata_delete(ctxt, result['id'], 'availability_zone')
expected = db.aggregate_metadata_get(ctxt, result['id'])
aggregate = db.aggregate_get(ctxt, result['id'])
self.assertEquals(aggregate['availability_zone'], None)
self.assertThat({}, matchers.DictMatches(expected))
    def test_aggregate_metadata_delete_raise_not_found(self):
        """Deleting a missing key raises AggregateMetadataNotFound."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        self.assertRaises(exception.AggregateMetadataNotFound,
                          db.aggregate_metadata_delete,
                          ctxt, result['id'], 'foo_key')
def test_aggregate_host_add(self):
ctxt = context.get_admin_context()
result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
expected = db.aggregate_host_get_all(ctxt, result['id'])
self.assertEqual(_get_fake_aggr_hosts(), expected)
    def test_aggregate_host_re_add(self):
        """A host can be re-added after being deleted from an aggregate."""
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        host = _get_fake_aggr_hosts()[0]
        db.aggregate_host_delete(ctxt, result['id'], host)
        db.aggregate_host_add(ctxt, result['id'], host)
        expected = db.aggregate_host_get_all(ctxt, result['id'])
        self.assertEqual(len(expected), 1)
    def test_aggregate_host_add_duplicate_works(self):
        """The same host may belong to two different aggregates."""
        ctxt = context.get_admin_context()
        r1 = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        r2 = _create_aggregate_with_hosts(ctxt,
                          values={'name': 'fake_aggregate2'},
                          metadata={'availability_zone': 'fake_avail_zone2'})
        h1 = db.aggregate_host_get_all(ctxt, r1['id'])
        h2 = db.aggregate_host_get_all(ctxt, r2['id'])
        self.assertEqual(h1, h2)
    def test_aggregate_host_add_duplicate_raise_exist_exc(self):
        """Adding a host twice to one aggregate raises AggregateHostExists."""
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        self.assertRaises(exception.AggregateHostExists,
                          db.aggregate_host_add,
                          ctxt, result['id'], _get_fake_aggr_hosts()[0])
    def test_aggregate_host_add_raise_not_found(self):
        """Adding a host to a missing aggregate raises AggregateNotFound."""
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        host = _get_fake_aggr_hosts()[0]
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_host_add,
                          ctxt, aggregate_id, host)
    def test_aggregate_host_delete(self):
        """Deleting the only host leaves the aggregate with no hosts."""
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        db.aggregate_host_delete(ctxt, result['id'],
                                 _get_fake_aggr_hosts()[0])
        expected = db.aggregate_host_get_all(ctxt, result['id'])
        self.assertEqual(0, len(expected))
    def test_aggregate_host_delete_raise_not_found(self):
        """Deleting a host not in the aggregate raises AggregateHostNotFound."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        self.assertRaises(exception.AggregateHostNotFound,
                          db.aggregate_host_delete,
                          ctxt, result['id'], _get_fake_aggr_hosts()[0])
class SqlAlchemyDbApiTestCase(DbTestCase):
    """Tests that exercise sqlalchemy_api private helpers directly."""

    def test_instance_get_all_by_host(self):
        """Only instances on the requested host are counted."""
        # NOTE(review): this calls _instance_get_all_uuids_by_host, the same
        # helper as the test below — it looks like a copy/paste and
        # presumably was meant to exercise instance_get_all_by_host.
        # Confirm against the DB API before changing.
        ctxt = context.get_admin_context()
        self.create_instance_with_args()
        self.create_instance_with_args()
        self.create_instance_with_args(host='host2')
        result = sqlalchemy_api._instance_get_all_uuids_by_host(ctxt, 'host1')
        self.assertEqual(2, len(result))

    def test_instance_get_all_uuids_by_host(self):
        """The helper returns one unicode uuid per instance on the host."""
        ctxt = context.get_admin_context()
        self.create_instance_with_args()
        self.create_instance_with_args()
        self.create_instance_with_args(host='host2')
        result = sqlalchemy_api._instance_get_all_uuids_by_host(ctxt, 'host1')
        self.assertEqual(2, len(result))
        # types.UnicodeType exists on Python 2 only.
        self.assertEqual(types.UnicodeType, type(result[0]))
class MigrationTestCase(test.TestCase):
    """Tests for db.api.migration_* methods."""

    def setUp(self):
        """Seed a known mix of migrations across hosts, nodes and statuses."""
        super(MigrationTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

        self._create()
        self._create()
        self._create(status='reverted')
        self._create(status='confirmed')
        self._create(source_compute='host2', source_node='b',
                     dest_compute='host1', dest_node='a')
        self._create(source_compute='host2', dest_compute='host3')
        self._create(source_compute='host3', dest_compute='host4')

    def _create(self, status='migrating', source_compute='host1',
                source_node='a', dest_compute='host2', dest_node='b',
                system_metadata=None):
        """Create a migration and its backing instance on source_compute."""
        values = {'host': source_compute}
        instance = db.instance_create(self.ctxt, values)
        if system_metadata:
            db.instance_system_metadata_update(self.ctxt, instance['uuid'],
                                               system_metadata, False)

        values = {'status': status, 'source_compute': source_compute,
                  'source_node': source_node, 'dest_compute': dest_compute,
                  'dest_node': dest_node, 'instance_uuid': instance['uuid']}
        db.migration_create(self.ctxt, values)

    def _assert_in_progress(self, migrations):
        """Assert no migration is in a terminal (confirmed/reverted) state."""
        for migration in migrations:
            self.assertNotEqual('confirmed', migration['status'])
            self.assertNotEqual('reverted', migration['status'])

    def test_migration_get_in_progress_joins(self):
        """The instance and its system_metadata are joined into the result."""
        self._create(source_compute='foo', system_metadata={'foo': 'bar'})
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                                                                   'foo', 'a')
        system_metadata = migrations[0]['instance']['system_metadata'][0]
        self.assertEqual(system_metadata['key'], 'foo')
        self.assertEqual(system_metadata['value'], 'bar')

    def test_in_progress_host1_nodea(self):
        """host1/node 'a' sees only its in-progress migrations."""
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host1', 'a')
        # 2 as source + 1 as dest
        self.assertEqual(3, len(migrations))
        self._assert_in_progress(migrations)

    def test_in_progress_host1_nodeb(self):
        """No migration involves host1 paired with node 'b'."""
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host1', 'b')
        # some migrations are to/from host1, but none with a node 'b'
        self.assertEqual(0, len(migrations))

    def test_in_progress_host2_nodeb(self):
        """host2/node 'b' matches whether it is the source or the dest."""
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host2', 'b')
        # 2 as dest, 1 as source
        self.assertEqual(3, len(migrations))
        self._assert_in_progress(migrations)

    def test_instance_join(self):
        """Each migration row carries its joined instance."""
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host2', 'b')
        for migration in migrations:
            instance = migration['instance']
            self.assertEqual(migration['instance_uuid'], instance['uuid'])

    def test_get_migrations_by_filters(self):
        """Filters match the status and either source or dest host."""
        filters = {"status": "migrating", "host": "host3"}
        migrations = db.migration_get_all_by_filters(self.ctxt, filters)
        self.assertEqual(2, len(migrations))
        for migration in migrations:
            self.assertEqual(filters["status"], migration['status'])
            hosts = [migration['source_compute'], migration['dest_compute']]
            self.assertIn(filters["host"], hosts)

    def test_only_admin_can_get_all_migrations_by_filters(self):
        """Non-admin contexts are rejected with AdminRequired."""
        user_ctxt = context.RequestContext(user_id=None, project_id=None,
                                           is_admin=False, read_deleted="no",
                                           overwrite=False)

        self.assertRaises(exception.AdminRequired,
                          db.migration_get_all_by_filters, user_ctxt, {})

    def test_migration_get_unconfirmed_by_dest_compute(self):
        """Only old, finished, unconfirmed migrations on the dest match."""
        # Ensure no migrations are returned.
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host')
        self.assertEqual(0, len(results))

        # Ensure no migrations are returned.
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host2')
        self.assertEqual(0, len(results))

        updated_at = datetime.datetime(2000, 1, 1, 12, 0, 0)
        values = {"status": "finished", "updated_at": updated_at,
                "dest_compute": "fake_host2"}
        migration = db.migration_create(self.ctxt, values)

        # Ensure different host is not returned
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host')
        self.assertEqual(0, len(results))

        # Ensure one migration older than 10 seconds is returned.
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host2')
        self.assertEqual(1, len(results))
        db.migration_update(self.ctxt, migration['id'],
                            {"status": "CONFIRMED"})

        # Ensure the new migration is not returned.
        updated_at = timeutils.utcnow()
        values = {"status": "finished", "updated_at": updated_at,
                "dest_compute": "fake_host2"}
        migration = db.migration_create(self.ctxt, values)
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                "fake_host2")
        self.assertEqual(0, len(results))
        db.migration_update(self.ctxt, migration['id'],
                            {"status": "CONFIRMED"})
class ModelsObjectComparatorMixin(object):
    """Assertion helpers for comparing DB model objects.

    Model rows expose iteritems(), so the helpers convert them to plain
    dicts — optionally dropping keys expected to differ (ids, timestamps,
    ...) — before comparing.
    """
    def _dict_from_object(self, obj, ignored_keys):
        """Return *obj* as a plain dict without the ignored keys."""
        ignored = ignored_keys if ignored_keys is not None else []
        return dict((key, value) for key, value in obj.iteritems()
                    if key not in ignored)

    def _assertEqualObjects(self, obj1, obj2, ignored_keys=None):
        """Assert both objects have identical keys and values."""
        d1 = self._dict_from_object(obj1, ignored_keys)
        d2 = self._dict_from_object(obj2, ignored_keys)

        mismatch = set(d1.keys()) ^ set(d2.keys())
        self.assertEqual(len(d1), len(d2),
                         "Keys mismatch: %s" % str(mismatch))
        for key, value in d1.iteritems():
            self.assertEqual(value, d2[key])

    def _assertEqualListsOfObjects(self, objs1, objs2, ignored_keys=None):
        """Assert both lists contain equal objects, regardless of order."""
        def to_dict(obj):
            return self._dict_from_object(obj, ignored_keys)

        def sort_key(d):
            # A deterministic ordering over dicts: values in key order.
            return [d[k] for k in sorted(d)]

        def normalize(objs):
            return sorted((to_dict(o) for o in objs), key=sort_key)

        self.assertEqual(normalize(objs1), normalize(objs2))

    def _assertEqualListsOfPrimitivesAsSets(self, primitives1, primitives2):
        """Assert both sequences hold the same primitives, any order."""
        self.assertEqual(len(primitives1), len(primitives2))
        for item in primitives1:
            self.assertIn(item, primitives2)
        for item in primitives2:
            self.assertIn(item, primitives1)
class InstanceSystemMetadataTestCase(test.TestCase):
    """Tests for db.api.instance_system_metadata_* methods."""

    def setUp(self):
        """Create one instance with a single system_metadata pair."""
        super(InstanceSystemMetadataTestCase, self).setUp()
        values = {'host': 'h1', 'project_id': 'p1',
                  'system_metadata': {'key': 'value'}}
        self.ctxt = context.get_admin_context()
        self.instance = db.instance_create(self.ctxt, values)

    def test_instance_system_metadata_get(self):
        """The metadata supplied at create time is returned."""
        metadata = db.instance_system_metadata_get(self.ctxt,
                                                   self.instance['uuid'])
        self.assertEqual(metadata, {'key': 'value'})

    def test_instance_system_metadata_update_new_pair(self):
        """With delete=False a new key is merged with the existing ones."""
        db.instance_system_metadata_update(
                    self.ctxt, self.instance['uuid'],
                    {'new_key': 'new_value'}, False)
        metadata = db.instance_system_metadata_get(self.ctxt,
                                                   self.instance['uuid'])
        self.assertEqual(metadata, {'key': 'value', 'new_key': 'new_value'})

    def test_instance_system_metadata_update_existent_pair(self):
        """An existing key's value is replaced."""
        db.instance_system_metadata_update(
                    self.ctxt, self.instance['uuid'],
                    {'key': 'new_value'}, True)
        metadata = db.instance_system_metadata_get(self.ctxt,
                                                   self.instance['uuid'])
        self.assertEqual(metadata, {'key': 'new_value'})

    def test_instance_system_metadata_update_delete_true(self):
        """With delete=True keys absent from the update are removed."""
        db.instance_system_metadata_update(
                    self.ctxt, self.instance['uuid'],
                    {'new_key': 'new_value'}, True)
        metadata = db.instance_system_metadata_get(self.ctxt,
                                                   self.instance['uuid'])
        self.assertEqual(metadata, {'new_key': 'new_value'})

    @test.testtools.skip("bug 1189462")
    def test_instance_system_metadata_update_nonexistent(self):
        """Updating metadata of a missing instance should raise (skipped)."""
        self.assertRaises(exception.InstanceNotFound,
                          db.instance_system_metadata_update,
                          self.ctxt, 'nonexistent-uuid',
                          {'key': 'value'}, True)
class ReservationTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.reservation_* methods."""

    def setUp(self):
        """Prepare default reservation values (expiring in one day)."""
        super(ReservationTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

        self.values = {'uuid': 'sample-uuid',
                'project_id': 'project1',
                'user_id': 'user1',
                'resource': 'resource',
                'delta': 42,
                'expire': timeutils.utcnow() + datetime.timedelta(days=1),
                'usage': {'id': 1}}

    def test_reservation_create(self):
        """Created reservations keep the values and link to the usage row."""
        reservation = db.reservation_create(self.ctxt, **self.values)
        self._assertEqualObjects(self.values, reservation, ignored_keys=(
                        'deleted', 'updated_at',
                        'deleted_at', 'id',
                        'created_at', 'usage',
                        'usage_id'))
        self.assertEqual(reservation['usage_id'], self.values['usage']['id'])

    def test_reservation_get(self):
        """reservation_get returns the row created with the same uuid."""
        reservation = db.reservation_create(self.ctxt, **self.values)
        reservation_db = db.reservation_get(self.ctxt, self.values['uuid'])
        self._assertEqualObjects(reservation, reservation_db)

    def test_reservation_get_nonexistent(self):
        """Fetching a missing reservation raises ReservationNotFound."""
        self.assertRaises(exception.ReservationNotFound, db.reservation_get,
                    self.ctxt, 'non-exitent-resevation-uuid')

    def test_reservation_commit(self):
        """Commit folds reserved amounts into in_use and drops the rows."""
        reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
        expected = {'project_id': 'project1', 'user_id': 'user1',
                'resource0': {'reserved': 0, 'in_use': 0},
                'resource1': {'reserved': 1, 'in_use': 1},
                'resource2': {'reserved': 2, 'in_use': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                            self.ctxt, 'project1', 'user1'))
        db.reservation_get(self.ctxt, reservations[0])
        db.reservation_commit(self.ctxt, reservations, 'project1', 'user1')
        self.assertRaises(exception.ReservationNotFound,
            db.reservation_get, self.ctxt, reservations[0])
        expected = {'project_id': 'project1', 'user_id': 'user1',
                'resource0': {'reserved': 0, 'in_use': 0},
                'resource1': {'reserved': 0, 'in_use': 2},
                'resource2': {'reserved': 0, 'in_use': 4}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                            self.ctxt, 'project1', 'user1'))

    def test_reservation_rollback(self):
        """Rollback releases reserved amounts and drops the rows."""
        reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
        expected = {'project_id': 'project1', 'user_id': 'user1',
                'resource0': {'reserved': 0, 'in_use': 0},
                'resource1': {'reserved': 1, 'in_use': 1},
                'resource2': {'reserved': 2, 'in_use': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                            self.ctxt, 'project1', 'user1'))
        db.reservation_get(self.ctxt, reservations[0])
        db.reservation_rollback(self.ctxt, reservations, 'project1', 'user1')
        self.assertRaises(exception.ReservationNotFound,
            db.reservation_get, self.ctxt, reservations[0])
        expected = {'project_id': 'project1', 'user_id': 'user1',
                'resource0': {'reserved': 0, 'in_use': 0},
                'resource1': {'reserved': 0, 'in_use': 1},
                'resource2': {'reserved': 0, 'in_use': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                            self.ctxt, 'project1', 'user1'))

    def test_reservation_expire(self):
        """reservation_expire releases all reserved amounts."""
        # NOTE(review): _quota_reserve does not appear to read self.values,
        # so this assignment looks vestigial — confirm before removing.
        self.values['expire'] = timeutils.utcnow() + datetime.timedelta(days=1)
        _quota_reserve(self.ctxt, 'project1', 'user1')
        db.reservation_expire(self.ctxt)

        expected = {'project_id': 'project1', 'user_id': 'user1',
                'resource0': {'reserved': 0, 'in_use': 0},
                'resource1': {'reserved': 0, 'in_use': 1},
                'resource2': {'reserved': 0, 'in_use': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                            self.ctxt, 'project1', 'user1'))
class SecurityGroupRuleTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.security_group_rule_* methods."""

    def setUp(self):
        super(SecurityGroupRuleTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _get_base_values(self):
        """Default values for creating a security group."""
        return {
            'name': 'fake_sec_group',
            'description': 'fake_sec_group_descr',
            'user_id': 'fake',
            'project_id': 'fake',
            'instances': []
            }

    def _get_base_rule_values(self):
        """Default values for creating a security group rule."""
        return {
            'protocol': "tcp",
            'from_port': 80,
            'to_port': 8080,
            'cidr': None,
            'deleted': 0,
            'deleted_at': None,
            'grantee_group': None,
            'updated_at': None
            }

    def _create_security_group(self, values):
        """Create a security group from base values overridden by values."""
        v = self._get_base_values()
        v.update(values)
        return db.security_group_create(self.ctxt, v)

    def _create_security_group_rule(self, values):
        """Create a rule from base rule values overridden by values."""
        v = self._get_base_rule_values()
        v.update(values)
        return db.security_group_rule_create(self.ctxt, v)

    def test_security_group_rule_create(self):
        """Created rules get an id and keep the supplied values."""
        security_group_rule = self._create_security_group_rule({})
        self.assertIsNotNone(security_group_rule['id'])
        for key, value in self._get_base_rule_values().items():
            self.assertEqual(value, security_group_rule[key])

    def test_security_group_rule_get_by_security_group(self):
        """All rules of a parent group are returned."""
        security_group = self._create_security_group({})
        security_group_rule = self._create_security_group_rule(
            {'parent_group': security_group})
        security_group_rule1 = self._create_security_group_rule(
            {'parent_group': security_group})
        found_rules = db.security_group_rule_get_by_security_group(self.ctxt,
                                                        security_group['id'])
        self.assertEqual(len(found_rules), 2)
        rules_ids = [security_group_rule['id'], security_group_rule1['id']]
        for rule in found_rules:
            self.assertIn(rule['id'], rules_ids)

    def test_security_group_rule_get_by_security_group_grantee(self):
        """Rules granting access to a group are found by grantee lookup."""
        security_group = self._create_security_group({})
        security_group_rule = self._create_security_group_rule(
            {'grantee_group': security_group})
        rules = db.security_group_rule_get_by_security_group_grantee(self.ctxt,
                                                        security_group['id'])
        self.assertEqual(len(rules), 1)
        self.assertEqual(rules[0]['id'], security_group_rule['id'])

    def test_security_group_rule_destroy(self):
        """Destroying one rule must not affect other rules."""
        # The two groups only populate the table; the original bound them
        # to unused locals.
        self._create_security_group({'name': 'fake1'})
        self._create_security_group({'name': 'fake2'})
        security_group_rule1 = self._create_security_group_rule({})
        security_group_rule2 = self._create_security_group_rule({})
        db.security_group_rule_destroy(self.ctxt, security_group_rule1['id'])
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_rule_get,
                          self.ctxt, security_group_rule1['id'])
        self._assertEqualObjects(db.security_group_rule_get(self.ctxt,
                                        security_group_rule2['id']),
                                 security_group_rule2, ['grantee_group'])

    def test_security_group_rule_destroy_not_found_exception(self):
        """Destroying a missing rule raises SecurityGroupNotFound."""
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_rule_destroy, self.ctxt, 100500)

    def test_security_group_rule_get(self):
        """security_group_rule_get returns exactly the requested rule."""
        security_group_rule1 = self._create_security_group_rule({})
        # A second rule is created only to ensure the lookup is selective.
        self._create_security_group_rule({})
        real_security_group_rule = db.security_group_rule_get(self.ctxt,
                                        security_group_rule1['id'])
        self._assertEqualObjects(security_group_rule1,
                                 real_security_group_rule, ['grantee_group'])

    def test_security_group_rule_get_not_found_exception(self):
        """Fetching a missing rule raises SecurityGroupNotFound."""
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_rule_get, self.ctxt, 100500)

    def test_security_group_rule_count_by_group(self):
        """Destroyed rules are excluded from the per-group count."""
        sg1 = self._create_security_group({'name': 'fake1'})
        sg2 = self._create_security_group({'name': 'fake2'})
        rules_by_group = {sg1: [], sg2: []}
        for group in rules_by_group:
            rules = rules_by_group[group]
            for i in range(0, 10):
                rules.append(
                    self._create_security_group_rule({'parent_group_id':
                                                    group['id']}))
        db.security_group_rule_destroy(self.ctxt,
                                       rules_by_group[sg1][0]['id'])
        counted_groups = [db.security_group_rule_count_by_group(self.ctxt,
                                                                group['id'])
                          for group in [sg1, sg2]]
        expected = [9, 10]
        self.assertEqual(counted_groups, expected)
class SecurityGroupTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.security_group_* methods."""

    def setUp(self):
        super(SecurityGroupTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _get_base_values(self):
        """Default values for creating a security group."""
        return {
            'name': 'fake_sec_group',
            'description': 'fake_sec_group_descr',
            'user_id': 'fake',
            'project_id': 'fake',
            'instances': []
            }

    def _create_security_group(self, values):
        """Create a security group from base values overridden by values."""
        v = self._get_base_values()
        v.update(values)
        return db.security_group_create(self.ctxt, v)

    def test_security_group_create(self):
        """Created groups get an id and keep the supplied values."""
        security_group = self._create_security_group({})
        # assertIsNotNone instead of assertFalse(... is None), matching
        # usage elsewhere in this file.
        self.assertIsNotNone(security_group['id'])
        for key, value in self._get_base_values().iteritems():
            self.assertEqual(value, security_group[key])

    def test_security_group_destroy(self):
        """Destroying one group must not affect other groups."""
        security_group1 = self._create_security_group({})
        security_group2 = \
            self._create_security_group({'name': 'fake_sec_group2'})

        db.security_group_destroy(self.ctxt, security_group1['id'])
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_get,
                          self.ctxt, security_group1['id'])
        self._assertEqualObjects(db.security_group_get(
                self.ctxt, security_group2['id'],
                columns_to_join=['instances']), security_group2)

    def test_security_group_get(self):
        """security_group_get returns exactly the requested group."""
        security_group1 = self._create_security_group({})
        self._create_security_group({'name': 'fake_sec_group2'})
        real_security_group = db.security_group_get(self.ctxt,
                                              security_group1['id'],
                                              columns_to_join=['instances'])
        self._assertEqualObjects(security_group1,
                                 real_security_group)

    def test_security_group_get_no_instances(self):
        """Instances are joined only when explicitly requested."""
        instance = db.instance_create(self.ctxt, {})
        sid = self._create_security_group({'instances': [instance]})['id']

        # Use one shared session so expunge() detaches the result and any
        # lazy load of 'instances' raises DetachedInstanceError.
        session = get_session()
        self.mox.StubOutWithMock(sqlalchemy_api, 'get_session')
        sqlalchemy_api.get_session().AndReturn(session)
        sqlalchemy_api.get_session().AndReturn(session)
        self.mox.ReplayAll()

        security_group = db.security_group_get(self.ctxt, sid,
                                               columns_to_join=['instances'])
        session.expunge(security_group)
        self.assertEqual(1, len(security_group['instances']))

        security_group = db.security_group_get(self.ctxt, sid)
        session.expunge(security_group)
        self.assertRaises(sqlalchemy_orm_exc.DetachedInstanceError,
                          getattr, security_group, 'instances')

    def test_security_group_get_not_found_exception(self):
        """Fetching a missing group raises SecurityGroupNotFound."""
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_get, self.ctxt, 100500)

    def test_security_group_get_by_name(self):
        """Lookup by (project_id, name) returns the matching group."""
        security_group1 = self._create_security_group({'name': 'fake1'})
        security_group2 = self._create_security_group({'name': 'fake2'})

        real_security_group1 = db.security_group_get_by_name(
                                self.ctxt,
                                security_group1['project_id'],
                                security_group1['name'])
        real_security_group2 = db.security_group_get_by_name(
                                self.ctxt,
                                security_group2['project_id'],
                                security_group2['name'])
        self._assertEqualObjects(security_group1, real_security_group1)
        self._assertEqualObjects(security_group2, real_security_group2)

    def test_security_group_get_by_project(self):
        """Only groups of the requested project are returned."""
        security_group1 = self._create_security_group(
                {'name': 'fake1', 'project_id': 'fake_proj1'})
        security_group2 = self._create_security_group(
                {'name': 'fake2', 'project_id': 'fake_proj2'})

        real1 = db.security_group_get_by_project(
                               self.ctxt,
                               security_group1['project_id'])
        real2 = db.security_group_get_by_project(
                               self.ctxt,
                               security_group2['project_id'])
        expected1, expected2 = [security_group1], [security_group2]
        self._assertEqualListsOfObjects(expected1, real1,
                                        ignored_keys=['instances'])
        self._assertEqualListsOfObjects(expected2, real2,
                                        ignored_keys=['instances'])

    def test_security_group_get_by_instance(self):
        """Only groups containing the instance are returned."""
        instance = db.instance_create(self.ctxt, dict(host='foo'))
        values = [
            {'name': 'fake1', 'instances': [instance]},
            {'name': 'fake2', 'instances': [instance]},
            {'name': 'fake3', 'instances': []},
        ]
        security_groups = [self._create_security_group(vals)
                           for vals in values]

        real = db.security_group_get_by_instance(self.ctxt,
                                                 instance['uuid'])
        expected = security_groups[:2]
        self._assertEqualListsOfObjects(expected, real,
                                        ignored_keys=['instances'])

    def test_security_group_get_all(self):
        """security_group_get_all returns every group."""
        values = [
            {'name': 'fake1', 'project_id': 'fake_proj1'},
            {'name': 'fake2', 'project_id': 'fake_proj2'},
        ]
        security_groups = [self._create_security_group(vals)
                           for vals in values]

        real = db.security_group_get_all(self.ctxt)
        self._assertEqualListsOfObjects(security_groups, real,
                                        ignored_keys=['instances'])

    def test_security_group_in_use(self):
        """A group is in use iff it has at least one instance."""
        instance = db.instance_create(self.ctxt, dict(host='foo'))
        values = [
            {'instances': [instance],
             'name': 'fake_in_use'},
            {'instances': []},
        ]

        security_groups = [self._create_security_group(vals)
                           for vals in values]

        real = []
        for security_group in security_groups:
            in_use = db.security_group_in_use(self.ctxt,
                                              security_group['id'])
            real.append(in_use)
        expected = [True, False]
        # assertEqual, not the deprecated assertEquals alias.
        self.assertEqual(expected, real)

    def test_security_group_ensure_default(self):
        """ensure_default creates exactly one 'default' group per project."""
        self.assertEqual(0, len(db.security_group_get_by_project(
                                    self.ctxt,
                                    self.ctxt.project_id)))

        db.security_group_ensure_default(self.ctxt)

        security_groups = db.security_group_get_by_project(
                            self.ctxt,
                            self.ctxt.project_id)

        self.assertEqual(1, len(security_groups))
        self.assertEqual("default", security_groups[0]["name"])

    def test_security_group_update(self):
        """security_group_update replaces the given fields."""
        security_group = self._create_security_group({})
        new_values = {
                    'name': 'sec_group1',
                    'description': 'sec_group_descr1',
                    'user_id': 'fake_user1',
                    'project_id': 'fake_proj1',
        }

        updated_group = db.security_group_update(self.ctxt,
                                    security_group['id'],
                                    new_values)
        for key, value in new_values.iteritems():
            self.assertEqual(updated_group[key], value)

    def test_security_group_update_to_duplicate(self):
        """Updating into an existing (project, name) pair must fail."""
        security_group1 = self._create_security_group(
                {'name': 'fake1', 'project_id': 'fake_proj1'})
        security_group2 = self._create_security_group(
                {'name': 'fake1', 'project_id': 'fake_proj2'})

        self.assertRaises(exception.SecurityGroupExists,
                          db.security_group_update,
                          self.ctxt, security_group2['id'],
                          {'project_id': 'fake_proj1'})
class InstanceTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.instance_* methods."""

    # Default values used by create_instance_with_args(); individual tests
    # override fields via kwargs.
    sample_data = {
        'project_id': 'project1',
        'hostname': 'example.com',
        'host': 'h1',
        'node': 'n1',
        'metadata': {'mkey1': 'mval1', 'mkey2': 'mval2'},
        'system_metadata': {'smkey1': 'smval1', 'smkey2': 'smval2'},
        'info_cache': {'ckey': 'cvalue'},
    }
    def setUp(self):
        """Provide an admin context for all tests."""
        super(InstanceTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _assertEqualInstances(self, instance1, instance2):
        """Compare two instances, ignoring the joined metadata fields."""
        self._assertEqualObjects(instance1, instance2,
                ignored_keys=['metadata', 'system_metadata', 'info_cache'])
    def _assertEqualListsOfInstances(self, list1, list2):
        """Compare two instance lists, ignoring the joined metadata fields."""
        self._assertEqualListsOfObjects(list1, list2,
                ignored_keys=['metadata', 'system_metadata', 'info_cache'])
def create_instance_with_args(self, **kwargs):
if 'context' in kwargs:
context = kwargs.pop('context')
else:
context = self.ctxt
args = self.sample_data.copy()
args.update(kwargs)
return db.instance_create(context, args)
    def test_instance_create(self):
        """New instances are assigned a uuid."""
        instance = self.create_instance_with_args()
        self.assertTrue(uuidutils.is_uuid_like(instance['uuid']))
    def test_instance_create_with_object_values(self):
        """IPAddress objects and tz-aware datetimes are coerced on create."""
        values = {
            'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
            'access_ip_v6': netaddr.IPAddress('::1'),
            }
        dt_keys = ('created_at', 'deleted_at', 'updated_at',
                   'launched_at', 'terminated_at', 'scheduled_at')
        dt = timeutils.utcnow()
        dt_utc = dt.replace(tzinfo=iso8601.iso8601.Utc())
        for key in dt_keys:
            values[key] = dt_utc
        inst = db.instance_create(self.ctxt, values)
        self.assertEqual(inst['access_ip_v4'], '1.2.3.4')
        self.assertEqual(inst['access_ip_v6'], '::1')
        for key in dt_keys:
            # compared against the naive dt: the tzinfo is stripped in the DB
            self.assertEqual(inst[key], dt)
    def test_instance_update_with_object_values(self):
        """IPAddress objects and tz-aware datetimes are coerced on update."""
        values = {
            'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
            'access_ip_v6': netaddr.IPAddress('::1'),
            }
        dt_keys = ('created_at', 'deleted_at', 'updated_at',
                   'launched_at', 'terminated_at', 'scheduled_at')
        dt = timeutils.utcnow()
        dt_utc = dt.replace(tzinfo=iso8601.iso8601.Utc())
        for key in dt_keys:
            values[key] = dt_utc
        inst = db.instance_create(self.ctxt, {})
        inst = db.instance_update(self.ctxt, inst['uuid'], values)
        self.assertEqual(inst['access_ip_v4'], '1.2.3.4')
        self.assertEqual(inst['access_ip_v6'], '::1')
        for key in dt_keys:
            # compared against the naive dt: the tzinfo is stripped in the DB
            self.assertEqual(inst[key], dt)
    def test_instance_get_all_with_meta(self):
        """instance_get_all joins metadata and system_metadata."""
        # NOTE(review): a method with this exact name is defined again later
        # in this class and shadows this one at class-creation time, so this
        # copy never runs; one of the two should be removed or renamed.
        inst = self.create_instance_with_args()
        for inst in db.instance_get_all(self.ctxt):
            meta = utils.metadata_to_dict(inst['metadata'])
            self.assertEqual(meta, self.sample_data['metadata'])
            sys_meta = utils.metadata_to_dict(inst['system_metadata'])
            self.assertEqual(sys_meta, self.sample_data['system_metadata'])
    def test_instance_update(self):
        """Updating metadata/system_metadata replaces the stored dicts."""
        instance = self.create_instance_with_args()
        metadata = {'host': 'bar', 'key2': 'wuff'}
        system_metadata = {'original_image_ref': 'baz'}
        # Update the metadata
        db.instance_update(self.ctxt, instance['uuid'], {'metadata': metadata,
                           'system_metadata': system_metadata})
        # Retrieve the user-provided metadata to ensure it was successfully
        # updated
        self.assertEqual(metadata,
                db.instance_metadata_get(self.ctxt, instance['uuid']))
        self.assertEqual(system_metadata,
                db.instance_system_metadata_get(self.ctxt, instance['uuid']))
    def test_instance_update_bad_str_dates(self):
        """A non-date string for a datetime column raises ValueError."""
        instance = self.create_instance_with_args()
        values = {'created_at': '123'}
        self.assertRaises(ValueError,
                          db.instance_update,
                          self.ctxt, instance['uuid'], values)
def test_instance_update_good_str_dates(self):
instance = self.create_instance_with_args()
values = {'created_at': '2011-01-31T00:00:00.0'}
actual = db.instance_update(self.ctxt, instance['uuid'], values)
expected = datetime.datetime(2011, 1, 31)
self.assertEquals(expected, actual["created_at"])
    def test_create_instance_unique_hostname(self):
        """Hostname uniqueness is enforced per the configured scope."""
        context1 = context.RequestContext('user1', 'p1')
        context2 = context.RequestContext('user2', 'p2')
        self.create_instance_with_args(hostname='h1', project_id='p1')

        # With scope 'global' any duplicate should fail, be it this project:
        self.flags(osapi_compute_unique_server_name_scope='global')
        self.assertRaises(exception.InstanceExists,
                          self.create_instance_with_args,
                          context=context1,
                          hostname='h1', project_id='p3')
        # or another:
        self.assertRaises(exception.InstanceExists,
                          self.create_instance_with_args,
                          context=context2,
                          hostname='h1', project_id='p2')
        # With scope 'project' a duplicate in the project should fail:
        self.flags(osapi_compute_unique_server_name_scope='project')
        self.assertRaises(exception.InstanceExists,
                          self.create_instance_with_args,
                          context=context1,
                          hostname='h1', project_id='p1')
        # With scope 'project' a duplicate in a different project should work:
        self.flags(osapi_compute_unique_server_name_scope='project')
        self.create_instance_with_args(context=context2, hostname='h2')
        self.flags(osapi_compute_unique_server_name_scope=None)
    def test_instance_get_all_with_meta(self):
        """instance_get_all joins metadata and system_metadata."""
        # NOTE(review): duplicate of an earlier definition of the same name
        # in this class; this later one wins at class-creation time.
        inst = self.create_instance_with_args()
        for inst in db.instance_get_all(self.ctxt):
            meta = utils.metadata_to_dict(inst['metadata'])
            self.assertEqual(meta, self.sample_data['metadata'])
            sys_meta = utils.metadata_to_dict(inst['system_metadata'])
            self.assertEqual(sys_meta, self.sample_data['system_metadata'])
    def test_instance_get_all_by_filters_with_meta(self):
        """Filtered queries join metadata by default."""
        inst = self.create_instance_with_args()
        for inst in db.instance_get_all_by_filters(self.ctxt, {}):
            meta = utils.metadata_to_dict(inst['metadata'])
            self.assertEqual(meta, self.sample_data['metadata'])
            sys_meta = utils.metadata_to_dict(inst['system_metadata'])
            self.assertEqual(sys_meta, self.sample_data['system_metadata'])
    def test_instance_get_all_by_filters_without_meta(self):
        """With columns_to_join=[] the metadata comes back empty."""
        inst = self.create_instance_with_args()
        result = db.instance_get_all_by_filters(self.ctxt, {},
                                                columns_to_join=[])
        for inst in result:
            meta = utils.metadata_to_dict(inst['metadata'])
            self.assertEqual(meta, {})
            sys_meta = utils.metadata_to_dict(inst['system_metadata'])
            self.assertEqual(sys_meta, {})
def test_instance_get_all_by_filters(self):
instances = [self.create_instance_with_args() for i in range(3)]
filtered_instances = db.instance_get_all_by_filters(self.ctxt, {})
self._assertEqualListsOfInstances(instances, filtered_instances)
def test_instance_metadata_get_multi(self):
uuids = [self.create_instance_with_args()['uuid'] for i in range(3)]
meta = sqlalchemy_api._instance_metadata_get_multi(self.ctxt, uuids)
for row in meta:
self.assertTrue(row['instance_uuid'] in uuids)
    def test_instance_metadata_get_multi_no_uuids(self):
        # Stub Query.filter with *no* recorded expectations: if the
        # implementation tried to build a query filter for an empty uuid
        # list, mox would raise UnexpectedMethodCall and fail the test.
        # The call must short-circuit without touching the query.
        self.mox.StubOutWithMock(query.Query, 'filter')
        self.mox.ReplayAll()
        sqlalchemy_api._instance_metadata_get_multi(self.ctxt, [])
def test_instance_system_system_metadata_get_multi(self):
uuids = [self.create_instance_with_args()['uuid'] for i in range(3)]
sys_meta = sqlalchemy_api._instance_system_metadata_get_multi(
self.ctxt, uuids)
for row in sys_meta:
self.assertTrue(row['instance_uuid'] in uuids)
    def test_instance_system_metadata_get_multi_no_uuids(self):
        # As in the metadata variant above: stubbing Query.filter with no
        # expectations ensures the empty-uuid fast path never builds a
        # query filter (mox would fail on an unexpected call).
        self.mox.StubOutWithMock(query.Query, 'filter')
        self.mox.ReplayAll()
        sqlalchemy_api._instance_system_metadata_get_multi(self.ctxt, [])
def test_instance_get_all_by_filters_regex(self):
i1 = self.create_instance_with_args(display_name='test1')
i2 = self.create_instance_with_args(display_name='teeeest2')
self.create_instance_with_args(display_name='diff')
result = db.instance_get_all_by_filters(self.ctxt,
{'display_name': 't.*st.'})
self._assertEqualListsOfInstances(result, [i1, i2])
def test_instance_get_all_by_filters_exact_match(self):
instance = self.create_instance_with_args(host='host1')
self.create_instance_with_args(host='host12')
result = db.instance_get_all_by_filters(self.ctxt,
{'host': 'host1'})
self._assertEqualListsOfInstances([instance], result)
def test_instance_get_all_by_filters_metadata(self):
instance = self.create_instance_with_args(metadata={'foo': 'bar'})
self.create_instance_with_args()
result = db.instance_get_all_by_filters(self.ctxt,
{'metadata': {'foo': 'bar'}})
self._assertEqualListsOfInstances([instance], result)
def test_instance_get_all_by_filters_unicode_value(self):
instance = self.create_instance_with_args(display_name=u'test♥')
result = db.instance_get_all_by_filters(self.ctxt,
{'display_name': u'test'})
self._assertEqualListsOfInstances([instance], result)
    def test_instance_get_all_by_filters_tags(self):
        """Exercise EC2-style 'tag-key'/'tag-value'/'tag:<key>' filters.

        The tag filters are backed by instance metadata; deleting the
        metadata key must make all three filter forms stop matching.
        """
        instance = self.create_instance_with_args(
            metadata={'foo': 'bar'})
        self.create_instance_with_args()
        # For format 'tag-' (separate key and value filters)
        result = db.instance_get_all_by_filters(
            self.ctxt, {'filter': [
                {'name': 'tag-key', 'value': 'foo'},
                {'name': 'tag-value', 'value': 'bar'},
            ]})
        self._assertEqualListsOfInstances([instance], result)
        # For format 'tag:' (key embedded in the filter name)
        result = db.instance_get_all_by_filters(
            self.ctxt, {'filter': [
                {'name': 'tag:foo', 'value': 'bar'},
            ]})
        self._assertEqualListsOfInstances([instance], result)
        # For a non-existent tag value nothing matches
        result = db.instance_get_all_by_filters(
            self.ctxt, {'filter': [
                {'name': 'tag:foo', 'value': 'barred'},
            ]})
        self.assertEqual([], result)
        # Confirm that deleted tags no longer match
        db.instance_metadata_delete(self.ctxt, instance['uuid'], 'foo')
        # For format 'tag-'
        result = db.instance_get_all_by_filters(
            self.ctxt, {'filter': [
                {'name': 'tag-key', 'value': 'foo'},
            ]})
        self.assertEqual([], result)
        result = db.instance_get_all_by_filters(
            self.ctxt, {'filter': [
                {'name': 'tag-value', 'value': 'bar'}
            ]})
        self.assertEqual([], result)
        # For format 'tag:'
        result = db.instance_get_all_by_filters(
            self.ctxt, {'filter': [
                {'name': 'tag:foo', 'value': 'bar'},
            ]})
        self.assertEqual([], result)
def test_instance_get_by_uuid(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'])
self._assertEqualInstances(inst, result)
def test_instance_get_by_uuid_join_empty(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=[])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, {})
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_get_by_uuid_join_meta(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=['metadata'])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, self.sample_data['metadata'])
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_get_by_uuid_join_sys_meta(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=['system_metadata'])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, {})
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, self.sample_data['system_metadata'])
def test_instance_get_all_by_filters_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(reservation_id='b')
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt, {})
self._assertEqualListsOfObjects([inst1, inst2], result,
ignored_keys=['metadata', 'system_metadata',
'deleted', 'deleted_at', 'info_cache'])
def test_instance_get_all_by_filters_deleted_and_soft_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
inst3 = self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': True})
self._assertEqualListsOfObjects([inst1, inst2], result,
ignored_keys=['metadata', 'system_metadata',
'deleted', 'deleted_at', 'info_cache'])
def test_instance_get_all_by_filters_deleted_no_soft_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
inst3 = self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': True,
'soft_deleted': False})
self._assertEqualListsOfObjects([inst1], result,
ignored_keys=['deleted', 'deleted_at', 'metadata',
'system_metadata', 'info_cache'])
def test_instance_get_all_by_filters_alive_and_soft_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
inst3 = self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': False,
'soft_deleted': True})
self._assertEqualListsOfInstances([inst2, inst3], result)
def test_instance_get_all_by_filters_cleaned(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(reservation_id='b')
db.instance_update(self.ctxt, inst1['uuid'], {'cleaned': 1})
result = db.instance_get_all_by_filters(self.ctxt, {})
self.assertEqual(2, len(result))
self.assertIn(inst1['uuid'], [result[0]['uuid'], result[1]['uuid']])
self.assertIn(inst2['uuid'], [result[0]['uuid'], result[1]['uuid']])
if inst1['uuid'] == result[0]['uuid']:
self.assertTrue(result[0]['cleaned'])
self.assertFalse(result[1]['cleaned'])
else:
self.assertTrue(result[1]['cleaned'])
self.assertFalse(result[0]['cleaned'])
def test_instance_get_all_by_host_and_node_no_join(self):
instance = self.create_instance_with_args()
result = db.instance_get_all_by_host_and_node(self.ctxt, 'h1', 'n1')
self.assertEqual(result[0]['uuid'], instance['uuid'])
self.assertEqual(result[0]['system_metadata'], [])
    def test_instance_get_all_hung_in_rebooting(self):
        """An instance stuck in 'rebooting' past the timeout is reported."""
        # Ensure no instances are returned.
        results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
        self.assertEqual([], results)
        # Ensure one rebooting instance with updated_at older than 10 seconds
        # is returned.  The year-2000 timestamp is safely past any timeout.
        instance = self.create_instance_with_args(task_state="rebooting",
                updated_at=datetime.datetime(2000, 1, 1, 12, 0, 0))
        results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
        self._assertEqualListsOfObjects([instance], results,
            ignored_keys=['task_state', 'info_cache', 'security_groups',
                          'metadata', 'system_metadata'])
        db.instance_update(self.ctxt, instance['uuid'], {"task_state": None})
        # Ensure the newly rebooted instance is not returned.
        instance = self.create_instance_with_args(task_state="rebooting",
                updated_at=timeutils.utcnow())
        results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
        self.assertEqual([], results)
def test_instance_update_with_expected_vm_state(self):
instance = self.create_instance_with_args(vm_state='foo')
db.instance_update(self.ctxt, instance['uuid'], {'host': 'h1',
'expected_vm_state': ('foo', 'bar')})
def test_instance_update_with_unexpected_vm_state(self):
instance = self.create_instance_with_args(vm_state='foo')
self.assertRaises(exception.UnexpectedVMStateError,
db.instance_update, self.ctxt, instance['uuid'],
{'host': 'h1', 'expected_vm_state': ('spam', 'bar')})
def test_instance_update_with_instance_uuid(self):
# test instance_update() works when an instance UUID is passed.
ctxt = context.get_admin_context()
# Create an instance with some metadata
values = {'metadata': {'host': 'foo', 'key1': 'meow'},
'system_metadata': {'original_image_ref': 'blah'}}
instance = db.instance_create(ctxt, values)
# Update the metadata
values = {'metadata': {'host': 'bar', 'key2': 'wuff'},
'system_metadata': {'original_image_ref': 'baz'}}
db.instance_update(ctxt, instance['uuid'], values)
# Retrieve the user-provided metadata to ensure it was successfully
# updated
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
self.assertEqual('bar', instance_meta['host'])
self.assertEqual('wuff', instance_meta['key2'])
self.assertNotIn('key1', instance_meta)
# Retrieve the system metadata to ensure it was successfully updated
system_meta = db.instance_system_metadata_get(ctxt, instance['uuid'])
self.assertEqual('baz', system_meta['original_image_ref'])
def test_delete_instance_metadata_on_instance_destroy(self):
ctxt = context.get_admin_context()
# Create an instance with some metadata
values = {'metadata': {'host': 'foo', 'key1': 'meow'},
'system_metadata': {'original_image_ref': 'blah'}}
instance = db.instance_create(ctxt, values)
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
self.assertEqual('foo', instance_meta['host'])
self.assertEqual('meow', instance_meta['key1'])
db.instance_destroy(ctxt, instance['uuid'])
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
# Make sure instance metadata is deleted as well
self.assertEqual({}, instance_meta)
def test_instance_update_with_and_get_original(self):
instance = self.create_instance_with_args(vm_state='building')
(old_ref, new_ref) = db.instance_update_and_get_original(self.ctxt,
instance['uuid'], {'vm_state': 'needscoffee'})
self.assertEqual('building', old_ref['vm_state'])
self.assertEqual('needscoffee', new_ref['vm_state'])
    def test_instance_update_unique_name(self):
        """Hostname-uniqueness enforcement under each name scope.

        osapi_compute_unique_server_name_scope is toggled between unset,
        'global' and 'project'; the order of the sections matters because
        each relies on the hostname state left by the previous one.
        """
        context1 = context.RequestContext('user1', 'p1')
        context2 = context.RequestContext('user2', 'p2')
        inst1 = self.create_instance_with_args(context=context1,
                                               project_id='p1',
                                               hostname='fake_name1')
        inst2 = self.create_instance_with_args(context=context1,
                                               project_id='p1',
                                               hostname='fake_name2')
        inst3 = self.create_instance_with_args(context=context2,
                                               project_id='p2',
                                               hostname='fake_name3')
        # osapi_compute_unique_server_name_scope is unset so this should work:
        db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_name2'})
        db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_name1'})
        # With scope 'global' any duplicate should fail.
        self.flags(osapi_compute_unique_server_name_scope='global')
        self.assertRaises(exception.InstanceExists,
                          db.instance_update,
                          context1,
                          inst2['uuid'],
                          {'hostname': 'fake_name1'})
        self.assertRaises(exception.InstanceExists,
                          db.instance_update,
                          context2,
                          inst3['uuid'],
                          {'hostname': 'fake_name1'})
        # But we should definitely be able to update our name if we aren't
        # really changing it (case-insensitive match on our own hostname).
        db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_NAME'})
        # With scope 'project' a duplicate in the project should fail:
        self.flags(osapi_compute_unique_server_name_scope='project')
        self.assertRaises(exception.InstanceExists, db.instance_update,
                          context1, inst2['uuid'], {'hostname': 'fake_NAME'})
        # With scope 'project' a duplicate in a different project should work:
        self.flags(osapi_compute_unique_server_name_scope='project')
        db.instance_update(context2, inst3['uuid'], {'hostname': 'fake_NAME'})
def _test_instance_update_updates_metadata(self, metadata_type):
instance = self.create_instance_with_args()
def set_and_check(meta):
inst = db.instance_update(self.ctxt, instance['uuid'],
{metadata_type: dict(meta)})
_meta = utils.metadata_to_dict(inst[metadata_type])
self.assertEqual(meta, _meta)
meta = {'speed': '88', 'units': 'MPH'}
set_and_check(meta)
meta['gigawatts'] = '1.21'
set_and_check(meta)
del meta['gigawatts']
set_and_check(meta)
    def test_security_group_in_use(self):
        # NOTE(review): this test creates an instance and builds a
        # 'values' list but never asserts anything and never calls
        # db.security_group_in_use() -- it looks unfinished, or the body
        # was lost in a refactor.  TODO: either exercise
        # security_group_in_use() with these fixtures or remove the test.
        instance = db.instance_create(self.ctxt, dict(host='foo'))
        values = [
            {'instances': [instance]},
            {'instances': []},
        ]
    def test_instance_update_updates_system_metadata(self):
        # Ensure that system_metadata is updated during instance_update;
        # the shared helper drives add/update/delete of a key.
        self._test_instance_update_updates_metadata('system_metadata')
    def test_instance_update_updates_metadata(self):
        # Ensure that metadata is updated during instance_update;
        # the shared helper drives add/update/delete of a key.
        self._test_instance_update_updates_metadata('metadata')
def test_instance_floating_address_get_all(self):
ctxt = context.get_admin_context()
instance1 = db.instance_create(ctxt, {'host': 'h1', 'hostname': 'n1'})
instance2 = db.instance_create(ctxt, {'host': 'h2', 'hostname': 'n2'})
fixed_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
float_addresses = ['2.1.1.1', '2.1.1.2', '2.1.1.3']
instance_uuids = [instance1['uuid'], instance1['uuid'],
instance2['uuid']]
for fixed_addr, float_addr, instance_uuid in zip(fixed_addresses,
float_addresses,
instance_uuids):
db.fixed_ip_create(ctxt, {'address': fixed_addr,
'instance_uuid': instance_uuid})
fixed_id = db.fixed_ip_get_by_address(ctxt, fixed_addr)['id']
db.floating_ip_create(ctxt,
{'address': float_addr,
'fixed_ip_id': fixed_id})
real_float_addresses = \
db.instance_floating_address_get_all(ctxt, instance_uuids[0])
self.assertEqual(set(float_addresses[:2]), set(real_float_addresses))
real_float_addresses = \
db.instance_floating_address_get_all(ctxt, instance_uuids[2])
self.assertEqual(set([float_addresses[2]]), set(real_float_addresses))
def test_instance_stringified_ips(self):
instance = self.create_instance_with_args()
instance = db.instance_update(
self.ctxt, instance['uuid'],
{'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
'access_ip_v6': netaddr.IPAddress('::1')})
self.assertTrue(isinstance(instance['access_ip_v4'], basestring))
self.assertTrue(isinstance(instance['access_ip_v6'], basestring))
instance = db.instance_get_by_uuid(self.ctxt, instance['uuid'])
self.assertTrue(isinstance(instance['access_ip_v4'], basestring))
self.assertTrue(isinstance(instance['access_ip_v6'], basestring))
class InstanceMetadataTestCase(test.TestCase):
    """Tests for db.api.instance_metadata_* methods."""
    def setUp(self):
        super(InstanceMetadataTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def test_instance_metadata_get(self):
        """Metadata supplied at create time is returned by the getter."""
        inst = db.instance_create(self.ctxt,
                                  {'metadata': {'key': 'value'}})
        fetched = db.instance_metadata_get(self.ctxt, inst['uuid'])
        self.assertEqual({'key': 'value'}, fetched)
    def test_instance_metadata_delete(self):
        """Deleting one key leaves the other keys untouched."""
        inst = db.instance_create(self.ctxt,
                                  {'metadata': {'key': 'val',
                                                'key1': 'val1'}})
        db.instance_metadata_delete(self.ctxt, inst['uuid'], 'key1')
        fetched = db.instance_metadata_get(self.ctxt, inst['uuid'])
        self.assertEqual({'key': 'val'}, fetched)
    def test_instance_metadata_update(self):
        """instance_metadata_update merges or replaces based on 'delete'."""
        inst = db.instance_create(self.ctxt, {'host': 'h1',
                'project_id': 'p1', 'metadata': {'key': 'value'}})
        # delete=False merges the new key with the existing metadata.
        db.instance_metadata_update(self.ctxt, inst['uuid'],
                                    {'new_key': 'new_value'}, False)
        fetched = db.instance_metadata_get(self.ctxt, inst['uuid'])
        self.assertEqual({'key': 'value', 'new_key': 'new_value'}, fetched)
        # delete=True replaces the metadata wholesale.
        db.instance_metadata_update(self.ctxt, inst['uuid'],
                                    {'new_key': 'new_value'}, True)
        fetched = db.instance_metadata_get(self.ctxt, inst['uuid'])
        self.assertEqual({'new_key': 'new_value'}, fetched)
class ServiceTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.service_* methods."""
    def setUp(self):
        super(ServiceTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _get_base_values(self):
        """Return default attributes for a service row."""
        return {
            'host': 'fake_host',
            'binary': 'fake_binary',
            'topic': 'fake_topic',
            'report_count': 3,
            'disabled': False
        }
    def _create_service(self, values):
        """Create a service from the base values overridden by *values*."""
        v = self._get_base_values()
        v.update(values)
        return db.service_create(self.ctxt, v)
    def test_service_create(self):
        """service_create assigns an id and stores all base values."""
        service = self._create_service({})
        self.assertFalse(service['id'] is None)
        for key, value in self._get_base_values().iteritems():
            self.assertEqual(value, service[key])
    def test_service_destroy(self):
        """Destroying one service does not touch another."""
        service1 = self._create_service({})
        service2 = self._create_service({'host': 'fake_host2'})
        db.service_destroy(self.ctxt, service1['id'])
        self.assertRaises(exception.ServiceNotFound,
                          db.service_get, self.ctxt, service1['id'])
        self._assertEqualObjects(db.service_get(self.ctxt, service2['id']),
                                 service2, ignored_keys=['compute_node'])
    def test_service_update(self):
        """service_update overwrites every supplied column."""
        service = self._create_service({})
        new_values = {
            'host': 'fake_host1',
            'binary': 'fake_binary1',
            'topic': 'fake_topic1',
            'report_count': 4,
            'disabled': True
        }
        db.service_update(self.ctxt, service['id'], new_values)
        updated_service = db.service_get(self.ctxt, service['id'])
        for key, value in new_values.iteritems():
            self.assertEqual(value, updated_service[key])
    def test_service_update_not_found_exception(self):
        """Updating a missing service id raises ServiceNotFound."""
        self.assertRaises(exception.ServiceNotFound,
                          db.service_update, self.ctxt, 100500, {})
    def test_service_get(self):
        """service_get returns only the requested service."""
        service1 = self._create_service({})
        self._create_service({'host': 'some_other_fake_host'})
        real_service1 = db.service_get(self.ctxt, service1['id'])
        self._assertEqualObjects(service1, real_service1,
                                 ignored_keys=['compute_node'])
    def test_service_get_with_compute_node(self):
        """A service's joined compute_node list reflects its compute node."""
        service = self._create_service({})
        compute_values = dict(vcpus=2, memory_mb=1024, local_gb=2048,
                              vcpus_used=0, memory_mb_used=0,
                              local_gb_used=0, free_ram_mb=1024,
                              free_disk_gb=2048, hypervisor_type="xen",
                              hypervisor_version=1, cpu_info="",
                              running_vms=0, current_workload=0,
                              service_id=service['id'])
        compute = db.compute_node_create(self.ctxt, compute_values)
        real_service = db.service_get(self.ctxt, service['id'])
        real_compute = real_service['compute_node'][0]
        self.assertEqual(compute['id'], real_compute['id'])
    def test_service_get_not_found_exception(self):
        """Fetching a missing service id raises ServiceNotFound."""
        self.assertRaises(exception.ServiceNotFound,
                          db.service_get, self.ctxt, 100500)
    def test_service_get_by_host_and_topic(self):
        """Lookup by (host, topic) returns the single matching service."""
        service1 = self._create_service({'host': 'host1', 'topic': 'topic1'})
        self._create_service({'host': 'host2', 'topic': 'topic2'})
        real_service1 = db.service_get_by_host_and_topic(self.ctxt,
                                                         host='host1',
                                                         topic='topic1')
        self._assertEqualObjects(service1, real_service1)
    def test_service_get_all(self):
        """service_get_all honours the disabled tri-state argument."""
        values = [
            {'host': 'host1', 'topic': 'topic1'},
            {'host': 'host2', 'topic': 'topic2'},
            {'disabled': True}
        ]
        services = [self._create_service(vals) for vals in values]
        disabled_services = [services[-1]]
        non_disabled_services = services[:-1]
        compares = [
            # disabled unset returns everything.
            (services, db.service_get_all(self.ctxt)),
            # disabled=True returns only disabled services.
            (disabled_services, db.service_get_all(self.ctxt, True)),
            # disabled=False returns only enabled services.
            (non_disabled_services, db.service_get_all(self.ctxt, False))
        ]
        for comp in compares:
            self._assertEqualListsOfObjects(*comp)
    def test_service_get_all_by_topic(self):
        """By-topic lookup excludes disabled services (services[2])."""
        values = [
            {'host': 'host1', 'topic': 't1'},
            {'host': 'host2', 'topic': 't1'},
            {'disabled': True, 'topic': 't1'},
            {'host': 'host3', 'topic': 't2'}
        ]
        services = [self._create_service(vals) for vals in values]
        expected = services[:2]
        real = db.service_get_all_by_topic(self.ctxt, 't1')
        self._assertEqualListsOfObjects(expected, real)
    def test_service_get_all_by_host(self):
        """All services on the given host are returned."""
        values = [
            {'host': 'host1', 'topic': 't11', 'binary': 'b11'},
            {'host': 'host1', 'topic': 't12', 'binary': 'b12'},
            {'host': 'host2', 'topic': 't1'},
            {'host': 'host3', 'topic': 't1'}
        ]
        services = [self._create_service(vals) for vals in values]
        expected = services[:2]
        real = db.service_get_all_by_host(self.ctxt, 'host1')
        self._assertEqualListsOfObjects(expected, real)
    def test_service_get_by_compute_host(self):
        """Only services with the compute topic match a compute host."""
        values = [
            {'host': 'host1', 'topic': CONF.compute_topic},
            {'host': 'host2', 'topic': 't1'},
            {'host': 'host3', 'topic': CONF.compute_topic}
        ]
        services = [self._create_service(vals) for vals in values]
        real_service = db.service_get_by_compute_host(self.ctxt, 'host1')
        self._assertEqualObjects(services[0], real_service,
                                 ignored_keys=['compute_node'])
        self.assertRaises(exception.ComputeHostNotFound,
                          db.service_get_by_compute_host,
                          self.ctxt, 'non-exists-host')
    def test_service_get_by_compute_host_not_found(self):
        """An unknown compute host raises ComputeHostNotFound."""
        self.assertRaises(exception.ComputeHostNotFound,
                          db.service_get_by_compute_host,
                          self.ctxt, 'non-exists-host')
    def test_service_get_by_args(self):
        """Lookup by (host, binary) returns the matching service."""
        values = [
            {'host': 'host1', 'binary': 'a'},
            {'host': 'host2', 'binary': 'b'}
        ]
        services = [self._create_service(vals) for vals in values]
        service1 = db.service_get_by_args(self.ctxt, 'host1', 'a')
        self._assertEqualObjects(services[0], service1)
        service2 = db.service_get_by_args(self.ctxt, 'host2', 'b')
        self._assertEqualObjects(services[1], service2)
    def test_service_get_by_args_not_found_exception(self):
        """An unknown (host, binary) pair raises HostBinaryNotFound."""
        self.assertRaises(exception.HostBinaryNotFound,
                          db.service_get_by_args,
                          self.ctxt, 'non-exists-host', 'a')
    def test_service_binary_exists_exception(self):
        """Re-creating the same (host, binary) raises ServiceBinaryExists."""
        db.service_create(self.ctxt, self._get_base_values())
        values = self._get_base_values()
        values.update({'topic': 'top1'})
        self.assertRaises(exception.ServiceBinaryExists, db.service_create,
                          self.ctxt, values)
    def test_service_topic_exists_exceptions(self):
        """Re-creating the same (host, topic) raises ServiceTopicExists."""
        db.service_create(self.ctxt, self._get_base_values())
        values = self._get_base_values()
        values.update({'binary': 'bin1'})
        self.assertRaises(exception.ServiceTopicExists, db.service_create,
                          self.ctxt, values)
class BaseInstanceTypeTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Shared fixtures for the instance-type (flavor) test cases."""
    def setUp(self):
        super(BaseInstanceTypeTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.user_ctxt = context.RequestContext('user', 'user')
    def _get_base_values(self):
        """Return a fresh dict of default flavor attributes."""
        return {
            'name': 'fake_name',
            'memory_mb': 512,
            'vcpus': 1,
            'root_gb': 10,
            'ephemeral_gb': 10,
            'flavorid': 'fake_flavor',
            'swap': 0,
            'rxtx_factor': 0.5,
            'vcpu_weight': 1,
            'disabled': False,
            'is_public': True
        }
    def _create_inst_type(self, values):
        """Create a flavor from the base values overridden by *values*."""
        base = self._get_base_values()
        base.update(values)
        return db.flavor_create(self.ctxt, base)
class InstanceActionTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api instance action and action-event methods."""
    # Columns filled in by the DB layer rather than by the caller; they
    # are excluded from object comparisons.
    IGNORED_FIELDS = [
        'id',
        'created_at',
        'updated_at',
        'deleted_at',
        'deleted'
    ]
    def setUp(self):
        super(InstanceActionTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _create_action_values(self, uuid, action='run_instance', ctxt=None):
        """Build the values dict that db.action_start() expects."""
        if ctxt is None:
            ctxt = self.ctxt
        return {
            'action': action,
            'instance_uuid': uuid,
            'request_id': ctxt.request_id,
            'user_id': ctxt.user_id,
            'project_id': ctxt.project_id,
            'start_time': timeutils.utcnow(),
            'message': 'action-message'
        }
    def _create_event_values(self, uuid, event='schedule',
                             ctxt=None, extra=None):
        """Build the values dict that db.action_event_start() expects."""
        if ctxt is None:
            ctxt = self.ctxt
        values = {
            'event': event,
            'instance_uuid': uuid,
            'request_id': ctxt.request_id,
            'start_time': timeutils.utcnow()
        }
        if extra is not None:
            values.update(extra)
        return values
    def _assertActionSaved(self, action, uuid):
        """Retrieve the action to ensure it was successfully added."""
        actions = db.actions_get(self.ctxt, uuid)
        self.assertEqual(1, len(actions))
        self._assertEqualObjects(action, actions[0])
    def _assertActionEventSaved(self, event, action_id):
        # Retrieve the event to ensure it was successfully added
        events = db.action_events_get(self.ctxt, action_id)
        self.assertEqual(1, len(events))
        self._assertEqualObjects(event, events[0],
                                 ['instance_uuid', 'request_id'])
    def test_instance_action_start(self):
        """Create an instance action."""
        uuid = str(stdlib_uuid.uuid4())
        action_values = self._create_action_values(uuid)
        action = db.action_start(self.ctxt, action_values)
        # finish_time is only set by action_finish, so it is ignored here.
        ignored_keys = self.IGNORED_FIELDS + ['finish_time']
        self._assertEqualObjects(action_values, action, ignored_keys)
        self._assertActionSaved(action, uuid)
    def test_instance_action_finish(self):
        """Finish a started instance action and verify the round-trip."""
        uuid = str(stdlib_uuid.uuid4())
        action_values = self._create_action_values(uuid)
        db.action_start(self.ctxt, action_values)
        action_values['finish_time'] = timeutils.utcnow()
        action = db.action_finish(self.ctxt, action_values)
        self._assertEqualObjects(action_values, action, self.IGNORED_FIELDS)
        self._assertActionSaved(action, uuid)
    def test_instance_action_finish_without_started_event(self):
        """action_finish without a prior action_start raises."""
        uuid = str(stdlib_uuid.uuid4())
        action_values = self._create_action_values(uuid)
        action_values['finish_time'] = timeutils.utcnow()
        self.assertRaises(exception.InstanceActionNotFound, db.action_finish,
                          self.ctxt, action_values)
    def test_instance_actions_get_by_instance(self):
        """Ensure we can get actions by UUID."""
        uuid1 = str(stdlib_uuid.uuid4())
        expected = []
        action_values = self._create_action_values(uuid1)
        action = db.action_start(self.ctxt, action_values)
        expected.append(action)
        action_values['action'] = 'resize'
        action = db.action_start(self.ctxt, action_values)
        expected.append(action)
        # Create some extra actions on a different instance; they must not
        # leak into the result for uuid1.
        uuid2 = str(stdlib_uuid.uuid4())
        ctxt2 = context.get_admin_context()
        action_values = self._create_action_values(uuid2, 'reboot', ctxt2)
        db.action_start(ctxt2, action_values)
        db.action_start(ctxt2, action_values)
        # Retrieve the action to ensure it was successfully added
        actions = db.actions_get(self.ctxt, uuid1)
        self._assertEqualListsOfObjects(expected, actions)
    def test_instance_action_get_by_instance_and_action(self):
        """Ensure we can get an action by instance UUID and action id."""
        ctxt2 = context.get_admin_context()
        uuid1 = str(stdlib_uuid.uuid4())
        uuid2 = str(stdlib_uuid.uuid4())
        action_values = self._create_action_values(uuid1)
        db.action_start(self.ctxt, action_values)
        action_values['action'] = 'resize'
        db.action_start(self.ctxt, action_values)
        # Noise on a second instance/context.
        action_values = self._create_action_values(uuid2, 'reboot', ctxt2)
        db.action_start(ctxt2, action_values)
        db.action_start(ctxt2, action_values)
        actions = db.actions_get(self.ctxt, uuid1)
        request_id = actions[0]['request_id']
        action = db.action_get_by_request_id(self.ctxt, uuid1, request_id)
        self.assertEqual('run_instance', action['action'])
        self.assertEqual(self.ctxt.request_id, action['request_id'])
    def test_instance_action_event_start(self):
        """Create an instance action event."""
        uuid = str(stdlib_uuid.uuid4())
        action_values = self._create_action_values(uuid)
        action = db.action_start(self.ctxt, action_values)
        event_values = self._create_event_values(uuid)
        event = db.action_event_start(self.ctxt, event_values)
        # The event is linked to the action it belongs to.
        event_values['action_id'] = action['id']
        ignored = self.IGNORED_FIELDS + ['finish_time', 'traceback', 'result']
        self._assertEqualObjects(event_values, event, ignored)
        self._assertActionEventSaved(event, action['id'])
    def test_instance_action_event_start_without_action(self):
        """action_event_start without a matching action raises."""
        uuid = str(stdlib_uuid.uuid4())
        event_values = self._create_event_values(uuid)
        self.assertRaises(exception.InstanceActionNotFound,
                          db.action_event_start, self.ctxt, event_values)
    def test_instance_action_event_finish_without_started_event(self):
        """Finishing a never-started event raises."""
        uuid = str(stdlib_uuid.uuid4())
        db.action_start(self.ctxt, self._create_action_values(uuid))
        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Success'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        self.assertRaises(exception.InstanceActionEventNotFound,
                          db.action_event_finish, self.ctxt, event_values)
    def test_instance_action_event_finish_without_action(self):
        """Finishing an event with no action at all raises."""
        uuid = str(stdlib_uuid.uuid4())
        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Success'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        self.assertRaises(exception.InstanceActionNotFound,
                          db.action_event_finish, self.ctxt, event_values)
    def test_instance_action_event_finish_success(self):
        """A successful event finish leaves the action message untouched."""
        uuid = str(stdlib_uuid.uuid4())
        action = db.action_start(self.ctxt, self._create_action_values(uuid))
        db.action_event_start(self.ctxt, self._create_event_values(uuid))
        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Success'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        event = db.action_event_finish(self.ctxt, event_values)
        self._assertActionEventSaved(event, action['id'])
        action = db.action_get_by_request_id(self.ctxt, uuid,
                                             self.ctxt.request_id)
        self.assertNotEqual('Error', action['message'])
    def test_instance_action_event_finish_error(self):
        """An 'Error' event result marks the whole action as 'Error'."""
        uuid = str(stdlib_uuid.uuid4())
        action = db.action_start(self.ctxt, self._create_action_values(uuid))
        db.action_event_start(self.ctxt, self._create_event_values(uuid))
        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Error'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        event = db.action_event_finish(self.ctxt, event_values)
        self._assertActionEventSaved(event, action['id'])
        action = db.action_get_by_request_id(self.ctxt, uuid,
                                             self.ctxt.request_id)
        self.assertEqual('Error', action['message'])
    def test_instance_action_and_event_start_string_time(self):
        """Create an instance action and event with a string start_time."""
        uuid = str(stdlib_uuid.uuid4())
        action = db.action_start(self.ctxt, self._create_action_values(uuid))
        event_values = {'start_time': timeutils.strtime(timeutils.utcnow())}
        event_values = self._create_event_values(uuid, extra=event_values)
        event = db.action_event_start(self.ctxt, event_values)
        self._assertActionEventSaved(event, action['id'])
    def test_instance_action_event_get_by_id(self):
        """Get a specific instance action event."""
        ctxt2 = context.get_admin_context()
        uuid1 = str(stdlib_uuid.uuid4())
        uuid2 = str(stdlib_uuid.uuid4())
        action = db.action_start(self.ctxt,
                                 self._create_action_values(uuid1))
        # Noise: an unrelated action/event pair on a second instance.
        db.action_start(ctxt2,
                        self._create_action_values(uuid2, 'reboot', ctxt2))
        event = db.action_event_start(self.ctxt,
                                      self._create_event_values(uuid1))
        event_values = self._create_event_values(uuid2, 'reboot', ctxt2)
        db.action_event_start(ctxt2, event_values)
        # Retrieve the event to ensure it was successfully added
        saved_event = db.action_event_get_by_id(self.ctxt,
                                                action['id'],
                                                event['id'])
        self._assertEqualObjects(event, saved_event,
                                 ['instance_uuid', 'request_id'])
class InstanceFaultTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.instance_fault_* methods."""
    def setUp(self):
        super(InstanceFaultTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _create_fault_values(self, uuid, code=404):
        """Build the values dict that db.instance_fault_create() expects."""
        return {
            'message': 'message',
            'details': 'detail',
            'instance_uuid': uuid,
            'code': code,
            'host': 'localhost'
        }
    def test_instance_fault_create(self):
        """Ensure we can create an instance fault."""
        uuid = str(stdlib_uuid.uuid4())
        # Ensure no faults registered for this instance
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
        self.assertEqual(0, len(faults[uuid]))
        # Create a fault
        fault_values = self._create_fault_values(uuid)
        fault = db.instance_fault_create(self.ctxt, fault_values)
        # Columns filled in by the DB layer are not compared.
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id']
        self._assertEqualObjects(fault_values, fault, ignored_keys)
        # Retrieve the fault to ensure it was successfully added
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
        self.assertEqual(1, len(faults[uuid]))
        self._assertEqualObjects(fault, faults[uuid][0])
    def test_instance_fault_get_by_instance(self):
        """Ensure we can retrieve faults for instance."""
        uuids = [str(stdlib_uuid.uuid4()), str(stdlib_uuid.uuid4())]
        fault_codes = [404, 500]
        expected = {}
        # Create two faults (one per code) for each instance
        for uuid in uuids:
            expected[uuid] = []
            for code in fault_codes:
                fault_values = self._create_fault_values(uuid, code)
                fault = db.instance_fault_create(self.ctxt, fault_values)
                expected[uuid].append(fault)
        # Ensure faults are saved
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, uuids)
        self.assertEqual(len(expected), len(faults))
        for uuid in uuids:
            self._assertEqualListsOfObjects(expected[uuid], faults[uuid])
    def test_instance_faults_get_by_instance_uuids_no_faults(self):
        """An instance with no faults maps to an empty list."""
        uuid = str(stdlib_uuid.uuid4())
        # None should be returned when no faults exist.
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
        expected = {uuid: []}
        self.assertEqual(expected, faults)
    def test_instance_faults_get_by_instance_uuids_no_uuids(self):
        # Stub Query.filter with no expectations: the implementation must
        # short-circuit on an empty uuid list without building a query
        # (mox would fail on an unexpected call).
        self.mox.StubOutWithMock(query.Query, 'filter')
        self.mox.ReplayAll()
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [])
        self.assertEqual({}, faults)
class InstanceTypeTestCase(BaseInstanceTypeTestCase):
    """Tests for the flavor (instance type) DB API."""

    def test_flavor_create(self):
        """A freshly created flavor gets an id and keeps the base values."""
        inst_type = self._create_inst_type({})
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at', 'extra_specs']
        # assertIsNotNone instead of assertFalse(... is None): same check,
        # but the failure message shows the offending value.
        self.assertIsNotNone(inst_type['id'])
        self._assertEqualObjects(inst_type, self._get_base_values(),
                                 ignored_keys)

    def test_instance_type_destroy(self):
        """Destroying a flavor removes it and its extra specs only."""
        specs1 = {'a': '1', 'b': '2'}
        inst_type1 = self._create_inst_type({'name': 'name1', 'flavorid': 'a1',
                                             'extra_specs': specs1})
        specs2 = {'c': '4', 'd': '3'}
        inst_type2 = self._create_inst_type({'name': 'name2', 'flavorid': 'a2',
                                             'extra_specs': specs2})

        db.flavor_destroy(self.ctxt, 'name1')

        self.assertRaises(exception.InstanceTypeNotFound,
                          db.flavor_get, self.ctxt, inst_type1['id'])
        # The destroyed flavor's extra specs are gone as well.
        real_specs1 = db.flavor_extra_specs_get(self.ctxt,
                                                inst_type1['flavorid'])
        self._assertEqualObjects(real_specs1, {})
        # The second flavor is untouched.
        r_inst_type2 = db.flavor_get(self.ctxt, inst_type2['id'])
        self._assertEqualObjects(inst_type2, r_inst_type2, 'extra_specs')

    def test_instance_type_destroy_not_found(self):
        """Destroying an unknown flavor raises InstanceTypeNotFound."""
        self.assertRaises(exception.InstanceTypeNotFound,
                          db.flavor_destroy, self.ctxt, 'nonexists')

    def test_flavor_create_duplicate_name(self):
        """Two flavors may not share a name."""
        self._create_inst_type({})
        self.assertRaises(exception.InstanceTypeExists,
                          self._create_inst_type,
                          {'flavorid': 'some_random_flavor'})

    def test_flavor_create_duplicate_flavorid(self):
        """Two flavors may not share a flavorid."""
        self._create_inst_type({})
        self.assertRaises(exception.InstanceTypeIdExists,
                          self._create_inst_type,
                          {'name': 'some_random_name'})

    def test_flavor_create_with_extra_specs(self):
        """Extra specs passed at create time are stored with the flavor."""
        extra_specs = dict(a='abc', b='def', c='ghi')
        inst_type = self._create_inst_type({'extra_specs': extra_specs})
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at', 'extra_specs']
        self._assertEqualObjects(inst_type, self._get_base_values(),
                                 ignored_keys)
        self._assertEqualObjects(extra_specs, inst_type['extra_specs'])

    def test_instance_type_get_all(self):
        """flavor_get_all honors each filter alone and all combined."""
        # NOTE(boris-42): Remove base instance types
        for it in db.flavor_get_all(self.ctxt):
            db.flavor_destroy(self.ctxt, it['name'])

        instance_types = [
            {'root_gb': 600, 'memory_mb': 100, 'disabled': True,
             'is_public': True, 'name': 'a1', 'flavorid': 'f1'},
            {'root_gb': 500, 'memory_mb': 200, 'disabled': True,
             'is_public': True, 'name': 'a2', 'flavorid': 'f2'},
            {'root_gb': 400, 'memory_mb': 300, 'disabled': False,
             'is_public': True, 'name': 'a3', 'flavorid': 'f3'},
            {'root_gb': 300, 'memory_mb': 400, 'disabled': False,
             'is_public': False, 'name': 'a4', 'flavorid': 'f4'},
            {'root_gb': 200, 'memory_mb': 500, 'disabled': True,
             'is_public': False, 'name': 'a5', 'flavorid': 'f5'},
            {'root_gb': 100, 'memory_mb': 600, 'disabled': True,
             'is_public': False, 'name': 'a6', 'flavorid': 'f6'}
        ]
        instance_types = [self._create_inst_type(it) for it in instance_types]

        # Python-side predicates mirroring the DB filter semantics; used to
        # compute the expected result set for any filter combination.
        lambda_filters = {
            'min_memory_mb': lambda it, v: it['memory_mb'] >= v,
            'min_root_gb': lambda it, v: it['root_gb'] >= v,
            'disabled': lambda it, v: it['disabled'] == v,
            'is_public': lambda it, v: (v is None or it['is_public'] == v)
        }

        mem_filts = [{'min_memory_mb': x} for x in [100, 350, 550, 650]]
        root_filts = [{'min_root_gb': x} for x in [100, 350, 550, 650]]
        disabled_filts = [{'disabled': x} for x in [True, False]]
        is_public_filts = [{'is_public': x} for x in [True, False, None]]

        def assert_multi_filter_instance_type_get(filters=None):
            # Compare the DB result against the locally filtered expectation.
            if filters is None:
                filters = {}

            expected_it = instance_types
            for name, value in filters.iteritems():
                filt = lambda it: lambda_filters[name](it, value)
                expected_it = filter(filt, expected_it)

            real_it = db.flavor_get_all(self.ctxt, filters=filters)
            self._assertEqualListsOfObjects(expected_it, real_it)

        #no filter
        assert_multi_filter_instance_type_get()

        #test only with one filter
        for filt in mem_filts:
            assert_multi_filter_instance_type_get(filt)
        for filt in root_filts:
            assert_multi_filter_instance_type_get(filt)
        for filt in disabled_filts:
            assert_multi_filter_instance_type_get(filt)
        for filt in is_public_filts:
            assert_multi_filter_instance_type_get(filt)

        #test all filters together
        for mem in mem_filts:
            for root in root_filts:
                for disabled in disabled_filts:
                    for is_public in is_public_filts:
                        filts = [f.items() for f in
                                    [mem, root, disabled, is_public]]
                        filts = dict(reduce(lambda x, y: x + y, filts, []))
                        assert_multi_filter_instance_type_get(filts)

    def test_instance_type_get(self):
        """Each created flavor can be fetched by id."""
        inst_types = [{'name': 'abc', 'flavorid': '123'},
                      {'name': 'def', 'flavorid': '456'},
                      {'name': 'ghi', 'flavorid': '789'}]
        inst_types = [self._create_inst_type(t) for t in inst_types]

        for inst_type in inst_types:
            inst_type_by_id = db.flavor_get(self.ctxt, inst_type['id'])
            self._assertEqualObjects(inst_type, inst_type_by_id)

    def test_instance_type_get_non_public(self):
        """Non-public flavors are hidden until access is granted."""
        inst_type = self._create_inst_type({'name': 'abc', 'flavorid': '123',
                                            'is_public': False})

        # Admin can see it
        inst_type_by_id = db.flavor_get(self.ctxt, inst_type['id'])
        self._assertEqualObjects(inst_type, inst_type_by_id)

        # Regular user can not
        self.assertRaises(exception.InstanceTypeNotFound, db.flavor_get,
                self.user_ctxt, inst_type['id'])

        # Regular user can see it after being granted access
        db.flavor_access_add(self.ctxt, inst_type['flavorid'],
                self.user_ctxt.project_id)
        inst_type_by_id = db.flavor_get(self.user_ctxt, inst_type['id'])
        self._assertEqualObjects(inst_type, inst_type_by_id)

    def test_instance_type_get_by_name(self):
        """Each created flavor can be fetched by name."""
        inst_types = [{'name': 'abc', 'flavorid': '123'},
                      {'name': 'def', 'flavorid': '456'},
                      {'name': 'ghi', 'flavorid': '789'}]
        inst_types = [self._create_inst_type(t) for t in inst_types]

        for inst_type in inst_types:
            inst_type_by_name = db.flavor_get_by_name(self.ctxt,
                                                      inst_type['name'])
            self._assertEqualObjects(inst_type, inst_type_by_name)

    def test_instance_type_get_by_name_not_found(self):
        """Fetching an unknown name raises InstanceTypeNotFoundByName."""
        self._create_inst_type({})
        self.assertRaises(exception.InstanceTypeNotFoundByName,
                          db.flavor_get_by_name, self.ctxt, 'nonexists')

    def test_instance_type_get_by_name_non_public(self):
        """By-name lookups honor the same access rules as by-id lookups."""
        inst_type = self._create_inst_type({'name': 'abc', 'flavorid': '123',
                                            'is_public': False})

        # Admin can see it
        inst_type_by_name = db.flavor_get_by_name(self.ctxt,
                                                  inst_type['name'])
        self._assertEqualObjects(inst_type, inst_type_by_name)

        # Regular user can not
        self.assertRaises(exception.InstanceTypeNotFoundByName,
                db.flavor_get_by_name, self.user_ctxt,
                inst_type['name'])

        # Regular user can see it after being granted access
        db.flavor_access_add(self.ctxt, inst_type['flavorid'],
                self.user_ctxt.project_id)
        inst_type_by_name = db.flavor_get_by_name(self.user_ctxt,
                                                  inst_type['name'])
        self._assertEqualObjects(inst_type, inst_type_by_name)

    def test_instance_type_get_by_flavor_id(self):
        """Each created flavor can be fetched by flavorid."""
        inst_types = [{'name': 'abc', 'flavorid': '123'},
                      {'name': 'def', 'flavorid': '456'},
                      {'name': 'ghi', 'flavorid': '789'}]
        inst_types = [self._create_inst_type(t) for t in inst_types]

        for inst_type in inst_types:
            params = (self.ctxt, inst_type['flavorid'])
            inst_type_by_flavorid = db.flavor_get_by_flavor_id(*params)
            self._assertEqualObjects(inst_type, inst_type_by_flavorid)

    def test_instance_type_get_by_flavor_not_found(self):
        """Fetching an unknown flavorid raises FlavorNotFound."""
        self._create_inst_type({})
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_get_by_flavor_id,
                          self.ctxt, 'nonexists')

    def test_instance_type_get_by_flavor_id_non_public(self):
        """By-flavorid lookups honor the same access rules."""
        inst_type = self._create_inst_type({'name': 'abc', 'flavorid': '123',
                                            'is_public': False})

        # Admin can see it
        inst_type_by_fid = db.flavor_get_by_flavor_id(self.ctxt,
                                                      inst_type['flavorid'])
        self._assertEqualObjects(inst_type, inst_type_by_fid)

        # Regular user can not
        self.assertRaises(exception.FlavorNotFound,
                db.flavor_get_by_flavor_id, self.user_ctxt,
                inst_type['flavorid'])

        # Regular user can see it after being granted access
        db.flavor_access_add(self.ctxt, inst_type['flavorid'],
                self.user_ctxt.project_id)
        inst_type_by_fid = db.flavor_get_by_flavor_id(self.user_ctxt,
                                                      inst_type['flavorid'])
        self._assertEqualObjects(inst_type, inst_type_by_fid)

    def test_instance_type_get_by_flavor_id_deleted(self):
        """read_deleted='yes' returns a destroyed flavor."""
        inst_type = self._create_inst_type({'name': 'abc', 'flavorid': '123'})

        db.flavor_destroy(self.ctxt, 'abc')

        inst_type_by_fid = db.flavor_get_by_flavor_id(self.ctxt,
                inst_type['flavorid'], read_deleted='yes')
        self.assertEqual(inst_type['id'], inst_type_by_fid['id'])
class InstanceTypeExtraSpecsTestCase(BaseInstanceTypeTestCase):
    """Tests for the flavor extra-specs DB API (flavor_extra_specs_*)."""

    def setUp(self):
        super(InstanceTypeExtraSpecsTestCase, self).setUp()
        # NOTE(boris-42): We have already tested flavor_create method
        #                 with extra_specs in InstanceTypeTestCase.
        values = ({'name': 'n1', 'flavorid': 'f1',
                   'extra_specs': {'a': 'a', 'b': 'b', 'c': 'c'}},
                  {'name': 'n2', 'flavorid': 'f2',
                   'extra_specs': {'d': 'd', 'e': 'e', 'f': 'f'}})
        self.inst_types = [self._create_inst_type(v) for v in values]

    def test_instance_type_extra_specs_get(self):
        """Stored extra specs round-trip through flavor_extra_specs_get."""
        for inst_type in self.inst_types:
            stored = db.flavor_extra_specs_get(self.ctxt,
                                               inst_type['flavorid'])
            self._assertEqualObjects(inst_type['extra_specs'], stored)

    def test_instance_type_extra_specs_get_item(self):
        """Each (flavorid, key) pair resolves to its single spec value."""
        expected = {'f1': {'a': 'a', 'b': 'b', 'c': 'c'},
                    'f2': {'d': 'd', 'e': 'e', 'f': 'f'}}

        for flavorid, specs in expected.iteritems():
            for key, value in specs.iteritems():
                item = db.flavor_extra_specs_get_item(self.ctxt, flavorid,
                                                      key)
                self.assertEqual(item[key], value)

    def test_instance_type_extra_specs_delete(self):
        """Deleting one spec key leaves the remaining keys intact."""
        for inst_type in self.inst_types:
            specs = inst_type['extra_specs']
            removed = specs.keys()[0]
            specs.pop(removed)
            db.flavor_extra_specs_delete(self.ctxt, inst_type['flavorid'],
                                         removed)
            stored = db.flavor_extra_specs_get(self.ctxt,
                                               inst_type['flavorid'])
            self._assertEqualObjects(specs, stored)

    def test_instance_type_extra_specs_update_or_create(self):
        """Existing keys are updated and new keys created in one call."""
        for inst_type in self.inst_types:
            wanted = inst_type['extra_specs']
            wanted.update({'b': 'b1', 'c': 'c1', 'd': 'd1', 'e': 'e1'})
            db.flavor_extra_specs_update_or_create(self.ctxt,
                                                   inst_type['flavorid'],
                                                   wanted)
            stored = db.flavor_extra_specs_get(self.ctxt,
                                               inst_type['flavorid'])
            self._assertEqualObjects(wanted, stored)

    def test_instance_type_extra_specs_update_or_create_flavor_not_found(self):
        """Updating specs of an unknown flavor raises FlavorNotFound."""
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_extra_specs_update_or_create,
                          self.ctxt, 'nonexists', {})

    def test_instance_type_extra_specs_update_or_create_retry(self):
        """The update-or-create loop retries on duplicate-entry races."""
        calls = [0]

        def fake_get_id(context, flavorid, session):
            # Always collide so every attempt retries until exhaustion.
            calls[0] += 1
            raise db_exc.DBDuplicateEntry

        self.stubs.Set(sqlalchemy_api,
                       '_instance_type_get_id_from_flavor', fake_get_id)
        self.assertRaises(db_exc.DBDuplicateEntry, sqlalchemy_api.
                          flavor_extra_specs_update_or_create,
                          self.ctxt, 1, {}, 5)
        self.assertEqual(calls[0], 5)
class InstanceTypeAccessTestCase(BaseInstanceTypeTestCase):
    """Tests for the flavor access DB API (flavor_access_*)."""

    def _create_inst_type_access(self, instance_type_id, project_id):
        """Grant *project_id* access to the given flavor and return it."""
        return db.flavor_access_add(self.ctxt, instance_type_id,
                                    project_id)

    def test_instance_type_access_get_by_flavor_id(self):
        """Access records are grouped under their flavorid."""
        inst_types = ({'name': 'n1', 'flavorid': 'f1'},
                      {'name': 'n2', 'flavorid': 'f2'})
        it1, it2 = tuple((self._create_inst_type(v) for v in inst_types))

        access_it1 = [self._create_inst_type_access(it1['flavorid'], 'pr1'),
                      self._create_inst_type_access(it1['flavorid'], 'pr2')]

        access_it2 = [self._create_inst_type_access(it2['flavorid'], 'pr1')]

        for it, access_it in zip((it1, it2), (access_it1, access_it2)):
            params = (self.ctxt, it['flavorid'])
            real_access_it = db.flavor_access_get_by_flavor_id(*params)
            self._assertEqualListsOfObjects(access_it, real_access_it)

    def test_instance_type_access_get_by_flavor_id_flavor_not_found(self):
        """Looking up an unknown flavorid raises FlavorNotFound."""
        # NOTE(review): this exercises flavor_get_by_flavor_id rather than
        # flavor_access_get_by_flavor_id -- possibly unintended; confirm
        # before changing.
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_get_by_flavor_id,
                          self.ctxt, 'nonexists')

    def test_instance_type_access_add(self):
        """flavor_access_add returns a populated access record."""
        inst_type = self._create_inst_type({'flavorid': 'f1'})
        project_id = 'p1'

        access = self._create_inst_type_access(inst_type['flavorid'],
                                               project_id)
        # NOTE(boris-42): Check that instance_type_access_add doesn't fail and
        #                 returns correct value. This is enough because other
        #                 logic is checked by other methods.
        self.assertIsNotNone(access['id'])
        self.assertEqual(access['instance_type_id'], inst_type['id'])
        self.assertEqual(access['project_id'], project_id)

    def test_instance_type_access_add_to_non_existing_flavor(self):
        """Granting access on an unknown flavor raises FlavorNotFound."""
        self.assertRaises(exception.FlavorNotFound,
                          self._create_inst_type_access,
                          'nonexists', 'does_not_matter')

    def test_instance_type_access_add_duplicate_project_id_flavor(self):
        """A duplicate (flavor, project) grant raises FlavorAccessExists."""
        inst_type = self._create_inst_type({'flavorid': 'f1'})
        params = (inst_type['flavorid'], 'p1')

        self._create_inst_type_access(*params)
        self.assertRaises(exception.FlavorAccessExists,
                          self._create_inst_type_access, *params)

    def test_instance_type_access_remove(self):
        """Removing one grant leaves unrelated grants in place."""
        inst_types = ({'name': 'n1', 'flavorid': 'f1'},
                      {'name': 'n2', 'flavorid': 'f2'})
        it1, it2 = tuple((self._create_inst_type(v) for v in inst_types))

        access_it1 = [self._create_inst_type_access(it1['flavorid'], 'pr1'),
                      self._create_inst_type_access(it1['flavorid'], 'pr2')]

        access_it2 = [self._create_inst_type_access(it2['flavorid'], 'pr1')]

        db.flavor_access_remove(self.ctxt, it1['flavorid'],
                                access_it1[1]['project_id'])

        for it, access_it in zip((it1, it2), (access_it1[:1], access_it2)):
            params = (self.ctxt, it['flavorid'])
            real_access_it = db.flavor_access_get_by_flavor_id(*params)
            self._assertEqualListsOfObjects(access_it, real_access_it)

    def test_instance_type_access_remove_flavor_not_found(self):
        """Revoking access on an unknown flavor raises FlavorNotFound."""
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_access_remove,
                          self.ctxt, 'nonexists', 'does_not_matter')

    def test_instance_type_access_remove_access_not_found(self):
        """Revoking a non-existent grant raises FlavorAccessNotFound."""
        inst_type = self._create_inst_type({'flavorid': 'f1'})
        params = (inst_type['flavorid'], 'p1')

        self._create_inst_type_access(*params)

        self.assertRaises(exception.FlavorAccessNotFound,
                          db.flavor_access_remove,
                          self.ctxt, inst_type['flavorid'], 'p2')

    def test_instance_type_access_removed_after_instance_type_destroy(self):
        """Destroying a flavor removes all of its access grants."""
        inst_type1 = self._create_inst_type({'flavorid': 'f1', 'name': 'n1'})
        inst_type2 = self._create_inst_type({'flavorid': 'f2', 'name': 'n2'})
        values = [
            (inst_type1['flavorid'], 'p1'),
            (inst_type1['flavorid'], 'p2'),
            (inst_type2['flavorid'], 'p3')
        ]
        for v in values:
            self._create_inst_type_access(*v)

        db.flavor_destroy(self.ctxt, inst_type1['name'])

        p = (self.ctxt, inst_type1['flavorid'])
        self.assertEqual(0, len(db.flavor_access_get_by_flavor_id(*p)))
        p = (self.ctxt, inst_type2['flavorid'])
        self.assertEqual(1, len(db.flavor_access_get_by_flavor_id(*p)))
        db.flavor_destroy(self.ctxt, inst_type2['name'])
        self.assertEqual(0, len(db.flavor_access_get_by_flavor_id(*p)))
class FixedIPTestCase(BaseInstanceTypeTestCase):
    """Tests for the fixed-ip DB API (fixed_ip_*)."""

    def _timeout_test(self, ctxt, timeout, multi_host):
        """Create one instance, one network and four fixed IPs.

        Only the first IP (unallocated, attached to the network, updated
        before *timeout*) is eligible for deallocation by
        fixed_ip_disassociate_all_by_timeout; the other three each violate
        one of the criteria.
        """
        instance = db.instance_create(ctxt, dict(host='foo'))
        net = db.network_create_safe(ctxt, dict(multi_host=multi_host,
                                                host='bar'))
        old = timeout - datetime.timedelta(seconds=5)
        new = timeout + datetime.timedelta(seconds=5)

        # should deallocate
        db.fixed_ip_create(ctxt, dict(allocated=False,
                                      instance_uuid=instance['uuid'],
                                      network_id=net['id'],
                                      updated_at=old))
        # still allocated
        db.fixed_ip_create(ctxt, dict(allocated=True,
                                      instance_uuid=instance['uuid'],
                                      network_id=net['id'],
                                      updated_at=old))
        # wrong network
        db.fixed_ip_create(ctxt, dict(allocated=False,
                                      instance_uuid=instance['uuid'],
                                      network_id=None,
                                      updated_at=old))
        # too new
        db.fixed_ip_create(ctxt, dict(allocated=False,
                                      instance_uuid=instance['uuid'],
                                      network_id=None,
                                      updated_at=new))

    def mock_db_query_first_to_raise_data_error_exception(self):
        """Stub sqlalchemy Query.first() so the next call raises DataError."""
        self.mox.StubOutWithMock(query.Query, 'first')
        query.Query.first().AndRaise(exc.DataError(mox.IgnoreArg(),
                                                   mox.IgnoreArg(),
                                                   mox.IgnoreArg()))
        self.mox.ReplayAll()

    def test_fixed_ip_disassociate_all_by_timeout_single_host(self):
        """Only IPs whose network host matches are disassociated."""
        now = timeutils.utcnow()
        self._timeout_test(self.ctxt, now, False)
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'foo', now)
        self.assertEqual(result, 0)
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'bar', now)
        self.assertEqual(result, 1)

    def test_fixed_ip_disassociate_all_by_timeout_multi_host(self):
        """With multi_host networks the instance host is matched instead."""
        now = timeutils.utcnow()
        self._timeout_test(self.ctxt, now, True)
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'foo', now)
        self.assertEqual(result, 1)
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'bar', now)
        self.assertEqual(result, 0)

    def test_fixed_ip_get_by_floating_address(self):
        """A fixed IP can be looked up via its associated floating IP."""
        fixed_ip = db.fixed_ip_create(self.ctxt, {'address': '192.168.0.2'})
        values = {'address': '8.7.6.5',
                  'fixed_ip_id': fixed_ip['id']}
        floating = db.floating_ip_create(self.ctxt, values)['address']
        fixed_ip_ref = db.fixed_ip_get_by_floating_address(self.ctxt, floating)
        self._assertEqualObjects(fixed_ip, fixed_ip_ref)

    def test_fixed_ip_get_by_host(self):
        """Each host sees exactly the IPs of its own instances."""
        host_ips = {
            'host1': ['1.1.1.1', '1.1.1.2', '1.1.1.3'],
            'host2': ['1.1.1.4', '1.1.1.5'],
            'host3': ['1.1.1.6']
        }

        for host, ips in host_ips.iteritems():
            for ip in ips:
                instance_uuid = self._create_instance(host=host)
                db.fixed_ip_create(self.ctxt, {'address': ip})
                db.fixed_ip_associate(self.ctxt, ip, instance_uuid)

        for host, ips in host_ips.iteritems():
            ips_on_host = map(lambda x: x['address'],
                              db.fixed_ip_get_by_host(self.ctxt, host))
            self._assertEqualListsOfPrimitivesAsSets(ips_on_host, ips)

    def test_fixed_ip_get_by_network_host_not_found_exception(self):
        """An empty (network, host) pair raises the dedicated exception."""
        self.assertRaises(
            exception.FixedIpNotFoundForNetworkHost,
            db.fixed_ip_get_by_network_host,
            self.ctxt, 1, 'ignore')

    def test_fixed_ip_get_by_network_host_fixed_ip_found(self):
        """A matching (network, host) pair returns the fixed IP."""
        db.fixed_ip_create(self.ctxt, dict(network_id=1, host='host'))

        fip = db.fixed_ip_get_by_network_host(self.ctxt, 1, 'host')

        self.assertEqual(1, fip['network_id'])
        self.assertEqual('host', fip['host'])

    def _create_instance(self, **kwargs):
        """Create an instance with *kwargs* values and return its uuid."""
        instance = db.instance_create(self.ctxt, kwargs)
        return instance['uuid']

    def test_fixed_ip_get_by_instance_fixed_ip_found(self):
        """A single associated IP is returned for the instance."""
        instance_uuid = self._create_instance()

        FIXED_IP_ADDRESS = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS))

        ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
        self._assertEqualListsOfPrimitivesAsSets([FIXED_IP_ADDRESS],
                                                 [ips_list[0].address])

    def test_fixed_ip_get_by_instance_multiple_fixed_ips_found(self):
        """All IPs associated with the instance are returned."""
        instance_uuid = self._create_instance()

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_2))

        ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ip_get_by_instance_inappropriate_ignored(self):
        """IPs belonging to a different instance are not returned."""
        instance_uuid = self._create_instance()

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_2))

        another_instance = db.instance_create(self.ctxt, {})
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=another_instance['uuid'], address="192.168.1.7"))

        ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ip_get_by_instance_not_found_exception(self):
        """An instance with no IPs raises FixedIpNotFoundForInstance."""
        instance_uuid = self._create_instance()

        self.assertRaises(exception.FixedIpNotFoundForInstance,
                          db.fixed_ip_get_by_instance,
                          self.ctxt, instance_uuid)

    def test_fixed_ips_by_virtual_interface_fixed_ip_found(self):
        """A single IP bound to the vif is returned."""
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        FIXED_IP_ADDRESS = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self._assertEqualListsOfPrimitivesAsSets([FIXED_IP_ADDRESS],
                                                 [ips_list[0].address])

    def test_fixed_ips_by_virtual_interface_multiple_fixed_ips_found(self):
        """All IPs bound to the vif are returned."""
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_2))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ips_by_virtual_interface_inappropriate_ignored(self):
        """IPs bound to a different vif are not returned."""
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_2))

        another_vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=another_vif.id, address="192.168.1.7"))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ips_by_virtual_interface_no_ip_found(self):
        """A vif with no bound IPs yields an empty list."""
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self.assertEqual(0, len(ips_list))

    def create_fixed_ip(self, **params):
        """Create a fixed IP (default address 192.168.0.1), return address."""
        default_params = {'address': '192.168.0.1'}
        default_params.update(params)
        return db.fixed_ip_create(self.ctxt, default_params)['address']

    def test_fixed_ip_associate_fails_if_ip_not_in_network(self):
        """Associating with no candidate IP in the network fails."""
        instance_uuid = self._create_instance()
        self.assertRaises(exception.FixedIpNotFoundForNetwork,
                          db.fixed_ip_associate,
                          self.ctxt, None, instance_uuid)

    def test_fixed_ip_associate_fails_if_ip_in_use(self):
        """Associating an already-associated IP fails."""
        instance_uuid = self._create_instance()

        address = self.create_fixed_ip(instance_uuid=instance_uuid)
        self.assertRaises(exception.FixedIpAlreadyInUse,
                          db.fixed_ip_associate,
                          self.ctxt, address, instance_uuid)

    def test_fixed_ip_associate_succeeds(self):
        """A free IP on the network gets bound to the instance."""
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        address = self.create_fixed_ip(network_id=network['id'])
        db.fixed_ip_associate(self.ctxt, address, instance_uuid,
                              network_id=network['id'])
        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)

    def test_fixed_ip_associate_succeeds_and_sets_network(self):
        """Associating an unattached IP also sets its network_id."""
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        address = self.create_fixed_ip()
        db.fixed_ip_associate(self.ctxt, address, instance_uuid,
                              network_id=network['id'])
        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)
        self.assertEqual(fixed_ip['network_id'], network['id'])

    def test_fixed_ip_associate_pool_invalid_uuid(self):
        """A malformed instance uuid is rejected up front."""
        instance_uuid = '123'
        self.assertRaises(exception.InvalidUUID, db.fixed_ip_associate_pool,
                          self.ctxt, None, instance_uuid)

    def test_fixed_ip_associate_pool_no_more_fixed_ips(self):
        """An empty pool raises NoMoreFixedIps."""
        instance_uuid = self._create_instance()
        self.assertRaises(exception.NoMoreFixedIps, db.fixed_ip_associate_pool,
                          self.ctxt, None, instance_uuid)

    def test_fixed_ip_associate_pool_succeeds(self):
        """An available pool IP gets bound to the instance."""
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        address = self.create_fixed_ip(network_id=network['id'])
        db.fixed_ip_associate_pool(self.ctxt, network['id'], instance_uuid)
        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)

    def test_fixed_ip_create_same_address(self):
        """Creating a duplicate address raises FixedIpExists."""
        address = '192.168.1.5'
        params = {'address': address}
        db.fixed_ip_create(self.ctxt, params)
        self.assertRaises(exception.FixedIpExists, db.fixed_ip_create,
                          self.ctxt, params)

    def test_fixed_ip_create_success(self):
        """All provided values round-trip through fixed_ip_create."""
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': '192.168.1.5',
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }

        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        fixed_ip_data = db.fixed_ip_create(self.ctxt, param)
        self._assertEqualObjects(param, fixed_ip_data, ignored_keys)

    def test_fixed_ip_bulk_create_same_address(self):
        """A duplicate address in a bulk create rolls the whole batch back."""
        address_1 = '192.168.1.5'
        address_2 = '192.168.1.6'
        instance_uuid = self._create_instance()
        network_id_1 = db.network_create_safe(self.ctxt, {})['id']
        network_id_2 = db.network_create_safe(self.ctxt, {})['id']
        params = [
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': '127.0.0.1', 'address': address_2, 'allocated': False,
             'instance_uuid': instance_uuid, 'network_id': network_id_1,
             'virtual_interface_id': None},
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': '127.0.0.1', 'address': address_1, 'allocated': False,
             'instance_uuid': instance_uuid, 'network_id': network_id_1,
             'virtual_interface_id': None},
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': 'localhost', 'address': address_2, 'allocated': True,
             'instance_uuid': instance_uuid, 'network_id': network_id_2,
             'virtual_interface_id': None},
        ]

        self.assertRaises(exception.FixedIpExists, db.fixed_ip_bulk_create,
                          self.ctxt, params)
        # In this case the transaction will be rolled back and none of the ips
        # will make it to the database.
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_get_by_address, self.ctxt, address_1)
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_get_by_address, self.ctxt, address_2)

    def test_fixed_ip_bulk_create_success(self):
        """All rows of a bulk create are persisted with their values."""
        address_1 = '192.168.1.5'
        address_2 = '192.168.1.6'

        instance_uuid = self._create_instance()
        network_id_1 = db.network_create_safe(self.ctxt, {})['id']
        network_id_2 = db.network_create_safe(self.ctxt, {})['id']
        params = [
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': '127.0.0.1', 'address': address_1, 'allocated': False,
             'instance_uuid': instance_uuid, 'network_id': network_id_1,
             'virtual_interface_id': None},
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': 'localhost', 'address': address_2, 'allocated': True,
             'instance_uuid': instance_uuid, 'network_id': network_id_2,
             'virtual_interface_id': None}
        ]

        db.fixed_ip_bulk_create(self.ctxt, params)
        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        fixed_ip_data = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)

        # we have no `id` in incoming data so we can not use
        # _assertEqualListsOfObjects to compare incoming data and received
        # objects
        fixed_ip_data = sorted(fixed_ip_data, key=lambda i: i['network_id'])
        params = sorted(params, key=lambda i: i['network_id'])
        for param, ip in zip(params, fixed_ip_data):
            self._assertEqualObjects(param, ip, ignored_keys)

    def test_fixed_ip_disassociate(self):
        """Disassociation clears instance_uuid and nothing else."""
        address = '192.168.1.5'
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': address,
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }
        db.fixed_ip_create(self.ctxt, param)

        db.fixed_ip_disassociate(self.ctxt, address)
        fixed_ip_data = db.fixed_ip_get_by_address(self.ctxt, address)
        ignored_keys = ['created_at', 'id', 'deleted_at',
                        'updated_at', 'instance_uuid']
        self._assertEqualObjects(param, fixed_ip_data, ignored_keys)
        self.assertIsNone(fixed_ip_data['instance_uuid'])

    def test_fixed_ip_get_not_found_exception(self):
        """An unknown id raises FixedIpNotFound."""
        self.assertRaises(exception.FixedIpNotFound,
                          db.fixed_ip_get, self.ctxt, 0)

    def test_fixed_ip_get_success2(self):
        """fixed_ip_get is refused for a non-admin context."""
        address = '192.168.1.5'
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': address,
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }
        # NOTE(review): fixed_ip_create returns the fixed-ip model, not an
        # id; the authorization check presumably fires before the id is
        # used, so the assertion below still holds -- confirm.
        fixed_ip_id = db.fixed_ip_create(self.ctxt, param)

        self.ctxt.is_admin = False
        self.assertRaises(exception.NotAuthorized, db.fixed_ip_get,
                          self.ctxt, fixed_ip_id)

    def test_fixed_ip_get_success(self):
        """A created fixed IP can be fetched by id with its values intact."""
        address = '192.168.1.5'
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': address,
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }
        db.fixed_ip_create(self.ctxt, param)

        fixed_ip_id = db.fixed_ip_get_by_address(self.ctxt, address)['id']
        fixed_ip_data = db.fixed_ip_get(self.ctxt, fixed_ip_id)
        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        self._assertEqualObjects(param, fixed_ip_data, ignored_keys)

    def test_fixed_ip_get_by_address_detailed_not_found_exception(self):
        """An unknown address raises FixedIpNotFoundForAddress."""
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_get_by_address_detailed, self.ctxt,
                          '192.168.1.5')

    def test_fixed_ip_get_by_address_with_data_error_exception(self):
        """A low-level DataError is translated to FixedIpInvalid."""
        self.mock_db_query_first_to_raise_data_error_exception()
        self.assertRaises(exception.FixedIpInvalid,
                          db.fixed_ip_get_by_address_detailed, self.ctxt,
                          '192.168.1.6')

    def test_fixed_ip_get_by_address_detailed_sucsess(self):
        """The detailed lookup returns (fixed_ip, network, instance)."""
        address = '192.168.1.5'
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': address,
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }
        db.fixed_ip_create(self.ctxt, param)

        fixed_ip_data = db.fixed_ip_get_by_address_detailed(self.ctxt, address)
        # fixed ip check here
        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        self._assertEqualObjects(param, fixed_ip_data[0], ignored_keys)

        # network model check here
        network_data = db.network_get(self.ctxt, network_id)
        self._assertEqualObjects(network_data, fixed_ip_data[1])

        # Instance check here
        instance_data = db.instance_get_by_uuid(self.ctxt, instance_uuid)
        ignored_keys = ['info_cache', 'system_metadata',
                        'security_groups', 'metadata']  # HOW ????
        self._assertEqualObjects(instance_data, fixed_ip_data[2], ignored_keys)

    def test_fixed_ip_update_not_found_for_address(self):
        """Updating an unknown address raises FixedIpNotFoundForAddress."""
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_update, self.ctxt,
                          '192.168.1.5', {})

    def test_fixed_ip_update(self):
        """Every updatable column can be overwritten in one call."""
        instance_uuid_1 = self._create_instance()
        instance_uuid_2 = self._create_instance()
        network_id_1 = db.network_create_safe(self.ctxt, {})['id']
        network_id_2 = db.network_create_safe(self.ctxt, {})['id']
        param_1 = {
            'reserved': True, 'deleted': 0, 'leased': True,
            'host': '192.168.133.1', 'address': '10.0.0.2',
            'allocated': True, 'instance_uuid': instance_uuid_1,
            'network_id': network_id_1, 'virtual_interface_id': '123',
        }

        param_2 = {
            'reserved': False, 'deleted': 0, 'leased': False,
            'host': '127.0.0.1', 'address': '10.0.0.3', 'allocated': False,
            'instance_uuid': instance_uuid_2, 'network_id': network_id_2,
            'virtual_interface_id': None
        }

        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        fixed_ip_addr = db.fixed_ip_create(self.ctxt, param_1)['address']
        db.fixed_ip_update(self.ctxt, fixed_ip_addr, param_2)
        fixed_ip_after_update = db.fixed_ip_get_by_address(self.ctxt,
                                                           param_2['address'])
        self._assertEqualObjects(param_2, fixed_ip_after_update, ignored_keys)
class FloatingIpTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.floating_ip_* methods."""

    def setUp(self):
        super(FloatingIpTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _get_base_values(self):
        # Default column values for a floating IP row; individual tests
        # override selected keys via _create_floating_ip().
        return {
            'address': '1.1.1.1',
            'fixed_ip_id': None,
            'project_id': 'fake_project',
            'host': 'fake_host',
            'auto_assigned': False,
            'pool': 'fake_pool',
            'interface': 'fake_interface',
        }

    def mock_db_query_first_to_raise_data_error_exception(self):
        # Stub SQLAlchemy's Query.first() so the next call raises DataError,
        # simulating a malformed value reaching the database layer.
        self.mox.StubOutWithMock(query.Query, 'first')
        query.Query.first().AndRaise(exc.DataError(mox.IgnoreArg(),
                                                   mox.IgnoreArg(),
                                                   mox.IgnoreArg()))
        self.mox.ReplayAll()

    def _create_floating_ip(self, values):
        # Create a floating IP from the base values overlaid with *values*.
        if not values:
            values = {}
        vals = self._get_base_values()
        vals.update(values)
        return db.floating_ip_create(self.ctxt, vals)

    def test_floating_ip_get(self):
        values = [{'address': '0.0.0.0'}, {'address': '1.1.1.1'}]
        floating_ips = [self._create_floating_ip(val) for val in values]
        for floating_ip in floating_ips:
            real_floating_ip = db.floating_ip_get(self.ctxt, floating_ip['id'])
            self._assertEqualObjects(floating_ip, real_floating_ip,
                                     ignored_keys=['fixed_ip'])

    def test_floating_ip_get_not_found(self):
        self.assertRaises(exception.FloatingIpNotFound,
                          db.floating_ip_get, self.ctxt, 100500)

    def test_floating_ip_get_with_long_id_not_found(self):
        # An id too large for the column triggers DataError -> InvalidID.
        self.mock_db_query_first_to_raise_data_error_exception()
        self.assertRaises(exception.InvalidID,
                          db.floating_ip_get, self.ctxt, 123456789101112)

    def test_floating_ip_get_pools(self):
        values = [
            {'address': '0.0.0.0', 'pool': 'abc'},
            {'address': '1.1.1.1', 'pool': 'abc'},
            {'address': '2.2.2.2', 'pool': 'def'},
            {'address': '3.3.3.3', 'pool': 'ghi'},
        ]
        for val in values:
            self._create_floating_ip(val)
        # Duplicate pool names must be collapsed in the result.
        expected_pools = [{'name': x}
                          for x in set(map(lambda x: x['pool'], values))]
        real_pools = db.floating_ip_get_pools(self.ctxt)
        self._assertEqualListsOfPrimitivesAsSets(real_pools, expected_pools)

    def test_floating_ip_allocate_address(self):
        pools = {
            'pool1': ['0.0.0.0', '1.1.1.1'],
            'pool2': ['2.2.2.2'],
            'pool3': ['3.3.3.3', '4.4.4.4', '5.5.5.5']
        }
        for pool, addresses in pools.iteritems():
            for address in addresses:
                vals = {'pool': pool, 'address': address, 'project_id': None}
                self._create_floating_ip(vals)
        project_id = self._get_base_values()['project_id']
        for pool, addresses in pools.iteritems():
            alloc_addrs = []
            for i in addresses:
                float_addr = db.floating_ip_allocate_address(self.ctxt,
                                                             project_id, pool)
                alloc_addrs.append(float_addr)
            # Repeated allocation must eventually hand out every address
            # of the pool, each exactly once.
            self._assertEqualListsOfPrimitivesAsSets(alloc_addrs, addresses)

    def test_floating_ip_allocate_address_no_more_floating_ips(self):
        self.assertRaises(exception.NoMoreFloatingIps,
                          db.floating_ip_allocate_address,
                          self.ctxt, 'any_project_id', 'no_such_pool')

    def test_floating_ip_allocate_not_authorized(self):
        # A non-admin context may only allocate for its own project.
        ctxt = context.RequestContext(user_id='a', project_id='abc',
                                      is_admin=False)
        self.assertRaises(exception.NotAuthorized,
                          db.floating_ip_allocate_address,
                          ctxt, 'other_project_id', 'any_pool')

    def _get_existing_ips(self):
        # All floating IP addresses currently stored, as plain strings.
        return [ip['address'] for ip in db.floating_ip_get_all(self.ctxt)]

    def test_floating_ip_bulk_create(self):
        expected_ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
        db.floating_ip_bulk_create(self.ctxt,
                                   map(lambda x: {'address': x}, expected_ips))
        self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(),
                                                 expected_ips)

    def test_floating_ip_bulk_create_duplicate(self):
        ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
        prepare_ips = lambda x: {'address': x}
        db.floating_ip_bulk_create(self.ctxt, map(prepare_ips, ips))
        self.assertRaises(exception.FloatingIpExists,
                          db.floating_ip_bulk_create,
                          self.ctxt, map(prepare_ips, ['1.1.1.5', '1.1.1.4']))
        # The failed bulk insert must not have left '1.1.1.5' behind.
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          db.floating_ip_get_by_address,
                          self.ctxt, '1.1.1.5')

    def test_floating_ip_bulk_destroy(self):
        ips_for_delete = []
        ips_for_non_delete = []

        def create_ips(i):
            return [{'address': '1.1.%s.%s' % (i, k)} for k in range(1, 256)]

        # NOTE(boris-42): Create more than 256 ip to check that
        #                 _ip_range_splitter works properly.
        for i in range(1, 3):
            ips_for_delete.extend(create_ips(i))
        ips_for_non_delete.extend(create_ips(3))
        db.floating_ip_bulk_create(self.ctxt,
                                   ips_for_delete + ips_for_non_delete)
        db.floating_ip_bulk_destroy(self.ctxt, ips_for_delete)
        # Only the third batch must survive the bulk destroy.
        expected_addresses = map(lambda x: x['address'], ips_for_non_delete)
        self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(),
                                                 expected_addresses)

    def test_floating_ip_create(self):
        floating_ip = self._create_floating_ip({})
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self.assertFalse(floating_ip['id'] is None)
        self._assertEqualObjects(floating_ip, self._get_base_values(),
                                 ignored_keys)

    def test_floating_ip_create_duplicate(self):
        self._create_floating_ip({})
        self.assertRaises(exception.FloatingIpExists,
                          self._create_floating_ip, {})

    def _create_fixed_ip(self, params):
        # Create a fixed IP row and return its address string.
        default_params = {'address': '192.168.0.1'}
        default_params.update(params)
        return db.fixed_ip_create(self.ctxt, default_params)['address']

    def test_floating_ip_fixed_ip_associate(self):
        float_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        fixed_addresses = ['2.2.2.1', '2.2.2.2', '2.2.2.3']
        float_ips = [self._create_floating_ip({'address': address})
                     for address in float_addresses]
        fixed_addrs = [self._create_fixed_ip({'address': address})
                       for address in fixed_addresses]
        for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
            fixed_ip = db.floating_ip_fixed_ip_associate(self.ctxt,
                                                         float_ip.address,
                                                         fixed_addr, 'host')
            self.assertEqual(fixed_ip.address, fixed_addr)
            updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
            self.assertEqual(fixed_ip.id, updated_float_ip.fixed_ip_id)
            self.assertEqual('host', updated_float_ip.host)
        # Test that already allocated float_ip returns None
        result = db.floating_ip_fixed_ip_associate(self.ctxt,
                                                   float_addresses[0],
                                                   fixed_addresses[0], 'host')
        self.assertTrue(result is None)

    def test_floating_ip_fixed_ip_associate_float_ip_not_found(self):
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          db.floating_ip_fixed_ip_associate,
                          self.ctxt, '10.10.10.10', 'some', 'some')

    def test_floating_ip_deallocate(self):
        values = {'address': '1.1.1.1', 'project_id': 'fake', 'host': 'fake'}
        float_ip = self._create_floating_ip(values)
        db.floating_ip_deallocate(self.ctxt, float_ip.address)
        # Deallocation clears ownership but keeps the row itself.
        updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
        self.assertTrue(updated_float_ip.project_id is None)
        self.assertTrue(updated_float_ip.host is None)
        self.assertFalse(updated_float_ip.auto_assigned)

    def test_floating_ip_destroy(self):
        addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        float_ips = [self._create_floating_ip({'address': addr})
                     for addr in addresses]
        expected_len = len(addresses)
        for float_ip in float_ips:
            db.floating_ip_destroy(self.ctxt, float_ip.address)
            self.assertRaises(exception.FloatingIpNotFound,
                              db.floating_ip_get, self.ctxt, float_ip.id)
            expected_len -= 1
            if expected_len > 0:
                self.assertEqual(expected_len,
                                 len(db.floating_ip_get_all(self.ctxt)))
            else:
                # get_all raises once the table is empty.
                self.assertRaises(exception.NoFloatingIpsDefined,
                                  db.floating_ip_get_all, self.ctxt)

    def test_floating_ip_disassociate(self):
        float_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        fixed_addresses = ['2.2.2.1', '2.2.2.2', '2.2.2.3']
        float_ips = [self._create_floating_ip({'address': address})
                     for address in float_addresses]
        fixed_addrs = [self._create_fixed_ip({'address': address})
                       for address in fixed_addresses]
        for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
            db.floating_ip_fixed_ip_associate(self.ctxt,
                                              float_ip.address,
                                              fixed_addr, 'host')
        for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
            # Disassociate returns the fixed IP that was detached.
            fixed = db.floating_ip_disassociate(self.ctxt, float_ip.address)
            self.assertEqual(fixed.address, fixed_addr)
            updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
            self.assertTrue(updated_float_ip.fixed_ip_id is None)
            self.assertTrue(updated_float_ip.host is None)

    def test_floating_ip_disassociate_not_found(self):
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          db.floating_ip_disassociate, self.ctxt,
                          '11.11.11.11')

    def test_floating_ip_set_auto_assigned(self):
        addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        float_ips = [self._create_floating_ip({'address': addr,
                                               'auto_assigned': False})
                     for addr in addresses]
        # Flag only the first two; the third must stay untouched.
        for i in range(2):
            db.floating_ip_set_auto_assigned(self.ctxt, float_ips[i].address)
        for i in range(2):
            float_ip = db.floating_ip_get(self.ctxt, float_ips[i].id)
            self.assertTrue(float_ip.auto_assigned)
        float_ip = db.floating_ip_get(self.ctxt, float_ips[2].id)
        self.assertFalse(float_ip.auto_assigned)

    def test_floating_ip_get_all(self):
        addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        float_ips = [self._create_floating_ip({'address': addr})
                     for addr in addresses]
        self._assertEqualListsOfObjects(float_ips,
                                        db.floating_ip_get_all(self.ctxt))

    def test_floating_ip_get_all_not_found(self):
        self.assertRaises(exception.NoFloatingIpsDefined,
                          db.floating_ip_get_all, self.ctxt)

    def test_floating_ip_get_all_by_host(self):
        hosts = {
            'host1': ['1.1.1.1', '1.1.1.2'],
            'host2': ['2.1.1.1', '2.1.1.2'],
            'host3': ['3.1.1.1', '3.1.1.2', '3.1.1.3']
        }
        hosts_with_float_ips = {}
        for host, addresses in hosts.iteritems():
            hosts_with_float_ips[host] = []
            for address in addresses:
                float_ip = self._create_floating_ip({'host': host,
                                                     'address': address})
                hosts_with_float_ips[host].append(float_ip)
        for host, float_ips in hosts_with_float_ips.iteritems():
            real_float_ips = db.floating_ip_get_all_by_host(self.ctxt, host)
            self._assertEqualListsOfObjects(float_ips, real_float_ips)

    def test_floating_ip_get_all_by_host_not_found(self):
        self.assertRaises(exception.FloatingIpNotFoundForHost,
                          db.floating_ip_get_all_by_host,
                          self.ctxt, 'non_exists_host')

    def test_floating_ip_get_all_by_project(self):
        projects = {
            'pr1': ['1.1.1.1', '1.1.1.2'],
            'pr2': ['2.1.1.1', '2.1.1.2'],
            'pr3': ['3.1.1.1', '3.1.1.2', '3.1.1.3']
        }
        projects_with_float_ips = {}
        for project_id, addresses in projects.iteritems():
            projects_with_float_ips[project_id] = []
            for address in addresses:
                float_ip = self._create_floating_ip({'project_id': project_id,
                                                     'address': address})
                projects_with_float_ips[project_id].append(float_ip)
        for project_id, float_ips in projects_with_float_ips.iteritems():
            real_float_ips = db.floating_ip_get_all_by_project(self.ctxt,
                                                               project_id)
            self._assertEqualListsOfObjects(float_ips, real_float_ips,
                                            ignored_keys='fixed_ip')

    def test_floating_ip_get_all_by_project_not_authorized(self):
        # A non-admin context may only list its own project's IPs.
        ctxt = context.RequestContext(user_id='a', project_id='abc',
                                      is_admin=False)
        self.assertRaises(exception.NotAuthorized,
                          db.floating_ip_get_all_by_project,
                          ctxt, 'other_project')

    def test_floating_ip_get_by_address(self):
        addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        float_ips = [self._create_floating_ip({'address': addr})
                     for addr in addresses]
        for float_ip in float_ips:
            real_float_ip = db.floating_ip_get_by_address(self.ctxt,
                                                          float_ip.address)
            self._assertEqualObjects(float_ip, real_float_ip,
                                     ignored_keys='fixed_ip')

    def test_floating_ip_get_by_address_not_found(self):
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          db.floating_ip_get_by_address,
                          self.ctxt, '20.20.20.20')

    def test_floating_ip_get_by_invalid_address(self):
        # A DataError from the DB layer surfaces as InvalidIpAddressError.
        self.mock_db_query_first_to_raise_data_error_exception()
        self.assertRaises(exception.InvalidIpAddressError,
                          db.floating_ip_get_by_address,
                          self.ctxt, 'non_exists_host')

    def test_floating_ip_get_by_fixed_address(self):
        fixed_float = [
            ('1.1.1.1', '2.2.2.1'),
            ('1.1.1.2', '2.2.2.2'),
            ('1.1.1.3', '2.2.2.3')
        ]
        for fixed_addr, float_addr in fixed_float:
            self._create_floating_ip({'address': float_addr})
            self._create_fixed_ip({'address': fixed_addr})
            db.floating_ip_fixed_ip_associate(self.ctxt, float_addr,
                                              fixed_addr, 'some_host')
        for fixed_addr, float_addr in fixed_float:
            float_ip = db.floating_ip_get_by_fixed_address(self.ctxt,
                                                           fixed_addr)
            self.assertEqual(float_addr, float_ip[0]['address'])

    def test_floating_ip_get_by_fixed_ip_id(self):
        fixed_float = [
            ('1.1.1.1', '2.2.2.1'),
            ('1.1.1.2', '2.2.2.2'),
            ('1.1.1.3', '2.2.2.3')
        ]
        for fixed_addr, float_addr in fixed_float:
            self._create_floating_ip({'address': float_addr})
            self._create_fixed_ip({'address': fixed_addr})
            db.floating_ip_fixed_ip_associate(self.ctxt, float_addr,
                                              fixed_addr, 'some_host')
        for fixed_addr, float_addr in fixed_float:
            fixed_ip = db.fixed_ip_get_by_address(self.ctxt, fixed_addr)
            float_ip = db.floating_ip_get_by_fixed_ip_id(self.ctxt,
                                                         fixed_ip['id'])
            self.assertEqual(float_addr, float_ip[0]['address'])

    def test_floating_ip_update(self):
        float_ip = self._create_floating_ip({})
        values = {
            'project_id': 'some_pr',
            'host': 'some_host',
            'auto_assigned': True,
            'interface': 'some_interface',
            'pool': 'some_pool'
        }
        db.floating_ip_update(self.ctxt, float_ip['address'], values)
        updated_float_ip = db.floating_ip_get(self.ctxt, float_ip['id'])
        self._assertEqualObjects(updated_float_ip, values,
                                 ignored_keys=['id', 'address', 'updated_at',
                                               'deleted_at', 'created_at',
                                               'deleted', 'fixed_ip_id',
                                               'fixed_ip'])

    def test_floating_ip_update_to_duplicate(self):
        float_ip1 = self._create_floating_ip({'address': '1.1.1.1'})
        float_ip2 = self._create_floating_ip({'address': '1.1.1.2'})
        self.assertRaises(exception.FloatingIpExists,
                          db.floating_ip_update,
                          self.ctxt, float_ip2['address'],
                          {'address': float_ip1['address']})
class InstanceDestroyConstraints(test.TestCase):
    """Tests for db.instance_destroy guarded by db.constraint conditions."""

    def test_destroy_with_equal_any_constraint_met(self):
        ctx = context.get_admin_context()
        instance = db.instance_create(ctx, {'task_state': 'deleting'})
        guard = db.constraint(task_state=db.equal_any('deleting'))
        db.instance_destroy(ctx, instance['uuid'], guard)
        # The row is gone once the constraint matched.
        self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
                          ctx, instance['uuid'])

    def test_destroy_with_equal_any_constraint_not_met(self):
        ctx = context.get_admin_context()
        instance = db.instance_create(ctx, {'vm_state': 'resize'})
        guard = db.constraint(vm_state=db.equal_any('active', 'error'))
        self.assertRaises(exception.ConstraintNotMet, db.instance_destroy,
                          ctx, instance['uuid'], guard)
        # The instance must have survived the rejected destroy.
        survivor = db.instance_get_by_uuid(ctx, instance['uuid'])
        self.assertFalse(survivor['deleted'])

    def test_destroy_with_not_equal_constraint_met(self):
        ctx = context.get_admin_context()
        instance = db.instance_create(ctx, {'task_state': 'deleting'})
        guard = db.constraint(task_state=db.not_equal('error', 'resize'))
        db.instance_destroy(ctx, instance['uuid'], guard)
        self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
                          ctx, instance['uuid'])

    def test_destroy_with_not_equal_constraint_not_met(self):
        ctx = context.get_admin_context()
        instance = db.instance_create(ctx, {'vm_state': 'active'})
        guard = db.constraint(vm_state=db.not_equal('active', 'error'))
        self.assertRaises(exception.ConstraintNotMet, db.instance_destroy,
                          ctx, instance['uuid'], guard)
        survivor = db.instance_get_by_uuid(ctx, instance['uuid'])
        self.assertFalse(survivor['deleted'])
class VolumeUsageDBApiTestCase(test.TestCase):
    """Tests for db.vol_usage_update / db.vol_get_usage_by_time."""

    def setUp(self):
        super(VolumeUsageDBApiTestCase, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id, self.project_id)
        self.useFixture(test.TimeOverride())

    def test_vol_usage_update_no_totals_update(self):
        # Without update_totals=True the counters accumulate in the curr_*
        # columns while the tot_* columns stay at zero.
        ctxt = context.get_admin_context()
        now = timeutils.utcnow()
        start_time = now - datetime.timedelta(seconds=10)
        # One stubbed utcnow() return per vol_usage_update call below;
        # the recorded sequence must match the call order exactly.
        self.mox.StubOutWithMock(timeutils, 'utcnow')
        timeutils.utcnow().AndReturn(now)
        timeutils.utcnow().AndReturn(now)
        timeutils.utcnow().AndReturn(now)
        self.mox.ReplayAll()
        expected_vol_usages = [{'volume_id': u'1',
                                'instance_uuid': 'fake-instance-uuid1',
                                'project_id': 'fake-project-uuid1',
                                'user_id': 'fake-user-uuid1',
                                'curr_reads': 1000,
                                'curr_read_bytes': 2000,
                                'curr_writes': 3000,
                                'curr_write_bytes': 4000,
                                'curr_last_refreshed': now,
                                'tot_reads': 0,
                                'tot_read_bytes': 0,
                                'tot_writes': 0,
                                'tot_write_bytes': 0,
                                'tot_last_refreshed': None},
                               {'volume_id': u'2',
                                'instance_uuid': 'fake-instance-uuid2',
                                'project_id': 'fake-project-uuid2',
                                'user_id': 'fake-user-uuid2',
                                'curr_reads': 100,
                                'curr_read_bytes': 200,
                                'curr_writes': 300,
                                'curr_write_bytes': 400,
                                'tot_reads': 0,
                                'tot_read_bytes': 0,
                                'tot_writes': 0,
                                'tot_write_bytes': 0,
                                'tot_last_refreshed': None}]

        def _compare(vol_usage, expected):
            # Compare only the keys the test cares about.
            for key, value in expected.items():
                self.assertEqual(vol_usage[key], value)

        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 0)
        db.vol_usage_update(ctxt, u'1', rd_req=10, rd_bytes=20,
                            wr_req=30, wr_bytes=40,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            user_id='fake-user-uuid1',
                            availability_zone='fake-az')
        db.vol_usage_update(ctxt, u'2', rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid2',
                            project_id='fake-project-uuid2',
                            user_id='fake-user-uuid2',
                            availability_zone='fake-az')
        # Second update of volume 1: curr_* values are replaced with the
        # latest (larger) counters, not summed.
        db.vol_usage_update(ctxt, u'1', rd_req=1000, rd_bytes=2000,
                            wr_req=3000, wr_bytes=4000,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            user_id='fake-user-uuid1',
                            availability_zone='fake-az')
        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 2)
        _compare(vol_usages[0], expected_vol_usages[0])
        _compare(vol_usages[1], expected_vol_usages[1])

    def test_vol_usage_update_totals_update(self):
        # With update_totals=True the curr_* counters are rolled into the
        # tot_* columns and reset to zero.
        ctxt = context.get_admin_context()
        now = datetime.datetime(1, 1, 1, 1, 0, 0)
        start_time = now - datetime.timedelta(seconds=10)
        # Four updates below, each consuming one stubbed timestamp in order.
        self.mox.StubOutWithMock(timeutils, 'utcnow')
        timeutils.utcnow().AndReturn(now)
        now1 = now + datetime.timedelta(minutes=1)
        timeutils.utcnow().AndReturn(now1)
        now2 = now + datetime.timedelta(minutes=2)
        timeutils.utcnow().AndReturn(now2)
        now3 = now + datetime.timedelta(minutes=3)
        timeutils.utcnow().AndReturn(now3)
        self.mox.ReplayAll()
        db.vol_usage_update(ctxt, u'1', rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            user_id='fake-user-uuid',
                            availability_zone='fake-az')
        current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        self.assertEqual(current_usage['tot_reads'], 0)
        self.assertEqual(current_usage['curr_reads'], 100)
        db.vol_usage_update(ctxt, u'1', rd_req=200, rd_bytes=300,
                            wr_req=400, wr_bytes=500,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            user_id='fake-user-uuid',
                            availability_zone='fake-az',
                            update_totals=True)
        # Totals absorbed the current counters; current reset to zero.
        current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        self.assertEqual(current_usage['tot_reads'], 200)
        self.assertEqual(current_usage['curr_reads'], 0)
        db.vol_usage_update(ctxt, u'1', rd_req=300, rd_bytes=400,
                            wr_req=500, wr_bytes=600,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid')
        current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        self.assertEqual(current_usage['tot_reads'], 200)
        self.assertEqual(current_usage['curr_reads'], 300)
        db.vol_usage_update(ctxt, u'1', rd_req=400, rd_bytes=500,
                            wr_req=600, wr_bytes=700,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            user_id='fake-user-uuid',
                            availability_zone='fake-az',
                            update_totals=True)
        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        expected_vol_usages = {'volume_id': u'1',
                               'project_id': 'fake-project-uuid',
                               'user_id': 'fake-user-uuid',
                               'instance_uuid': 'fake-instance-uuid',
                               'availability_zone': 'fake-az',
                               'tot_reads': 600,
                               'tot_read_bytes': 800,
                               'tot_writes': 1000,
                               'tot_write_bytes': 1200,
                               'tot_last_refreshed': now3,
                               'curr_reads': 0,
                               'curr_read_bytes': 0,
                               'curr_writes': 0,
                               'curr_write_bytes': 0,
                               'curr_last_refreshed': now2}
        # NOTE(review): assertEquals is a deprecated alias of assertEqual.
        self.assertEquals(1, len(vol_usages))
        for key, value in expected_vol_usages.items():
            self.assertEqual(vol_usages[0][key], value, key)

    def test_vol_usage_update_when_blockdevicestats_reset(self):
        ctxt = context.get_admin_context()
        now = timeutils.utcnow()
        start_time = now - datetime.timedelta(seconds=10)
        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 0)
        db.vol_usage_update(ctxt, u'1',
                            rd_req=10000, rd_bytes=20000,
                            wr_req=30000, wr_bytes=40000,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')
        # Instance rebooted or crashed. block device stats were reset and are
        # less then the previous values
        db.vol_usage_update(ctxt, u'1',
                            rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')
        db.vol_usage_update(ctxt, u'1',
                            rd_req=200, rd_bytes=300,
                            wr_req=400, wr_bytes=500,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')
        # The pre-reset counters must have been folded into the totals.
        vol_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        expected_vol_usage = {'volume_id': u'1',
                              'instance_uuid': 'fake-instance-uuid1',
                              'project_id': 'fake-project-uuid1',
                              'availability_zone': 'fake-az',
                              'user_id': 'fake-user-uuid1',
                              'curr_reads': 200,
                              'curr_read_bytes': 300,
                              'curr_writes': 400,
                              'curr_write_bytes': 500,
                              'tot_reads': 10000,
                              'tot_read_bytes': 20000,
                              'tot_writes': 30000,
                              'tot_write_bytes': 40000}
        for key, value in expected_vol_usage.items():
            self.assertEqual(vol_usage[key], value, key)

    def test_vol_usage_update_totals_update_when_blockdevicestats_reset(self):
        # This is unlikely to happen, but could when a volume is detached
        # right after a instance has rebooted / recovered and before
        # the system polled and updated the volume usage cache table.
        ctxt = context.get_admin_context()
        now = timeutils.utcnow()
        start_time = now - datetime.timedelta(seconds=10)
        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 0)
        db.vol_usage_update(ctxt, u'1',
                            rd_req=10000, rd_bytes=20000,
                            wr_req=30000, wr_bytes=40000,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')
        # Instance rebooted or crashed. block device stats were reset and are
        # less then the previous values
        db.vol_usage_update(ctxt, u'1',
                            rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1',
                            update_totals=True)
        # Totals are old counters plus the post-reset counters.
        vol_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        expected_vol_usage = {'volume_id': u'1',
                              'instance_uuid': 'fake-instance-uuid1',
                              'project_id': 'fake-project-uuid1',
                              'availability_zone': 'fake-az',
                              'user_id': 'fake-user-uuid1',
                              'curr_reads': 0,
                              'curr_read_bytes': 0,
                              'curr_writes': 0,
                              'curr_write_bytes': 0,
                              'tot_reads': 10100,
                              'tot_read_bytes': 20200,
                              'tot_writes': 30300,
                              'tot_write_bytes': 40400}
        for key, value in expected_vol_usage.items():
            self.assertEqual(vol_usage[key], value, key)
class TaskLogTestCase(test.TestCase):
    """Tests for db.task_log_* methods."""

    def setUp(self):
        super(TaskLogTestCase, self).setUp()
        self.context = context.get_admin_context()
        now = timeutils.utcnow()
        self.begin = now - datetime.timedelta(seconds=10)
        self.end = now - datetime.timedelta(seconds=5)
        self.task_name = 'fake-task-name'
        self.host = 'fake-host'
        self.message = 'Fake task message'
        # Every test starts with one task log entry already recorded.
        db.task_log_begin_task(self.context, self.task_name, self.begin,
                               self.end, self.host, message=self.message)

    def test_task_log_get(self):
        result = db.task_log_get(self.context, self.task_name, self.begin,
                                 self.end, self.host)
        self.assertEqual(result['task_name'], self.task_name)
        self.assertEqual(result['period_beginning'], self.begin)
        self.assertEqual(result['period_ending'], self.end)
        self.assertEqual(result['host'], self.host)
        self.assertEqual(result['message'], self.message)

    def test_task_log_get_all(self):
        result = db.task_log_get_all(self.context, self.task_name, self.begin,
                                     self.end, host=self.host)
        self.assertEqual(len(result), 1)

    def test_task_log_begin_task(self):
        db.task_log_begin_task(self.context, 'fake', self.begin,
                               self.end, self.host, message=self.message)
        result = db.task_log_get(self.context, 'fake', self.begin,
                                 self.end, self.host)
        self.assertEqual(result['task_name'], 'fake')

    def test_task_log_begin_task_duplicate(self):
        # Beginning the same task twice for the same period/host must fail.
        params = (self.context, 'fake', self.begin, self.end, self.host)
        db.task_log_begin_task(*params, message=self.message)
        self.assertRaises(exception.TaskAlreadyRunning,
                          db.task_log_begin_task,
                          *params, message=self.message)

    def test_task_log_end_task(self):
        errors = 1
        db.task_log_end_task(self.context, self.task_name, self.begin,
                             self.end, self.host, errors, message=self.message)
        result = db.task_log_get(self.context, self.task_name, self.begin,
                                 self.end, self.host)
        self.assertEqual(result['errors'], 1)
class BlockDeviceMappingTestCase(test.TestCase):
    """Tests for db.block_device_mapping_* methods."""

    def setUp(self):
        super(BlockDeviceMappingTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.instance = db.instance_create(self.ctxt, {})

    def _create_bdm(self, values):
        # Create a BDM for self.instance and return the stored row whose
        # device_name matches the one requested (None if not found).
        values.setdefault('instance_uuid', self.instance['uuid'])
        values.setdefault('device_name', 'fake_device')
        values.setdefault('source_type', 'volume')
        values.setdefault('destination_type', 'volume')
        block_dev = block_device.BlockDeviceDict(values)
        db.block_device_mapping_create(self.ctxt, block_dev, legacy=False)
        uuid = block_dev['instance_uuid']
        bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        for bdm in bdms:
            if bdm['device_name'] == values['device_name']:
                return bdm

    def test_scrub_empty_str_values_no_effect(self):
        values = {'volume_size': 5}
        expected = copy.copy(values)
        sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
        self.assertEqual(values, expected)

    def test_scrub_empty_str_values_empty_string(self):
        values = {'volume_size': ''}
        sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
        self.assertEqual(values, {})

    def test_scrub_empty_str_values_empty_unicode(self):
        values = {'volume_size': u''}
        sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
        self.assertEqual(values, {})

    def test_block_device_mapping_create(self):
        bdm = self._create_bdm({})
        self.assertFalse(bdm is None)

    def test_block_device_mapping_update(self):
        bdm = self._create_bdm({})
        result = db.block_device_mapping_update(
            self.ctxt, bdm['id'], {'destination_type': 'moon'},
            legacy=False)
        uuid = bdm['instance_uuid']
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(bdm_real[0]['destination_type'], 'moon')
        # Also make sure the update call returned correct data
        self.assertEqual(dict(bdm_real[0].iteritems()),
                         dict(result.iteritems()))

    def test_block_device_mapping_update_or_create(self):
        values = {
            'instance_uuid': self.instance['uuid'],
            'device_name': 'fake_name',
            'source_type': 'volume',
            'destination_type': 'volume'
        }
        # check create
        db.block_device_mapping_update_or_create(self.ctxt, values,
                                                 legacy=False)
        uuid = values['instance_uuid']
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 1)
        self.assertEqual(bdm_real[0]['device_name'], 'fake_name')
        # check update
        values['destination_type'] = 'camelot'
        db.block_device_mapping_update_or_create(self.ctxt, values,
                                                 legacy=False)
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 1)
        bdm_real = bdm_real[0]
        self.assertEqual(bdm_real['device_name'], 'fake_name')
        self.assertEqual(bdm_real['destination_type'], 'camelot')

    def test_block_device_mapping_update_or_create_check_remove_virt(self):
        uuid = self.instance['uuid']
        values = {
            'instance_uuid': uuid,
            'source_type': 'blank',
            'guest_format': 'swap',
        }
        # check that old swap bdms are deleted on create
        val1 = dict(values)
        val1['device_name'] = 'device1'
        db.block_device_mapping_create(self.ctxt, val1, legacy=False)
        val2 = dict(values)
        val2['device_name'] = 'device2'
        db.block_device_mapping_update_or_create(self.ctxt, val2, legacy=False)
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 1)
        bdm_real = bdm_real[0]
        self.assertEqual(bdm_real['device_name'], 'device2')
        self.assertEqual(bdm_real['source_type'], 'blank')
        self.assertEqual(bdm_real['guest_format'], 'swap')
        db.block_device_mapping_destroy(self.ctxt, bdm_real['id'])
        # check that old ephemerals are deleted no matter what
        val3 = dict(values)
        val3['device_name'] = 'device3'
        val3['guest_format'] = None
        val4 = dict(values)
        val4['device_name'] = 'device4'
        val4['guest_format'] = None
        db.block_device_mapping_create(self.ctxt, val3, legacy=False)
        db.block_device_mapping_create(self.ctxt, val4, legacy=False)
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 2)
        val5 = dict(values)
        val5['device_name'] = 'device5'
        val5['guest_format'] = None
        db.block_device_mapping_update_or_create(self.ctxt, val5, legacy=False)
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 1)
        bdm_real = bdm_real[0]
        self.assertEqual(bdm_real['device_name'], 'device5')

    def test_block_device_mapping_get_all_by_instance(self):
        uuid1 = self.instance['uuid']
        uuid2 = db.instance_create(self.ctxt, {})['uuid']
        bmds_values = [{'instance_uuid': uuid1,
                        'device_name': 'first'},
                       {'instance_uuid': uuid2,
                        'device_name': 'second'},
                       {'instance_uuid': uuid2,
                        'device_name': 'third'}]
        for bdm in bmds_values:
            self._create_bdm(bdm)
        bmd = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid1)
        self.assertEqual(len(bmd), 1)
        self.assertEqual(bmd[0]['device_name'], 'first')
        bmd = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid2)
        self.assertEqual(len(bmd), 2)

    def test_block_device_mapping_destroy(self):
        bdm = self._create_bdm({})
        db.block_device_mapping_destroy(self.ctxt, bdm['id'])
        bdm = db.block_device_mapping_get_all_by_instance(self.ctxt,
                                                          bdm['instance_uuid'])
        self.assertEqual(len(bdm), 0)

    def test_block_device_mapping_destory_by_instance_and_volumne(self):
        # NOTE(review): "destory"/"volumne" typos are in the method name only;
        # renaming would change the test id, so they are left as-is.
        vol_id1 = '69f5c254-1a5b-4fff-acf7-cb369904f58f'
        vol_id2 = '69f5c254-1a5b-4fff-acf7-cb369904f59f'
        self._create_bdm({'device_name': 'fake1', 'volume_id': vol_id1})
        self._create_bdm({'device_name': 'fake2', 'volume_id': vol_id2})
        uuid = self.instance['uuid']
        db.block_device_mapping_destroy_by_instance_and_volume(self.ctxt, uuid,
                                                               vol_id1)
        bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdms), 1)
        self.assertEqual(bdms[0]['device_name'], 'fake2')

    def test_block_device_mapping_destroy_by_instance_and_device(self):
        self._create_bdm({'device_name': 'fake1'})
        self._create_bdm({'device_name': 'fake2'})
        uuid = self.instance['uuid']
        params = (self.ctxt, uuid, 'fake1')
        db.block_device_mapping_destroy_by_instance_and_device(*params)
        bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdms), 1)
        self.assertEqual(bdms[0]['device_name'], 'fake2')
class AgentBuildTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.agent_build_* methods."""

    def setUp(self):
        super(AgentBuildTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def test_agent_build_create_and_get_all(self):
        self.assertEqual(0, len(db.agent_build_get_all(self.ctxt)))
        created = db.agent_build_create(self.ctxt, {'os': 'GNU/HURD'})
        builds = db.agent_build_get_all(self.ctxt)
        self.assertEqual(1, len(builds))
        self._assertEqualObjects(created, builds[0])

    def test_agent_build_get_by_triple(self):
        created = db.agent_build_create(self.ctxt, {'hypervisor': 'kvm',
                                                    'os': 'FreeBSD',
                                                    'architecture': 'x86_64'})
        # A non-matching architecture yields no result.
        self.assertIsNone(db.agent_build_get_by_triple(self.ctxt, 'kvm',
                                                       'FreeBSD', 'i386'))
        found = db.agent_build_get_by_triple(self.ctxt, 'kvm', 'FreeBSD',
                                             'x86_64')
        self._assertEqualObjects(created, found)

    def test_agent_build_destroy(self):
        build = db.agent_build_create(self.ctxt, {})
        self.assertEqual(1, len(db.agent_build_get_all(self.ctxt)))
        db.agent_build_destroy(self.ctxt, build.id)
        self.assertEqual(0, len(db.agent_build_get_all(self.ctxt)))

    def test_agent_build_update(self):
        build = db.agent_build_create(self.ctxt, {'os': 'HaikuOS'})
        db.agent_build_update(self.ctxt, build.id, {'os': 'ReactOS'})
        self.assertEqual('ReactOS', db.agent_build_get_all(self.ctxt)[0].os)

    def test_agent_build_destroy_destroyed(self):
        # Destroying an already destroyed build must raise.
        build = db.agent_build_create(self.ctxt, {})
        db.agent_build_destroy(self.ctxt, build.id)
        self.assertRaises(exception.AgentBuildNotFound,
                          db.agent_build_destroy, self.ctxt, build.id)

    def test_agent_build_update_destroyed(self):
        # Updating a destroyed build must raise as well.
        build = db.agent_build_create(self.ctxt, {'os': 'HaikuOS'})
        db.agent_build_destroy(self.ctxt, build.id)
        self.assertRaises(exception.AgentBuildNotFound,
                          db.agent_build_update, self.ctxt, build.id,
                          {'os': 'OS/2'})

    def test_agent_build_exists(self):
        values = {'hypervisor': 'kvm', 'os': 'FreeBSD',
                  'architecture': 'x86_64'}
        db.agent_build_create(self.ctxt, values)
        # The (hypervisor, os, architecture) triple must be unique.
        self.assertRaises(exception.AgentBuildExists, db.agent_build_create,
                          self.ctxt, values)
class VirtualInterfaceTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.virtual_interface_* methods."""

    def setUp(self):
        super(VirtualInterfaceTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.instance_uuid = db.instance_create(self.ctxt, {})['uuid']
        values = {'host': 'localhost', 'project_id': 'project1'}
        self.network = db.network_create_safe(self.ctxt, values)

    def _get_base_values(self):
        """Default values for a VIF attached to self.instance_uuid."""
        return {
            'instance_uuid': self.instance_uuid,
            'address': 'fake_address',
            'network_id': self.network['id'],
            'uuid': str(stdlib_uuid.uuid4())
        }

    def mock_db_query_first_to_raise_data_error_exception(self):
        # Force query.Query.first() to raise DataError so the DB layer's
        # error translation can be exercised.
        self.mox.StubOutWithMock(query.Query, 'first')
        query.Query.first().AndRaise(exc.DataError(mox.IgnoreArg(),
                                                   mox.IgnoreArg(),
                                                   mox.IgnoreArg()))
        self.mox.ReplayAll()

    def _create_virt_interface(self, values):
        v = self._get_base_values()
        v.update(values)
        return db.virtual_interface_create(self.ctxt, v)

    def test_virtual_interface_create(self):
        vif = self._create_virt_interface({})
        # assertIsNotNone instead of assertFalse(... is None): same check,
        # clearer intent and failure message.
        self.assertIsNotNone(vif['id'])
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at', 'uuid']
        self._assertEqualObjects(vif, self._get_base_values(), ignored_keys)

    def test_virtual_interface_create_with_duplicate_address(self):
        # NOTE(review): the second create reuses the first VIF's uuid (and
        # the default address from _get_base_values), so creation must fail.
        vif = self._create_virt_interface({})
        self.assertRaises(exception.VirtualInterfaceCreateException,
                          self._create_virt_interface, {"uuid": vif['uuid']})

    def test_virtual_interface_get(self):
        vifs = [self._create_virt_interface({'address': 'a'}),
                self._create_virt_interface({'address': 'b'})]
        for vif in vifs:
            real_vif = db.virtual_interface_get(self.ctxt, vif['id'])
            self._assertEqualObjects(vif, real_vif)

    def test_virtual_interface_get_by_address(self):
        vifs = [self._create_virt_interface({'address': 'first'}),
                self._create_virt_interface({'address': 'second'})]
        for vif in vifs:
            real_vif = db.virtual_interface_get_by_address(self.ctxt,
                                                           vif['address'])
            self._assertEqualObjects(vif, real_vif)

    def test_virtual_interface_get_by_address_not_found(self):
        self.assertIsNone(db.virtual_interface_get_by_address(self.ctxt,
                          "i.nv.ali.ip"))

    def test_virtual_interface_get_by_address_data_error_exception(self):
        """A low-level DataError is translated to InvalidIpAddressError."""
        self.mock_db_query_first_to_raise_data_error_exception()
        self.assertRaises(exception.InvalidIpAddressError,
                          db.virtual_interface_get_by_address,
                          self.ctxt,
                          "i.nv.ali.ip")

    def test_virtual_interface_get_by_uuid(self):
        vifs = [self._create_virt_interface({"address": "address_1"}),
                self._create_virt_interface({"address": "address_2"})]
        for vif in vifs:
            real_vif = db.virtual_interface_get_by_uuid(self.ctxt,
                                                        vif['uuid'])
            self._assertEqualObjects(vif, real_vif)

    def test_virtual_interface_get_by_instance(self):
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
        vifs1 = [self._create_virt_interface({'address': 'fake1'}),
                 self._create_virt_interface({'address': 'fake2'})]
        vifs2 = [self._create_virt_interface({'address': 'fake3',
                                              'instance_uuid': inst_uuid2})]
        vifs1_real = db.virtual_interface_get_by_instance(self.ctxt,
                                                          self.instance_uuid)
        vifs2_real = db.virtual_interface_get_by_instance(self.ctxt,
                                                          inst_uuid2)
        self._assertEqualListsOfObjects(vifs1, vifs1_real)
        self._assertEqualListsOfObjects(vifs2, vifs2_real)

    def test_virtual_interface_get_by_instance_and_network(self):
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
        values = {'host': 'localhost', 'project_id': 'project2'}
        network_id = db.network_create_safe(self.ctxt, values)['id']
        vifs = [self._create_virt_interface({'address': 'fake1'}),
                self._create_virt_interface({'address': 'fake2',
                                             'network_id': network_id,
                                             'instance_uuid': inst_uuid2}),
                self._create_virt_interface({'address': 'fake3',
                                             'instance_uuid': inst_uuid2})]
        for vif in vifs:
            params = (self.ctxt, vif['instance_uuid'], vif['network_id'])
            r_vif = db.virtual_interface_get_by_instance_and_network(*params)
            self._assertEqualObjects(r_vif, vif)

    def test_virtual_interface_delete_by_instance(self):
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
        values = [dict(address='fake1'), dict(address='fake2'),
                  dict(address='fake3', instance_uuid=inst_uuid2)]
        for vals in values:
            self._create_virt_interface(vals)
        db.virtual_interface_delete_by_instance(self.ctxt,
                                                self.instance_uuid)
        # Only the second instance's VIF should survive the delete.
        real_vifs1 = db.virtual_interface_get_by_instance(self.ctxt,
                                                          self.instance_uuid)
        real_vifs2 = db.virtual_interface_get_by_instance(self.ctxt,
                                                          inst_uuid2)
        self.assertEqual(len(real_vifs1), 0)
        self.assertEqual(len(real_vifs2), 1)

    def test_virtual_interface_get_all(self):
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
        values = [dict(address='fake1'), dict(address='fake2'),
                  dict(address='fake3', instance_uuid=inst_uuid2)]
        vifs = [self._create_virt_interface(val) for val in values]
        real_vifs = db.virtual_interface_get_all(self.ctxt)
        self._assertEqualListsOfObjects(vifs, real_vifs)
class NetworkTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.network_* methods."""

    def setUp(self):
        super(NetworkTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _get_associated_fixed_ip(self, host, cidr, ip):
        """Create a network plus an instance on *host* holding *ip*.

        Returns the (network, instance) pair with the fixed IP already
        created, allocated and associated, so callers can assert on
        "in use" state.
        """
        network = db.network_create_safe(self.ctxt,
            {'project_id': 'project1', 'cidr': cidr})
        self.assertFalse(db.network_in_use_on_host(self.ctxt, network.id,
            host))
        instance = db.instance_create(self.ctxt,
            {'project_id': 'project1', 'host': host})
        virtual_interface = db.virtual_interface_create(self.ctxt,
            {'instance_uuid': instance.uuid, 'network_id': network.id,
             'address': ip})
        db.fixed_ip_create(self.ctxt, {'address': ip,
            'network_id': network.id, 'allocated': True,
            'virtual_interface_id': virtual_interface.id})
        db.fixed_ip_associate(self.ctxt, ip, instance.uuid,
            network.id)
        return network, instance

    def test_network_in_use_on_host(self):
        network, _ = self._get_associated_fixed_ip('host.net', '192.0.2.0/30',
                                                   '192.0.2.1')
        self.assertTrue(db.network_in_use_on_host(self.ctxt, network.id,
                                                  'host.net'))

    def test_network_get_associated_fixed_ips(self):
        network, instance = self._get_associated_fixed_ip('host.net',
            '192.0.2.0/30', '192.0.2.1')
        data = db.network_get_associated_fixed_ips(self.ctxt, network.id)
        self.assertEqual(1, len(data))
        self.assertEqual('192.0.2.1', data[0]['address'])
        self.assertEqual('192.0.2.1', data[0]['vif_address'])
        self.assertEqual(instance.uuid, data[0]['instance_uuid'])
        self.assertTrue(data[0]['allocated'])

    def test_network_create_safe(self):
        values = {'host': 'localhost', 'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)
        # A uuid is generated automatically for the new network.
        self.assertEqual(36, len(network['uuid']))
        db_network = db.network_get(self.ctxt, network['id'])
        self._assertEqualObjects(network, db_network)

    def test_network_create_with_duplicate_vlan(self):
        values1 = {'host': 'localhost', 'project_id': 'project1', 'vlan': 1}
        values2 = {'host': 'something', 'project_id': 'project1', 'vlan': 1}
        db.network_create_safe(self.ctxt, values1)
        self.assertRaises(exception.DuplicateVlan,
                          db.network_create_safe, self.ctxt, values2)

    def test_network_delete_safe(self):
        values = {'host': 'localhost', 'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)
        db.network_get(self.ctxt, network['id'])
        values = {'network_id': network['id'], 'address': '192.168.1.5'}
        address1 = db.fixed_ip_create(self.ctxt, values)['address']
        values = {'network_id': network['id'],
                  'address': '192.168.1.6',
                  'allocated': True}
        address2 = db.fixed_ip_create(self.ctxt, values)['address']
        # Deletion must be refused while an IP is still allocated.
        self.assertRaises(exception.NetworkInUse,
                          db.network_delete_safe, self.ctxt, network['id'])
        db.fixed_ip_update(self.ctxt, address2, {'allocated': False})
        network = db.network_delete_safe(self.ctxt, network['id'])
        # The network's fixed IPs are soft-deleted along with it ...
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_get_by_address, self.ctxt, address1)
        # ... but remain visible with read_deleted='yes'.
        ctxt = self.ctxt.elevated(read_deleted='yes')
        fixed_ip = db.fixed_ip_get_by_address(ctxt, address1)
        self.assertTrue(fixed_ip['deleted'])

    def test_network_in_use_on_host_with_fixed_ip(self):
        # NOTE(review): renamed from a second ``test_network_in_use_on_host``
        # definition, which silently shadowed the first test with that name;
        # both scenarios now run. Also assertTrue/assertFalse instead of
        # assertEqual(..., True/False).
        values = {'host': 'foo', 'hostname': 'myname'}
        instance = db.instance_create(self.ctxt, values)
        values = {'address': '192.168.1.5', 'instance_uuid': instance['uuid']}
        vif = db.virtual_interface_create(self.ctxt, values)
        values = {'address': '192.168.1.6',
                  'network_id': 1,
                  'allocated': True,
                  'instance_uuid': instance['uuid'],
                  'virtual_interface_id': vif['id']}
        db.fixed_ip_create(self.ctxt, values)
        self.assertTrue(db.network_in_use_on_host(self.ctxt, 1, 'foo'))
        self.assertFalse(db.network_in_use_on_host(self.ctxt, 1, 'bar'))

    def test_network_update_nonexistent(self):
        self.assertRaises(exception.NetworkNotFound,
                          db.network_update, self.ctxt, 'nonexistent', {})

    def test_network_update_with_duplicate_vlan(self):
        values1 = {'host': 'localhost', 'project_id': 'project1', 'vlan': 1}
        values2 = {'host': 'something', 'project_id': 'project1', 'vlan': 2}
        network_ref = db.network_create_safe(self.ctxt, values1)
        db.network_create_safe(self.ctxt, values2)
        self.assertRaises(exception.DuplicateVlan,
                          db.network_update, self.ctxt,
                          network_ref["id"], values2)

    def test_network_update(self):
        network = db.network_create_safe(self.ctxt, {'project_id': 'project1',
            'vlan': 1, 'host': 'test.com'})
        db.network_update(self.ctxt, network.id, {'vlan': 2})
        network_new = db.network_get(self.ctxt, network.id)
        self.assertEqual(2, network_new.vlan)

    def test_network_set_host_nonexistent_network(self):
        self.assertRaises(exception.NetworkNotFound,
                          db.network_set_host, self.ctxt, 'nonexistent',
                          'nonexistent')

    def test_network_set_host_with_initially_no_host(self):
        values = {'host': 'example.com', 'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)
        self.assertEqual(
            db.network_set_host(self.ctxt, network.id, 'new.example.com'),
            'example.com')

    def test_network_set_host(self):
        values = {'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)
        self.assertEqual(
            db.network_set_host(self.ctxt, network.id, 'example.com'),
            'example.com')
        self.assertEqual('example.com',
                         db.network_get(self.ctxt, network.id).host)

    def test_network_get_all_by_host(self):
        self.assertEqual([],
            db.network_get_all_by_host(self.ctxt, 'example.com'))
        host = 'h1.example.com'
        # network with host set
        net1 = db.network_create_safe(self.ctxt, {'host': host})
        self._assertEqualListsOfObjects([net1],
            db.network_get_all_by_host(self.ctxt, host))
        # network with fixed ip with host set
        net2 = db.network_create_safe(self.ctxt, {})
        db.fixed_ip_create(self.ctxt, {'host': host, 'network_id': net2.id})
        self._assertEqualListsOfObjects([net1, net2],
            db.network_get_all_by_host(self.ctxt, host))
        # network with instance with host set
        net3 = db.network_create_safe(self.ctxt, {})
        instance = db.instance_create(self.ctxt, {'host': host})
        vif = db.virtual_interface_create(self.ctxt,
            {'instance_uuid': instance.uuid})
        db.fixed_ip_create(self.ctxt, {'network_id': net3.id,
            'virtual_interface_id': vif.id})
        self._assertEqualListsOfObjects([net1, net2, net3],
            db.network_get_all_by_host(self.ctxt, host))

    def test_network_get_by_cidr(self):
        cidr = '192.0.2.0/30'
        cidr_v6 = '2001:db8:1::/64'
        network = db.network_create_safe(self.ctxt,
            {'project_id': 'project1', 'cidr': cidr, 'cidr_v6': cidr_v6})
        self._assertEqualObjects(network,
                                 db.network_get_by_cidr(self.ctxt, cidr))
        self._assertEqualObjects(network,
                                 db.network_get_by_cidr(self.ctxt, cidr_v6))

    def test_network_get_by_cidr_nonexistent(self):
        # NOTE(review): this class used to define this test twice; the first
        # copy called network_get_by_cidr eagerly inside assertRaises
        # (raising instead of asserting) and was shadowed by this correct
        # copy. The broken duplicate has been dropped.
        self.assertRaises(exception.NetworkNotFoundForCidr,
                          db.network_get_by_cidr, self.ctxt, '192.0.2.0/30')

    def test_network_get_by_uuid(self):
        network = db.network_create_safe(self.ctxt,
            {'project_id': 'project_1'})
        self._assertEqualObjects(network,
                                 db.network_get_by_uuid(self.ctxt,
                                                        network.uuid))

    def test_network_get_by_uuid_nonexistent(self):
        self.assertRaises(exception.NetworkNotFoundForUUID,
                          db.network_get_by_uuid, self.ctxt,
                          'non-existent-uuid')

    def test_network_get_all_by_uuids_no_networks(self):
        self.assertRaises(exception.NoNetworksFound,
                          db.network_get_all_by_uuids, self.ctxt,
                          ['non-existent-uuid'])

    def test_network_get_all_by_uuids(self):
        net1 = db.network_create_safe(self.ctxt, {})
        net2 = db.network_create_safe(self.ctxt, {})
        self._assertEqualListsOfObjects([net1, net2],
            db.network_get_all_by_uuids(self.ctxt, [net1.uuid, net2.uuid]))

    def test_network_get_all_no_networks(self):
        self.assertRaises(exception.NoNetworksFound,
                          db.network_get_all, self.ctxt)

    def test_network_get_all(self):
        network = db.network_create_safe(self.ctxt, {})
        network_db = db.network_get_all(self.ctxt)
        self.assertEqual(1, len(network_db))
        self._assertEqualObjects(network, network_db[0])

    def test_network_get(self):
        network = db.network_create_safe(self.ctxt, {})
        self._assertEqualObjects(db.network_get(self.ctxt, network.id),
                                 network)
        db.network_delete_safe(self.ctxt, network.id)
        self.assertRaises(exception.NetworkNotFound,
                          db.network_get, self.ctxt, network.id)

    def test_network_associate(self):
        network = db.network_create_safe(self.ctxt, {})
        self.assertIsNone(network.project_id)
        db.network_associate(self.ctxt, "project1", network.id)
        self.assertEqual("project1", db.network_get(self.ctxt,
                                                    network.id).project_id)

    def test_network_disassociate(self):
        # NOTE(review): renamed from the misspelled
        # ``test_network_diassociate``.
        network = db.network_create_safe(self.ctxt,
            {'project_id': 'project1', 'host': 'test.net'})
        # disassociate project
        db.network_disassociate(self.ctxt, network.id, False, True)
        self.assertIsNone(db.network_get(self.ctxt, network.id).project_id)
        # disassociate host
        db.network_disassociate(self.ctxt, network.id, True, False)
        self.assertIsNone(db.network_get(self.ctxt, network.id).host)

    def test_network_count_reserved_ips(self):
        net = db.network_create_safe(self.ctxt, {})
        self.assertEqual(0, db.network_count_reserved_ips(self.ctxt, net.id))
        db.fixed_ip_create(self.ctxt, {'network_id': net.id,
                                       'reserved': True})
        self.assertEqual(1, db.network_count_reserved_ips(self.ctxt, net.id))
class KeyPairTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.key_pair_* methods."""

    def setUp(self):
        super(KeyPairTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _create_key_pair(self, values):
        return db.key_pair_create(self.ctxt, values)

    def test_key_pair_create(self):
        param = {
            'name': 'test_1',
            'user_id': 'test_user_id_1',
            'public_key': 'test_public_key_1',
            'fingerprint': 'test_fingerprint_1'
        }
        key_pair = self._create_key_pair(param)
        # assertIsNotNone reports a clearer failure than
        # assertTrue(... is not None).
        self.assertIsNotNone(key_pair['id'])
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id']
        self._assertEqualObjects(key_pair, param, ignored_keys)

    def test_key_pair_create_with_duplicate_name(self):
        params = {'name': 'test_name', 'user_id': 'test_user_id'}
        self._create_key_pair(params)
        self.assertRaises(exception.KeyPairExists, self._create_key_pair,
                          params)

    def test_key_pair_get(self):
        params = [
            {'name': 'test_1', 'user_id': 'test_user_id_1'},
            {'name': 'test_2', 'user_id': 'test_user_id_2'},
            {'name': 'test_3', 'user_id': 'test_user_id_3'}
        ]
        key_pairs = [self._create_key_pair(p) for p in params]
        for key in key_pairs:
            real_key = db.key_pair_get(self.ctxt, key['user_id'], key['name'])
            self._assertEqualObjects(key, real_key)

    def test_key_pair_get_no_results(self):
        param = {'name': 'test_1', 'user_id': 'test_user_id_1'}
        self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
                          self.ctxt, param['user_id'], param['name'])

    def test_key_pair_get_deleted(self):
        """Deleted keypairs stay readable with read_deleted='yes'."""
        param = {'name': 'test_1', 'user_id': 'test_user_id_1'}
        key_pair_created = self._create_key_pair(param)
        db.key_pair_destroy(self.ctxt, param['user_id'], param['name'])
        self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
                          self.ctxt, param['user_id'], param['name'])
        ctxt = self.ctxt.elevated(read_deleted='yes')
        key_pair_deleted = db.key_pair_get(ctxt, param['user_id'],
                                           param['name'])
        ignored_keys = ['deleted', 'created_at', 'updated_at', 'deleted_at']
        self._assertEqualObjects(key_pair_deleted, key_pair_created,
                                 ignored_keys)
        # Soft-delete records the row id in the 'deleted' column.
        self.assertEqual(key_pair_deleted['deleted'], key_pair_deleted['id'])

    def test_key_pair_get_all_by_user(self):
        params = [
            {'name': 'test_1', 'user_id': 'test_user_id_1'},
            {'name': 'test_2', 'user_id': 'test_user_id_1'},
            {'name': 'test_3', 'user_id': 'test_user_id_2'}
        ]
        key_pairs_user_1 = [self._create_key_pair(p) for p in params
                            if p['user_id'] == 'test_user_id_1']
        key_pairs_user_2 = [self._create_key_pair(p) for p in params
                            if p['user_id'] == 'test_user_id_2']
        real_keys_1 = db.key_pair_get_all_by_user(self.ctxt, 'test_user_id_1')
        real_keys_2 = db.key_pair_get_all_by_user(self.ctxt, 'test_user_id_2')
        self._assertEqualListsOfObjects(key_pairs_user_1, real_keys_1)
        self._assertEqualListsOfObjects(key_pairs_user_2, real_keys_2)

    def test_key_pair_count_by_user(self):
        params = [
            {'name': 'test_1', 'user_id': 'test_user_id_1'},
            {'name': 'test_2', 'user_id': 'test_user_id_1'},
            {'name': 'test_3', 'user_id': 'test_user_id_2'}
        ]
        for p in params:
            self._create_key_pair(p)
        count_1 = db.key_pair_count_by_user(self.ctxt, 'test_user_id_1')
        self.assertEqual(count_1, 2)
        count_2 = db.key_pair_count_by_user(self.ctxt, 'test_user_id_2')
        self.assertEqual(count_2, 1)

    def test_key_pair_destroy(self):
        param = {'name': 'test_1', 'user_id': 'test_user_id_1'}
        self._create_key_pair(param)
        db.key_pair_destroy(self.ctxt, param['user_id'], param['name'])
        self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
                          self.ctxt, param['user_id'], param['name'])

    def test_key_pair_destroy_no_such_key(self):
        param = {'name': 'test_1', 'user_id': 'test_user_id_1'}
        self.assertRaises(exception.KeypairNotFound,
                          db.key_pair_destroy, self.ctxt,
                          param['user_id'], param['name'])
class QuotaTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.quota_* methods."""

    def setUp(self):
        super(QuotaTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def test_quota_create(self):
        quota = db.quota_create(self.ctxt, 'project1', 'resource', 99)
        self.assertEqual(quota.resource, 'resource')
        self.assertEqual(quota.hard_limit, 99)
        self.assertEqual(quota.project_id, 'project1')

    def test_quota_get(self):
        quota = db.quota_create(self.ctxt, 'project1', 'resource', 99)
        quota_db = db.quota_get(self.ctxt, 'project1', 'resource')
        self._assertEqualObjects(quota, quota_db)

    def test_quota_get_all_by_project(self):
        for i in range(3):
            for j in range(3):
                db.quota_create(self.ctxt, 'proj%d' % i, 'resource%d' % j, j)
        for i in range(3):
            quotas_db = db.quota_get_all_by_project(self.ctxt, 'proj%d' % i)
            self.assertEqual(quotas_db, {'project_id': 'proj%d' % i,
                                         'resource0': 0,
                                         'resource1': 1,
                                         'resource2': 2})

    def test_quota_get_all_by_project_and_user(self):
        for i in range(3):
            for j in range(3):
                db.quota_create(self.ctxt, 'proj%d' % i, 'resource%d' % j,
                                j - 1, user_id='user%d' % i)
        for i in range(3):
            quotas_db = db.quota_get_all_by_project_and_user(self.ctxt,
                                                             'proj%d' % i,
                                                             'user%d' % i)
            self.assertEqual(quotas_db, {'project_id': 'proj%d' % i,
                                         'user_id': 'user%d' % i,
                                         'resource0': -1,
                                         'resource1': 0,
                                         'resource2': 1})

    def test_quota_update(self):
        db.quota_create(self.ctxt, 'project1', 'resource1', 41)
        db.quota_update(self.ctxt, 'project1', 'resource1', 42)
        quota = db.quota_get(self.ctxt, 'project1', 'resource1')
        self.assertEqual(quota.hard_limit, 42)
        self.assertEqual(quota.resource, 'resource1')
        self.assertEqual(quota.project_id, 'project1')

    def test_quota_update_nonexistent(self):
        self.assertRaises(exception.ProjectQuotaNotFound,
                          db.quota_update, self.ctxt, 'project1', 'resource1',
                          42)

    def test_quota_get_nonexistent(self):
        self.assertRaises(exception.ProjectQuotaNotFound,
                          db.quota_get, self.ctxt, 'project1', 'resource1')

    def test_quota_reserve_all_resources(self):
        """Reserving against every reservable resource records usage."""
        quotas = {}
        deltas = {}
        reservable_resources = {}
        for i, resource in enumerate(resources):
            if isinstance(resource, ReservableResource):
                quotas[resource.name] = db.quota_create(self.ctxt, 'project1',
                                                        resource.name, 100)
                deltas[resource.name] = i
                reservable_resources[resource.name] = resource
        usages = {'instances': 3, 'cores': 6, 'ram': 9}
        instances = []
        for i in range(3):
            instances.append(db.instance_create(self.ctxt,
                             {'vcpus': 2, 'memory_mb': 3,
                              'project_id': 'project1'}))
        usages['fixed_ips'] = 2
        network = db.network_create_safe(self.ctxt, {})
        for i in range(2):
            address = '192.168.0.%d' % i
            # Result intentionally unused: only the side effect matters.
            db.fixed_ip_create(self.ctxt, {'project_id': 'project1',
                                           'address': address,
                                           'network_id': network['id']})
            db.fixed_ip_associate(self.ctxt, address,
                                  instances[0].uuid, network['id'])
        usages['floating_ips'] = 5
        for i in range(5):
            db.floating_ip_create(self.ctxt, {'project_id': 'project1'})
        usages['security_groups'] = 3
        for i in range(3):
            db.security_group_create(self.ctxt, {'project_id': 'project1'})
        reservations_uuids = db.quota_reserve(self.ctxt, reservable_resources,
                                              quotas, quotas, deltas, None,
                                              None, None, 'project1')
        # list() so .remove() below also works on Python 3, where
        # dict.keys() returns a view without a remove() method.
        resources_names = list(reservable_resources.keys())
        for reservation_uuid in reservations_uuids:
            reservation = db.reservation_get(self.ctxt, reservation_uuid)
            usage = db.quota_usage_get(self.ctxt, 'project1',
                                       reservation.resource)
            self.assertEqual(usage.in_use, usages[reservation.resource],
                             'Resource: %s' % reservation.resource)
            self.assertEqual(usage.reserved, deltas[reservation.resource])
            self.assertIn(reservation.resource, resources_names)
            resources_names.remove(reservation.resource)
        # Every reservable resource produced exactly one reservation.
        self.assertEqual(len(resources_names), 0)

    def test_quota_destroy_all_by_project(self):
        reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
        db.quota_destroy_all_by_project(self.ctxt, 'project1')
        self.assertEqual(db.quota_get_all_by_project(self.ctxt, 'project1'),
                         {'project_id': 'project1'})
        self.assertEqual(db.quota_usage_get_all_by_project(
                         self.ctxt, 'project1'),
                         {'project_id': 'project1'})
        for r in reservations:
            self.assertRaises(exception.ReservationNotFound,
                              db.reservation_get, self.ctxt, r)

    def test_quota_destroy_all_by_project_and_user(self):
        reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
        db.quota_destroy_all_by_project_and_user(self.ctxt, 'project1',
                                                 'user1')
        self.assertEqual(db.quota_get_all_by_project_and_user(self.ctxt,
                         'project1', 'user1'),
                         {'project_id': 'project1',
                          'user_id': 'user1'})
        self.assertEqual(db.quota_usage_get_all_by_project_and_user(
                         self.ctxt, 'project1', 'user1'),
                         {'project_id': 'project1',
                          'user_id': 'user1'})
        for r in reservations:
            self.assertRaises(exception.ReservationNotFound,
                              db.reservation_get, self.ctxt, r)

    def test_quota_usage_get_nonexistent(self):
        self.assertRaises(exception.QuotaUsageNotFound, db.quota_usage_get,
                          self.ctxt, 'p1', 'nonexitent_resource')

    def test_quota_usage_get(self):
        _quota_reserve(self.ctxt, 'p1', 'u1')
        quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0')
        expected = {'resource': 'resource0', 'project_id': 'p1',
                    'in_use': 0, 'reserved': 0, 'total': 0}
        # items() instead of the Python-2-only iteritems().
        for key, value in expected.items():
            self.assertEqual(value, quota_usage[key])

    def test_quota_usage_get_all_by_project(self):
        _quota_reserve(self.ctxt, 'p1', 'u1')
        expected = {'project_id': 'p1',
                    'resource0': {'in_use': 0, 'reserved': 0},
                    'resource1': {'in_use': 1, 'reserved': 1},
                    'resource2': {'in_use': 2, 'reserved': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project(
                         self.ctxt, 'p1'))

    def test_quota_usage_get_all_by_project_and_user(self):
        _quota_reserve(self.ctxt, 'p1', 'u1')
        expected = {'project_id': 'p1',
                    'user_id': 'u1',
                    'resource0': {'in_use': 0, 'reserved': 0},
                    'resource1': {'in_use': 1, 'reserved': 1},
                    'resource2': {'in_use': 2, 'reserved': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                         self.ctxt, 'p1', 'u1'))

    def test_quota_usage_update_nonexistent(self):
        self.assertRaises(exception.QuotaUsageNotFound, db.quota_usage_update,
                          self.ctxt, 'p1', 'u1', 'resource', in_use=42)

    def test_quota_usage_update(self):
        _quota_reserve(self.ctxt, 'p1', 'u1')
        db.quota_usage_update(self.ctxt, 'p1', 'u1', 'resource0', in_use=42,
                              reserved=43)
        quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0', 'u1')
        expected = {'resource': 'resource0', 'project_id': 'p1',
                    'user_id': 'u1', 'in_use': 42, 'reserved': 43,
                    'total': 85}
        # items() instead of the Python-2-only iteritems().
        for key, value in expected.items():
            self.assertEqual(value, quota_usage[key])

    def test_quota_create_exists(self):
        db.quota_create(self.ctxt, 'project1', 'resource1', 41)
        self.assertRaises(exception.QuotaExists, db.quota_create, self.ctxt,
                          'project1', 'resource1', 42)
class QuotaClassTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.quota_class_* methods."""

    def setUp(self):
        super(QuotaClassTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def test_quota_class_get_default(self):
        limits = {
            'test_resource1': '10',
            'test_resource2': '20',
            'test_resource3': '30',
        }
        for resource, limit in limits.items():
            db.quota_class_create(self.ctxt, 'default', resource, limit)
        defaults = db.quota_class_get_default(self.ctxt)
        # Limits created as strings are expected back as ints.
        self.assertEqual(defaults, dict(class_name='default',
                                        test_resource1=10,
                                        test_resource2=20,
                                        test_resource3=30))

    def test_quota_class_create(self):
        created = db.quota_class_create(self.ctxt, 'class name', 'resource',
                                        42)
        self.assertEqual(created.class_name, 'class name')
        self.assertEqual(created.resource, 'resource')
        self.assertEqual(created.hard_limit, 42)

    def test_quota_class_get(self):
        created = db.quota_class_create(self.ctxt, 'class name', 'resource',
                                        42)
        fetched = db.quota_class_get(self.ctxt, 'class name', 'resource')
        self._assertEqualObjects(created, fetched)

    def test_quota_class_get_nonexistent(self):
        self.assertRaises(exception.QuotaClassNotFound, db.quota_class_get,
                          self.ctxt, 'nonexistent', 'resource')

    def test_quota_class_get_all_by_name(self):
        for i in range(3):
            for j in range(3):
                db.quota_class_create(self.ctxt, 'class%d' % i,
                                      'resource%d' % j, j)
        for i in range(3):
            class_name = 'class%d' % i
            fetched = db.quota_class_get_all_by_name(self.ctxt, class_name)
            self.assertEqual(fetched, {'class_name': class_name,
                                       'resource0': 0, 'resource1': 1,
                                       'resource2': 2})

    def test_quota_class_update(self):
        db.quota_class_create(self.ctxt, 'class name', 'resource', 42)
        db.quota_class_update(self.ctxt, 'class name', 'resource', 43)
        updated = db.quota_class_get(self.ctxt, 'class name', 'resource')
        self.assertEqual(updated.hard_limit, 43)

    def test_quota_class_update_nonexistent(self):
        self.assertRaises(exception.QuotaClassNotFound, db.quota_class_update,
                          self.ctxt, 'class name', 'resource', 42)
class S3ImageTestCase(test.TestCase):
    """Tests for db.s3_image_* methods."""

    def setUp(self):
        super(S3ImageTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.values = [uuidutils.generate_uuid() for _ in xrange(3)]
        self.images = [db.s3_image_create(self.ctxt, uuid)
                       for uuid in self.values]

    def test_s3_image_create(self):
        for image in self.images:
            self.assertTrue(uuidutils.is_uuid_like(image.uuid))
        self.assertEqual(sorted(self.values),
                         sorted(image.uuid for image in self.images))

    def test_s3_image_get_by_uuid(self):
        for uuid in self.values:
            image = db.s3_image_get_by_uuid(self.ctxt, uuid)
            self.assertTrue(uuidutils.is_uuid_like(image.uuid))
            self.assertEqual(uuid, image.uuid)

    def test_s3_image_get(self):
        fetched_uuids = sorted(db.s3_image_get(self.ctxt, image.id).uuid
                               for image in self.images)
        self.assertEqual(sorted(self.values), fetched_uuids)

    def test_s3_image_get_not_found(self):
        self.assertRaises(exception.ImageNotFound, db.s3_image_get, self.ctxt,
                          100500)

    def test_s3_image_get_by_uuid_not_found(self):
        self.assertRaises(exception.ImageNotFound, db.s3_image_get_by_uuid,
                          self.ctxt, uuidutils.generate_uuid())
class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):
_ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']
def setUp(self):
super(ComputeNodeTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.service_dict = dict(host='host1', binary='binary1',
topic='compute', report_count=1,
disabled=False)
self.service = db.service_create(self.ctxt, self.service_dict)
self.compute_node_dict = dict(vcpus=2, memory_mb=1024, local_gb=2048,
vcpus_used=0, memory_mb_used=0,
local_gb_used=0, free_ram_mb=1024,
free_disk_gb=2048, hypervisor_type="xen",
hypervisor_version=1, cpu_info="",
running_vms=0, current_workload=0,
service_id=self.service['id'],
disk_available_least=100,
hypervisor_hostname='abracadabra104')
# add some random stats
self.stats = dict(num_instances=3, num_proj_12345=2,
num_proj_23456=2, num_vm_building=3)
self.compute_node_dict['stats'] = self.stats
self.flags(reserved_host_memory_mb=0)
self.flags(reserved_host_disk_mb=0)
self.item = db.compute_node_create(self.ctxt, self.compute_node_dict)
def _stats_as_dict(self, stats):
d = {}
for s in stats:
key = s['key']
d[key] = s['value']
return d
def _stats_equal(self, stats, new_stats):
for k, v in stats.iteritems():
self.assertEqual(v, int(new_stats[k]))
def test_compute_node_create(self):
self._assertEqualObjects(self.compute_node_dict, self.item,
ignored_keys=self._ignored_keys + ['stats'])
new_stats = self._stats_as_dict(self.item['stats'])
self._stats_equal(self.stats, new_stats)
def test_compute_node_get_all(self):
nodes = db.compute_node_get_all(self.ctxt)
self.assertEqual(1, len(nodes))
node = nodes[0]
self._assertEqualObjects(self.compute_node_dict, node,
ignored_keys=self._ignored_keys + ['stats', 'service'])
new_stats = self._stats_as_dict(node['stats'])
self._stats_equal(self.stats, new_stats)
def test_compute_node_get(self):
compute_node_id = self.item['id']
node = db.compute_node_get(self.ctxt, compute_node_id)
self._assertEqualObjects(self.compute_node_dict, node,
ignored_keys=self._ignored_keys + ['stats', 'service'])
new_stats = self._stats_as_dict(node['stats'])
self._stats_equal(self.stats, new_stats)
def test_compute_node_update(self):
compute_node_id = self.item['id']
stats = self._stats_as_dict(self.item['stats'])
# change some values:
stats['num_instances'] = 8
stats['num_tribbles'] = 1
values = {
'vcpus': 4,
'stats': stats,
}
item_updated = db.compute_node_update(self.ctxt, compute_node_id,
values)
self.assertEqual(4, item_updated['vcpus'])
new_stats = self._stats_as_dict(item_updated['stats'])
self._stats_equal(stats, new_stats)
def test_compute_node_delete(self):
compute_node_id = self.item['id']
db.compute_node_delete(self.ctxt, compute_node_id)
nodes = db.compute_node_get_all(self.ctxt)
self.assertEqual(len(nodes), 0)
def test_compute_node_search_by_hypervisor(self):
nodes_created = []
new_service = copy.copy(self.service_dict)
for i in xrange(3):
new_service['binary'] += str(i)
new_service['topic'] += str(i)
service = db.service_create(self.ctxt, new_service)
self.compute_node_dict['service_id'] = service['id']
self.compute_node_dict['hypervisor_hostname'] = 'testhost' + str(i)
self.compute_node_dict['stats'] = self.stats
node = db.compute_node_create(self.ctxt, self.compute_node_dict)
nodes_created.append(node)
nodes = db.compute_node_search_by_hypervisor(self.ctxt, 'host')
self.assertEqual(3, len(nodes))
self._assertEqualListsOfObjects(nodes_created, nodes,
ignored_keys=self._ignored_keys + ['stats', 'service'])
def test_compute_node_statistics(self):
    """Aggregate statistics over a single node equal that node's values."""
    stats = db.compute_node_statistics(self.ctxt)
    self.assertEqual(stats.pop('count'), 1)
    for k, v in stats.iteritems():
        self.assertEqual(v, self.item[k])
def test_compute_node_not_found(self):
    """Looking up a nonexistent node id raises ComputeHostNotFound."""
    self.assertRaises(exception.ComputeHostNotFound, db.compute_node_get,
                      self.ctxt, 100500)
def test_compute_node_update_always_updates_updated_at(self):
    """Even an empty update dict must bump updated_at."""
    item_updated = db.compute_node_update(self.ctxt,
                                          self.item['id'], {})
    self.assertNotEqual(self.item['updated_at'],
                        item_updated['updated_at'])
def test_compute_node_update_override_updated_at(self):
    """An updated_at supplied by the caller is overridden by the DB layer."""
    # Update the record once so updated_at is set.
    first = db.compute_node_update(self.ctxt, self.item['id'],
                                   {'free_ram_mb': '12'})
    self.assertIsNotNone(first['updated_at'])
    # Update a second time. Make sure that the updated_at value we send
    # is overridden.
    second = db.compute_node_update(self.ctxt, self.item['id'],
                                    {'updated_at': first.updated_at,
                                     'free_ram_mb': '13'})
    self.assertNotEqual(first['updated_at'], second['updated_at'])
def test_compute_node_stat_unchanged(self):
    """Re-sending identical stat values must not touch their updated_at."""
    # don't update unchanged stat values:
    stats = self.item['stats']
    # Remember each stat's updated_at before the no-op update.
    stats_updated_at = dict([(stat['key'], stat['updated_at'])
                             for stat in stats])
    stats_values = self._stats_as_dict(stats)
    new_values = {'stats': stats_values}
    compute_node_id = self.item['id']
    db.compute_node_update(self.ctxt, compute_node_id, new_values)
    updated_node = db.compute_node_get(self.ctxt, compute_node_id)
    updated_stats = updated_node['stats']
    for stat in updated_stats:
        self.assertEqual(stat['updated_at'], stats_updated_at[stat['key']])
def test_compute_node_stat_prune(self):
    """prune_stats=True drops stats absent from the update payload."""
    # Grab the existing 'num_instances' stat row; the fixture is assumed
    # to contain it (a missing row would fail with NameError below).
    for stat in self.item['stats']:
        if stat['key'] == 'num_instances':
            num_instance_stat = stat
            break
    values = {
        'stats': dict(num_instances=1)
    }
    db.compute_node_update(self.ctxt, self.item['id'], values,
                           prune_stats=True)
    item_updated = db.compute_node_get_all(self.ctxt)[0]
    # Only the stat present in the update should survive.
    self.assertEqual(1, len(item_updated['stats']))
    stat = item_updated['stats'][0]
    self.assertEqual(num_instance_stat['id'], stat['id'])
    self.assertEqual(num_instance_stat['key'], stat['key'])
    self.assertEqual(1, int(stat['value']))
class ProviderFwRuleTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the provider_fw_rule_* DB API methods."""

    def setUp(self):
        super(ProviderFwRuleTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.values = self._get_rule_values()
        self.rules = [db.provider_fw_rule_create(self.ctxt, rule)
                      for rule in self.values]

    def _get_rule_values(self):
        """Build one distinct firewall rule dict per sample CIDR."""
        cidr_samples = ['192.168.0.0/24', '10.1.2.3/32',
                        '2001:4f8:3:ba::/64',
                        '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128']
        return [{'protocol': 'foo' + str(i),
                 'from_port': 9999 + i,
                 'to_port': 9898 + i,
                 'cidr': cidr}
                for i, cidr in enumerate(cidr_samples)]

    def test_provider_fw_rule_create(self):
        """Each created rule matches the values it was created from."""
        ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
                        'updated_at']
        for created, expected in zip(self.rules, self.values):
            self._assertEqualObjects(created, expected,
                                     ignored_keys=ignored_keys)

    def test_provider_fw_rule_get_all(self):
        """get_all returns every rule created in setUp."""
        self._assertEqualListsOfObjects(self.rules,
                                db.provider_fw_rule_get_all(self.ctxt))

    def test_provider_fw_rule_destroy(self):
        """Destroying every rule leaves get_all empty."""
        for created in self.rules:
            db.provider_fw_rule_destroy(self.ctxt, created.id)
        self.assertEqual([], db.provider_fw_rule_get_all(self.ctxt))
class CertificateTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the certificate_* DB API methods."""

    def setUp(self):
        super(CertificateTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.created = self._certificates_create()

    def _get_certs_values(self):
        """Return three certificate dicts with numeric-suffixed values."""
        base_values = {
            'user_id': 'user',
            'project_id': 'project',
            'file_name': 'filename'
        }
        return [dict((k, v + str(x)) for k, v in base_values.iteritems())
                for x in xrange(1, 4)]

    def _certificates_create(self):
        """Insert the sample certificates and return the created rows."""
        return [db.certificate_create(self.ctxt, cert)
                for cert in self._get_certs_values()]

    def test_certificate_create(self):
        """Created rows match the input values, ignoring DB bookkeeping."""
        ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
                        'updated_at']
        for i, cert in enumerate(self._get_certs_values()):
            self._assertEqualObjects(self.created[i], cert,
                                     ignored_keys=ignored_keys)

    def test_certificate_get_all_by_project(self):
        """Lookup by project id finds the matching certificate."""
        cert = db.certificate_get_all_by_project(self.ctxt,
                                                 self.created[1].project_id)
        self._assertEqualObjects(self.created[1], cert[0])

    def test_certificate_get_all_by_user(self):
        """Lookup by user id finds the matching certificate."""
        cert = db.certificate_get_all_by_user(self.ctxt,
                                              self.created[1].user_id)
        self._assertEqualObjects(self.created[1], cert[0])

    def test_certificate_get_all_by_user_and_project(self):
        """Lookup by both user and project finds the matching certificate."""
        cert = db.certificate_get_all_by_user_and_project(self.ctxt,
                           self.created[1].user_id, self.created[1].project_id)
        self._assertEqualObjects(self.created[1], cert[0])
class ConsoleTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the console_* DB API methods.

    setUp creates two console pools and one console per pool, all attached
    to the same instance uuid.
    """

    def setUp(self):
        super(ConsoleTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        pools_data = [
            {'address': '192.168.10.10',
             'username': 'user1',
             'password': 'passwd1',
             'console_type': 'type1',
             'public_hostname': 'public_host1',
             'host': 'host1',
             'compute_host': 'compute_host1',
            },
            {'address': '192.168.10.11',
             'username': 'user2',
             'password': 'passwd2',
             'console_type': 'type2',
             'public_hostname': 'public_host2',
             'host': 'host2',
             'compute_host': 'compute_host2',
            },
        ]
        console_pools = [db.console_pool_create(self.ctxt, val)
                         for val in pools_data]
        # Both consoles share one instance uuid so the "get all by
        # instance" queries below return both.
        instance_uuid = uuidutils.generate_uuid()
        self.console_data = [dict([('instance_name', 'name' + str(x)),
                                   ('instance_uuid', instance_uuid),
                                   ('password', 'pass' + str(x)),
                                   ('port', 7878 + x),
                                   ('pool_id', console_pools[x]['id'])])
                             for x in xrange(len(pools_data))]
        self.consoles = [db.console_create(self.ctxt, val)
                         for val in self.console_data]

    def test_console_create(self):
        """Created consoles get ids and match the input values."""
        ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
                        'updated_at']
        for console in self.consoles:
            self.assertIsNotNone(console['id'])
        self._assertEqualListsOfObjects(self.console_data, self.consoles,
                                        ignored_keys=ignored_keys)

    def test_console_get_by_id(self):
        """console_get by id alone returns the console."""
        console = self.consoles[0]
        console_get = db.console_get(self.ctxt, console['id'])
        self._assertEqualObjects(console, console_get,
                                 ignored_keys=['pool'])

    def test_console_get_by_id_uuid(self):
        """console_get with an instance uuid filter still matches."""
        console = self.consoles[0]
        console_get = db.console_get(self.ctxt, console['id'],
                                     console['instance_uuid'])
        self._assertEqualObjects(console, console_get,
                                 ignored_keys=['pool'])

    def test_console_get_by_pool_instance(self):
        """Lookup by (pool id, instance uuid) returns the console."""
        console = self.consoles[0]
        console_get = db.console_get_by_pool_instance(self.ctxt,
                            console['pool_id'], console['instance_uuid'])
        self._assertEqualObjects(console, console_get,
                                 ignored_keys=['pool'])

    def test_console_get_all_by_instance(self):
        """All consoles for the shared instance uuid are returned."""
        instance_uuid = self.consoles[0]['instance_uuid']
        consoles_get = db.console_get_all_by_instance(self.ctxt, instance_uuid)
        self._assertEqualListsOfObjects(self.consoles, consoles_get)

    def test_console_get_all_by_instance_empty(self):
        """An unknown instance uuid yields an empty list, not an error."""
        consoles_get = db.console_get_all_by_instance(self.ctxt,
                                                uuidutils.generate_uuid())
        self.assertEqual(consoles_get, [])

    def test_console_delete(self):
        """A deleted console can no longer be fetched."""
        console_id = self.consoles[0]['id']
        db.console_delete(self.ctxt, console_id)
        self.assertRaises(exception.ConsoleNotFound, db.console_get,
                          self.ctxt, console_id)

    def test_console_get_by_pool_instance_not_found(self):
        """Pool lookup with an unknown instance uuid raises."""
        self.assertRaises(exception.ConsoleNotFoundInPoolForInstance,
                          db.console_get_by_pool_instance, self.ctxt,
                          self.consoles[0]['pool_id'],
                          uuidutils.generate_uuid())

    def test_console_get_not_found(self):
        """console_get with an unknown id raises ConsoleNotFound."""
        self.assertRaises(exception.ConsoleNotFound, db.console_get,
                          self.ctxt, 100500)

    def test_console_get_not_found_instance(self):
        """A valid console id with the wrong instance uuid raises."""
        self.assertRaises(exception.ConsoleNotFoundForInstance, db.console_get,
                          self.ctxt, self.consoles[0]['id'],
                          uuidutils.generate_uuid())
class CellTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the cell_* DB API methods."""

    _ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']

    def setUp(self):
        super(CellTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _get_cell_base_values(self):
        """Return a template dict of values for creating a cell."""
        return {
            'name': 'myname',
            'api_url': 'apiurl',
            'transport_url': 'transporturl',
            'weight_offset': 0.5,
            'weight_scale': 1.5,
            'is_parent': True,
        }

    def _cell_value_modify(self, value, step):
        """Derive a distinct same-typed value from *value*, varying by step.

        NOTE: bool is an int subclass, so the bool branch must stay before
        the int branch for booleans to be handled correctly.
        """
        if isinstance(value, str):
            return value + str(step)
        elif isinstance(value, float):
            return value + step + 0.6
        elif isinstance(value, bool):
            return bool(step % 2)
        elif isinstance(value, int):
            return value + step

    def _create_cells(self):
        """Create three distinct cells; return the value dicts used."""
        test_values = []
        for x in xrange(1, 4):
            modified_val = dict([(k, self._cell_value_modify(v, x))
                        for k, v in self._get_cell_base_values().iteritems()])
            db.cell_create(self.ctxt, modified_val)
            test_values.append(modified_val)
        return test_values

    def test_cell_create(self):
        """A created cell gets an id and matches the input values."""
        cell = db.cell_create(self.ctxt, self._get_cell_base_values())
        # assertIsNotNone gives a clearer failure message than
        # assertFalse(x is None).
        self.assertIsNotNone(cell['id'])
        self._assertEqualObjects(cell, self._get_cell_base_values(),
                                 ignored_keys=self._ignored_keys)

    def test_cell_update(self):
        """cell_update by name replaces the updatable columns."""
        db.cell_create(self.ctxt, self._get_cell_base_values())
        new_values = {
            'api_url': 'apiurl1',
            'transport_url': 'transporturl1',
            'weight_offset': 0.6,
            'weight_scale': 1.6,
            'is_parent': False,
        }
        test_cellname = self._get_cell_base_values()['name']
        db.cell_update(self.ctxt, test_cellname, new_values)
        updated_cell = db.cell_get(self.ctxt, test_cellname)
        self._assertEqualObjects(updated_cell, new_values,
                            ignored_keys=self._ignored_keys + ['name'])

    def test_cell_delete(self):
        """Deleted cells can no longer be fetched by name."""
        new_cells = self._create_cells()
        for cell in new_cells:
            test_cellname = cell['name']
            db.cell_delete(self.ctxt, test_cellname)
            self.assertRaises(exception.CellNotFound, db.cell_get, self.ctxt,
                              test_cellname)

    def test_cell_get(self):
        """cell_get by name returns each created cell."""
        new_cells = self._create_cells()
        for cell in new_cells:
            cell_get = db.cell_get(self.ctxt, cell['name'])
            self._assertEqualObjects(cell_get, cell,
                                     ignored_keys=self._ignored_keys)

    def test_cell_get_all(self):
        """cell_get_all returns every created cell exactly once."""
        new_cells = self._create_cells()
        cells = db.cell_get_all(self.ctxt)
        self.assertEqual(len(new_cells), len(cells))
        cells_byname = dict([(newcell['name'],
                              newcell) for newcell in new_cells])
        for cell in cells:
            self._assertEqualObjects(cell, cells_byname[cell['name']],
                                     self._ignored_keys)

    def test_cell_get_not_found(self):
        """Fetching an unknown cell name raises CellNotFound."""
        self._create_cells()
        self.assertRaises(exception.CellNotFound, db.cell_get, self.ctxt,
                          'cellnotinbase')

    def test_cell_update_not_found(self):
        """Updating an unknown cell name raises CellNotFound."""
        self._create_cells()
        self.assertRaises(exception.CellNotFound, db.cell_update, self.ctxt,
                          'cellnotinbase', self._get_cell_base_values())

    def test_cell_create_exists(self):
        """Creating a duplicate cell name raises CellExists."""
        db.cell_create(self.ctxt, self._get_cell_base_values())
        self.assertRaises(exception.CellExists, db.cell_create,
                          self.ctxt, self._get_cell_base_values())
class ConsolePoolTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the console_pool_* DB API methods."""

    def setUp(self):
        super(ConsolePoolTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.test_console_pool_1 = {
            'address': '192.168.2.10',
            'username': 'user_1',
            'password': 'secret_123',
            'console_type': 'type_1',
            'public_hostname': 'public_hostname_123',
            'host': 'localhost',
            'compute_host': '127.0.0.1',
        }
        # Pools 2 and 3 share host and console_type so the
        # get_all_by_host_type test below returns both.
        self.test_console_pool_2 = {
            'address': '192.168.2.11',
            'username': 'user_2',
            'password': 'secret_1234',
            'console_type': 'type_2',
            'public_hostname': 'public_hostname_1234',
            'host': '127.0.0.1',
            'compute_host': 'localhost',
        }
        self.test_console_pool_3 = {
            'address': '192.168.2.12',
            'username': 'user_3',
            'password': 'secret_12345',
            'console_type': 'type_2',
            'public_hostname': 'public_hostname_12345',
            'host': '127.0.0.1',
            'compute_host': '192.168.1.1',
        }

    def test_console_pool_create(self):
        """A created pool gets an id and matches the input values."""
        console_pool = db.console_pool_create(
            self.ctxt, self.test_console_pool_1)
        # assertIsNotNone reads better and fails more clearly than
        # assertTrue(x is not None).
        self.assertIsNotNone(console_pool.get('id'))
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id']
        self._assertEqualObjects(
            console_pool, self.test_console_pool_1, ignored_keys)

    def test_console_pool_create_duplicate(self):
        """Creating the same pool twice raises ConsolePoolExists."""
        db.console_pool_create(self.ctxt, self.test_console_pool_1)
        self.assertRaises(exception.ConsolePoolExists, db.console_pool_create,
                          self.ctxt, self.test_console_pool_1)

    def test_console_pool_get_by_host_type(self):
        """Lookup by (compute_host, host, console_type) finds the pool."""
        params = [
            self.test_console_pool_1,
            self.test_console_pool_2,
        ]
        for p in params:
            db.console_pool_create(self.ctxt, p)
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id', 'consoles']
        cp = self.test_console_pool_1
        db_cp = db.console_pool_get_by_host_type(
            self.ctxt, cp['compute_host'], cp['host'], cp['console_type']
        )
        self._assertEqualObjects(cp, db_cp, ignored_keys)

    # Renamed from test_console_pool_get_by_host_type_no_resuls (typo) for
    # consistency with test_console_pool_get_all_by_host_type_no_results.
    def test_console_pool_get_by_host_type_no_results(self):
        """No matching pool raises ConsolePoolNotFoundForHostType."""
        self.assertRaises(
            exception.ConsolePoolNotFoundForHostType,
            db.console_pool_get_by_host_type, self.ctxt, 'compute_host',
            'host', 'console_type')

    def test_console_pool_get_all_by_host_type(self):
        """get_all_by_host_type returns every pool on that host/type."""
        params = [
            self.test_console_pool_1,
            self.test_console_pool_2,
            self.test_console_pool_3,
        ]
        for p in params:
            db.console_pool_create(self.ctxt, p)
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id', 'consoles']
        cp = self.test_console_pool_2
        db_cp = db.console_pool_get_all_by_host_type(
            self.ctxt, cp['host'], cp['console_type'])
        self._assertEqualListsOfObjects(
            db_cp, [self.test_console_pool_2, self.test_console_pool_3],
            ignored_keys)

    def test_console_pool_get_all_by_host_type_no_results(self):
        """No matching pools yields an empty list, not an error."""
        res = db.console_pool_get_all_by_host_type(
            self.ctxt, 'cp_host', 'cp_console_type')
        self.assertEqual([], res)
class DnsdomainTestCase(test.TestCase):
    """Tests for the dnsdomain_* DB API methods."""

    def setUp(self):
        super(DnsdomainTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.domain = 'test.domain'
        self.testzone = 'testzone'
        self.project = 'fake'

    def test_dnsdomain_register_for_zone(self):
        """Registering for a zone stores the AZ with 'private' scope."""
        db.dnsdomain_register_for_zone(self.ctxt, self.domain, self.testzone)
        entry = db.dnsdomain_get(self.ctxt, self.domain)
        self.assertEqual(entry['domain'], self.domain)
        self.assertEqual(entry['availability_zone'], self.testzone)
        self.assertEqual(entry['scope'], 'private')

    def test_dnsdomain_register_for_project(self):
        """Registering for a project stores the project with 'public' scope."""
        db.dnsdomain_register_for_project(self.ctxt, self.domain, self.project)
        entry = db.dnsdomain_get(self.ctxt, self.domain)
        self.assertEqual(entry['domain'], self.domain)
        self.assertEqual(entry['project_id'], self.project)
        self.assertEqual(entry['scope'], 'public')

    def test_dnsdomain_list(self):
        """dnsdomain_list returns every registered domain name."""
        expected = ['test.domain.one', 'test.domain.two']
        db.dnsdomain_register_for_zone(self.ctxt, expected[0], self.testzone)
        db.dnsdomain_register_for_project(self.ctxt, expected[1], self.project)
        actual = db.dnsdomain_list(self.ctxt)
        self.assertEqual(sorted(expected), sorted(actual))

    def test_dnsdomain_unregister(self):
        """An unregistered domain is no longer retrievable."""
        db.dnsdomain_register_for_zone(self.ctxt, self.domain, self.testzone)
        db.dnsdomain_unregister(self.ctxt, self.domain)
        self.assertIsNone(db.dnsdomain_get(self.ctxt, self.domain))
class BwUsageTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the bw_usage_* DB API methods."""

    _ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']

    def setUp(self):
        super(BwUsageTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        # Freeze time so last_refreshed values are predictable.
        self.useFixture(test.TimeOverride())

    def test_bw_usage_get_by_uuids(self):
        """bw_usage_update creates/updates rows; get_by_uuids returns them."""
        now = timeutils.utcnow()
        start_period = now - datetime.timedelta(seconds=10)
        uuid3_refreshed = now - datetime.timedelta(seconds=5)
        # NOTE: the order of this list must match the order the rows are
        # created below, since it is zipped against the query result.
        expected_bw_usages = [{'uuid': 'fake_uuid1',
                               'mac': 'fake_mac1',
                               'start_period': start_period,
                               'bw_in': 100,
                               'bw_out': 200,
                               'last_ctr_in': 12345,
                               'last_ctr_out': 67890,
                               'last_refreshed': now},
                              {'uuid': 'fake_uuid2',
                               'mac': 'fake_mac2',
                               'start_period': start_period,
                               'bw_in': 200,
                               'bw_out': 300,
                               'last_ctr_in': 22345,
                               'last_ctr_out': 77890,
                               'last_refreshed': now},
                              {'uuid': 'fake_uuid3',
                               'mac': 'fake_mac3',
                               'start_period': start_period,
                               'bw_in': 400,
                               'bw_out': 500,
                               'last_ctr_in': 32345,
                               'last_ctr_out': 87890,
                               'last_refreshed': uuid3_refreshed}]
        bw_usages = db.bw_usage_get_by_uuids(self.ctxt,
                ['fake_uuid1', 'fake_uuid2'], start_period)
        # No matches
        self.assertEqual(len(bw_usages), 0)
        # Add 3 entries
        db.bw_usage_update(self.ctxt, 'fake_uuid1',
                'fake_mac1', start_period,
                100, 200, 12345, 67890)
        db.bw_usage_update(self.ctxt, 'fake_uuid2',
                'fake_mac2', start_period,
                100, 200, 42, 42)
        # Test explicit refreshed time
        db.bw_usage_update(self.ctxt, 'fake_uuid3',
                'fake_mac3', start_period,
                400, 500, 32345, 87890,
                last_refreshed=uuid3_refreshed)
        # Update 2nd entry
        db.bw_usage_update(self.ctxt, 'fake_uuid2',
                'fake_mac2', start_period,
                200, 300, 22345, 77890)
        bw_usages = db.bw_usage_get_by_uuids(self.ctxt,
                ['fake_uuid1', 'fake_uuid2', 'fake_uuid3'], start_period)
        self.assertEqual(len(bw_usages), 3)
        for i, expected in enumerate(expected_bw_usages):
            self._assertEqualObjects(bw_usages[i], expected,
                                     ignored_keys=self._ignored_keys)

    def test_bw_usage_get(self):
        """bw_usage_get returns None before, and the row after, an update."""
        now = timeutils.utcnow()
        start_period = now - datetime.timedelta(seconds=10)
        expected_bw_usage = {'uuid': 'fake_uuid1',
                             'mac': 'fake_mac1',
                             'start_period': start_period,
                             'bw_in': 100,
                             'bw_out': 200,
                             'last_ctr_in': 12345,
                             'last_ctr_out': 67890,
                             'last_refreshed': now}
        bw_usage = db.bw_usage_get(self.ctxt, 'fake_uuid1', start_period,
                                   'fake_mac1')
        self.assertIsNone(bw_usage)
        db.bw_usage_update(self.ctxt, 'fake_uuid1',
                'fake_mac1', start_period,
                100, 200, 12345, 67890)
        bw_usage = db.bw_usage_get(self.ctxt, 'fake_uuid1', start_period,
                                   'fake_mac1')
        self._assertEqualObjects(bw_usage, expected_bw_usage,
                                 ignored_keys=self._ignored_keys)
class Ec2TestCase(test.TestCase):
    """Tests for the EC2 id <-> uuid mapping DB API methods."""

    def setUp(self):
        super(Ec2TestCase, self).setUp()
        self.ctxt = context.RequestContext('fake_user', 'fake_project')

    def test_ec2_ids_not_found_are_printable(self):
        """NotFound messages must contain the offending id/uuid."""
        def check_exc_format(method, value):
            try:
                method(self.ctxt, value)
            except exception.NotFound as exc:
                # assertIn is clearer than assertTrue(x in y) and gives a
                # useful failure message.
                self.assertIn(unicode(value), unicode(exc))
            else:
                # Previously a method that failed to raise would pass this
                # test silently; fail explicitly instead.
                self.fail('%r did not raise NotFound for %r' % (method, value))
        check_exc_format(db.get_ec2_volume_id_by_uuid, 'fake')
        check_exc_format(db.get_volume_uuid_by_ec2_id, 123456)
        check_exc_format(db.get_ec2_snapshot_id_by_uuid, 'fake')
        check_exc_format(db.get_snapshot_uuid_by_ec2_id, 123456)
        check_exc_format(db.get_ec2_instance_id_by_uuid, 'fake')
        check_exc_format(db.get_instance_uuid_by_ec2_id, 123456)

    def test_ec2_volume_create(self):
        """Creating a volume mapping assigns an EC2 id."""
        vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
        self.assertIsNotNone(vol['id'])
        self.assertEqual(vol['uuid'], 'fake-uuid')

    def test_get_ec2_volume_id_by_uuid(self):
        vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
        vol_id = db.get_ec2_volume_id_by_uuid(self.ctxt, 'fake-uuid')
        self.assertEqual(vol['id'], vol_id)

    def test_get_volume_uuid_by_ec2_id(self):
        vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
        vol_uuid = db.get_volume_uuid_by_ec2_id(self.ctxt, vol['id'])
        self.assertEqual(vol_uuid, 'fake-uuid')

    def test_get_ec2_volume_id_by_uuid_not_found(self):
        self.assertRaises(exception.VolumeNotFound,
                          db.get_ec2_volume_id_by_uuid,
                          self.ctxt, 'uuid-not-present')

    def test_get_volume_uuid_by_ec2_id_not_found(self):
        self.assertRaises(exception.VolumeNotFound,
                          db.get_volume_uuid_by_ec2_id,
                          self.ctxt, 100500)

    def test_ec2_snapshot_create(self):
        """Creating a snapshot mapping assigns an EC2 id."""
        snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
        self.assertIsNotNone(snap['id'])
        self.assertEqual(snap['uuid'], 'fake-uuid')

    def test_get_ec2_snapshot_id_by_uuid(self):
        snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
        snap_id = db.get_ec2_snapshot_id_by_uuid(self.ctxt, 'fake-uuid')
        self.assertEqual(snap['id'], snap_id)

    def test_get_snapshot_uuid_by_ec2_id(self):
        snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
        snap_uuid = db.get_snapshot_uuid_by_ec2_id(self.ctxt, snap['id'])
        self.assertEqual(snap_uuid, 'fake-uuid')

    def test_get_ec2_snapshot_id_by_uuid_not_found(self):
        self.assertRaises(exception.SnapshotNotFound,
                          db.get_ec2_snapshot_id_by_uuid,
                          self.ctxt, 'uuid-not-present')

    def test_get_snapshot_uuid_by_ec2_id_not_found(self):
        self.assertRaises(exception.SnapshotNotFound,
                          db.get_snapshot_uuid_by_ec2_id,
                          self.ctxt, 100500)

    def test_ec2_instance_create(self):
        """Creating an instance mapping assigns an EC2 id."""
        inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
        self.assertIsNotNone(inst['id'])
        self.assertEqual(inst['uuid'], 'fake-uuid')

    def test_get_ec2_instance_id_by_uuid(self):
        inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
        inst_id = db.get_ec2_instance_id_by_uuid(self.ctxt, 'fake-uuid')
        self.assertEqual(inst['id'], inst_id)

    def test_get_instance_uuid_by_ec2_id(self):
        inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
        inst_uuid = db.get_instance_uuid_by_ec2_id(self.ctxt, inst['id'])
        self.assertEqual(inst_uuid, 'fake-uuid')

    def test_get_ec2_instance_id_by_uuid_not_found(self):
        self.assertRaises(exception.InstanceNotFound,
                          db.get_ec2_instance_id_by_uuid,
                          self.ctxt, 'uuid-not-present')

    def test_get_instance_uuid_by_ec2_id_not_found(self):
        self.assertRaises(exception.InstanceNotFound,
                          db.get_instance_uuid_by_ec2_id,
                          self.ctxt, 100500)
class ArchiveTestCase(test.TestCase):
    """Tests for archive_deleted_rows* moving soft-deleted rows to the
    shadow_* tables, using raw SQLAlchemy table access to verify.
    """

    def setUp(self):
        super(ArchiveTestCase, self).setUp()
        self.context = context.get_admin_context()
        self.engine = get_engine()
        self.conn = self.engine.connect()
        self.instance_id_mappings = db_utils.get_table(self.engine,
                                                       "instance_id_mappings")
        self.shadow_instance_id_mappings = db_utils.get_table(self.engine,
                                                "shadow_instance_id_mappings")
        self.dns_domains = db_utils.get_table(self.engine, "dns_domains")
        self.shadow_dns_domains = db_utils.get_table(self.engine,
                                                     "shadow_dns_domains")
        self.consoles = db_utils.get_table(self.engine, "consoles")
        self.console_pools = db_utils.get_table(self.engine, "console_pools")
        self.shadow_consoles = db_utils.get_table(self.engine,
                                                  "shadow_consoles")
        self.shadow_console_pools = db_utils.get_table(self.engine,
                                                       "shadow_console_pools")
        self.instances = db_utils.get_table(self.engine, "instances")
        self.shadow_instances = db_utils.get_table(self.engine,
                                                   "shadow_instances")
        # Unique markers used as uuids (or domain names) for the rows
        # these tests insert, so tearDown can find and remove them.
        self.uuidstrs = []
        for unused in range(6):
            self.uuidstrs.append(stdlib_uuid.uuid4().hex)
        self.ids = []
        self.id_tablenames_to_cleanup = set(["console_pools", "consoles"])
        self.uuid_tablenames_to_cleanup = set(["instance_id_mappings",
                                               "instances"])
        self.domain_tablenames_to_cleanup = set(["dns_domains"])

    def tearDown(self):
        """Remove every row these tests inserted, from main and shadow
        tables alike, keyed by id, uuid or domain as appropriate.
        """
        super(ArchiveTestCase, self).tearDown()
        for tablename in self.id_tablenames_to_cleanup:
            for name in [tablename, "shadow_" + tablename]:
                table = db_utils.get_table(self.engine, name)
                del_statement = table.delete(table.c.id.in_(self.ids))
                self.conn.execute(del_statement)
        for tablename in self.uuid_tablenames_to_cleanup:
            for name in [tablename, "shadow_" + tablename]:
                table = db_utils.get_table(self.engine, name)
                del_statement = table.delete(table.c.uuid.in_(self.uuidstrs))
                self.conn.execute(del_statement)
        for tablename in self.domain_tablenames_to_cleanup:
            for name in [tablename, "shadow_" + tablename]:
                table = db_utils.get_table(self.engine, name)
                del_statement = table.delete(table.c.domain.in_(self.uuidstrs))
                self.conn.execute(del_statement)

    def test_shadow_tables(self):
        """Every table has a matching shadow table and vice versa."""
        metadata = MetaData(bind=self.engine)
        metadata.reflect()
        for table_name in metadata.tables:
            if table_name.startswith("shadow_"):
                self.assertIn(table_name[7:], metadata.tables)
                continue
            self.assertTrue(db_utils.check_shadow_table(self.engine,
                                                        table_name))

    def test_archive_deleted_rows(self):
        """archive_deleted_rows moves up to max_rows deleted rows per call."""
        # Add 6 rows to table
        for uuidstr in self.uuidstrs:
            ins_stmt = self.instance_id_mappings.insert().values(uuid=uuidstr)
            self.conn.execute(ins_stmt)
        # Set 4 to deleted
        update_statement = self.instance_id_mappings.update().\
                where(self.instance_id_mappings.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement)
        qiim = select([self.instance_id_mappings]).where(self.
                            instance_id_mappings.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qiim).fetchall()
        # Verify we have 6 in main
        self.assertEqual(len(rows), 6)
        qsiim = select([self.shadow_instance_id_mappings]).\
                where(self.shadow_instance_id_mappings.c.uuid.in_(
                                                                self.uuidstrs))
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we have 0 in shadow
        self.assertEqual(len(rows), 0)
        # Archive 2 rows
        db.archive_deleted_rows(self.context, max_rows=2)
        rows = self.conn.execute(qiim).fetchall()
        # Verify we have 4 left in main
        self.assertEqual(len(rows), 4)
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we have 2 in shadow
        self.assertEqual(len(rows), 2)
        # Archive 2 more rows
        db.archive_deleted_rows(self.context, max_rows=2)
        rows = self.conn.execute(qiim).fetchall()
        # Verify we have 2 left in main
        self.assertEqual(len(rows), 2)
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we have 4 in shadow
        self.assertEqual(len(rows), 4)
        # Try to archive more, but there are no deleted rows left.
        db.archive_deleted_rows(self.context, max_rows=2)
        rows = self.conn.execute(qiim).fetchall()
        # Verify we still have 2 left in main
        self.assertEqual(len(rows), 2)
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we still have 4 in shadow
        self.assertEqual(len(rows), 4)

    def test_archive_deleted_rows_for_every_uuid_table(self):
        """Run the per-table archive check against every model table."""
        tablenames = []
        for model_class in models.__dict__.itervalues():
            if hasattr(model_class, "__tablename__"):
                tablenames.append(model_class.__tablename__)
        tablenames.sort()
        for tablename in tablenames:
            ret = self._test_archive_deleted_rows_for_one_uuid_table(tablename)
            if ret == 0:
                # Rows were actually inserted; remember to clean them up.
                self.uuid_tablenames_to_cleanup.add(tablename)

    def _test_archive_deleted_rows_for_one_uuid_table(self, tablename):
        """Exercise archive_deleted_rows_for_table on one uuid-keyed table.

        :returns: 0 on success, 1 if no uuid column, 2 if insert failed
        """
        main_table = db_utils.get_table(self.engine, tablename)
        if not hasattr(main_table.c, "uuid"):
            # Not a uuid table, so skip it.
            return 1
        shadow_table = db_utils.get_table(self.engine, "shadow_" + tablename)
        # Add 6 rows to table
        for uuidstr in self.uuidstrs:
            ins_stmt = main_table.insert().values(uuid=uuidstr)
            try:
                self.conn.execute(ins_stmt)
            except IntegrityError:
                # This table has constraints that require a table-specific
                # insert, so skip it.
                return 2
        # Set 4 to deleted
        update_statement = main_table.update().\
                where(main_table.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement)
        qmt = select([main_table]).where(main_table.c.uuid.in_(
                self.uuidstrs))
        rows = self.conn.execute(qmt).fetchall()
        # Verify we have 6 in main
        self.assertEqual(len(rows), 6)
        qst = select([shadow_table]).\
                where(shadow_table.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qst).fetchall()
        # Verify we have 0 in shadow
        self.assertEqual(len(rows), 0)
        # Archive 2 rows
        db.archive_deleted_rows_for_table(self.context, tablename, max_rows=2)
        # Verify we have 4 left in main
        rows = self.conn.execute(qmt).fetchall()
        self.assertEqual(len(rows), 4)
        # Verify we have 2 in shadow
        rows = self.conn.execute(qst).fetchall()
        self.assertEqual(len(rows), 2)
        # Archive 2 more rows
        db.archive_deleted_rows_for_table(self.context, tablename, max_rows=2)
        # Verify we have 2 left in main
        rows = self.conn.execute(qmt).fetchall()
        self.assertEqual(len(rows), 2)
        # Verify we have 4 in shadow
        rows = self.conn.execute(qst).fetchall()
        self.assertEqual(len(rows), 4)
        # Try to archive more, but there are no deleted rows left.
        db.archive_deleted_rows_for_table(self.context, tablename, max_rows=2)
        # Verify we still have 2 left in main
        rows = self.conn.execute(qmt).fetchall()
        self.assertEqual(len(rows), 2)
        # Verify we still have 4 in shadow
        rows = self.conn.execute(qst).fetchall()
        self.assertEqual(len(rows), 4)
        return 0

    def test_archive_deleted_rows_no_id_column(self):
        """Tables keyed by something other than id (dns_domains uses
        'domain') can still be archived.
        """
        uuidstr0 = self.uuidstrs[0]
        ins_stmt = self.dns_domains.insert().values(domain=uuidstr0)
        self.conn.execute(ins_stmt)
        update_statement = self.dns_domains.update().\
                           where(self.dns_domains.c.domain == uuidstr0).\
                           values(deleted=1)
        self.conn.execute(update_statement)
        qdd = select([self.dns_domains], self.dns_domains.c.domain ==
                                         uuidstr0)
        rows = self.conn.execute(qdd).fetchall()
        self.assertEqual(len(rows), 1)
        qsdd = select([self.shadow_dns_domains],
                      self.shadow_dns_domains.c.domain == uuidstr0)
        rows = self.conn.execute(qsdd).fetchall()
        self.assertEqual(len(rows), 0)
        db.archive_deleted_rows(self.context, max_rows=1)
        rows = self.conn.execute(qdd).fetchall()
        self.assertEqual(len(rows), 0)
        rows = self.conn.execute(qsdd).fetchall()
        self.assertEqual(len(rows), 1)

    def test_archive_deleted_rows_fk_constraint(self):
        """Archiving a parent row fails until its child rows are archived."""
        # consoles.pool_id depends on console_pools.id
        # SQLite doesn't enforce foreign key constraints without a pragma.
        dialect = self.engine.url.get_dialect()
        if dialect == sqlite.dialect:
            # We're seeing issues with foreign key support in SQLite 3.6.20
            # SQLAlchemy doesn't support it at all with < SQLite 3.6.19
            # It works fine in SQLite 3.7.
            # So return early to skip this test if running SQLite < 3.7
            import sqlite3
            tup = sqlite3.sqlite_version_info
            if tup[0] < 3 or (tup[0] == 3 and tup[1] < 7):
                self.skipTest(
                    'sqlite version too old for reliable SQLA foreign_keys')
            self.conn.execute("PRAGMA foreign_keys = ON")
        ins_stmt = self.console_pools.insert().values(deleted=1)
        result = self.conn.execute(ins_stmt)
        id1 = result.inserted_primary_key[0]
        self.ids.append(id1)
        ins_stmt = self.consoles.insert().values(deleted=1,
                                                         pool_id=id1)
        result = self.conn.execute(ins_stmt)
        id2 = result.inserted_primary_key[0]
        self.ids.append(id2)
        # The first try to archive console_pools should fail, due to FK.
        num = db.archive_deleted_rows_for_table(self.context, "console_pools")
        self.assertEqual(num, 0)
        # Then archiving consoles should work.
        num = db.archive_deleted_rows_for_table(self.context, "consoles")
        self.assertEqual(num, 1)
        # Then archiving console_pools should work.
        num = db.archive_deleted_rows_for_table(self.context, "console_pools")
        self.assertEqual(num, 1)

    def test_archive_deleted_rows_2_tables(self):
        """max_rows is a combined budget spread across multiple tables."""
        # Add 6 rows to each table
        for uuidstr in self.uuidstrs:
            ins_stmt = self.instance_id_mappings.insert().values(uuid=uuidstr)
            self.conn.execute(ins_stmt)
            ins_stmt2 = self.instances.insert().values(uuid=uuidstr)
            self.conn.execute(ins_stmt2)
        # Set 4 of each to deleted
        update_statement = self.instance_id_mappings.update().\
                where(self.instance_id_mappings.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement)
        update_statement2 = self.instances.update().\
                where(self.instances.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement2)
        # Verify we have 6 in each main table
        qiim = select([self.instance_id_mappings]).where(
                         self.instance_id_mappings.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qiim).fetchall()
        self.assertEqual(len(rows), 6)
        qi = select([self.instances]).where(self.instances.c.uuid.in_(
                                             self.uuidstrs))
        rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(rows), 6)
        # Verify we have 0 in each shadow table
        qsiim = select([self.shadow_instance_id_mappings]).\
                where(self.shadow_instance_id_mappings.c.uuid.in_(
                                                            self.uuidstrs))
        rows = self.conn.execute(qsiim).fetchall()
        self.assertEqual(len(rows), 0)
        qsi = select([self.shadow_instances]).\
                where(self.shadow_instances.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(rows), 0)
        # Archive 7 rows, which should be 4 in one table and 3 in the other.
        db.archive_deleted_rows(self.context, max_rows=7)
        # Verify we have 5 left in the two main tables combined
        iim_rows = self.conn.execute(qiim).fetchall()
        i_rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(iim_rows) + len(i_rows), 5)
        # Verify we have 7 in the two shadow tables combined.
        siim_rows = self.conn.execute(qsiim).fetchall()
        si_rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(siim_rows) + len(si_rows), 7)
        # Archive the remaining deleted rows.
        db.archive_deleted_rows(self.context, max_rows=1)
        # Verify we have 4 total left in both main tables.
        iim_rows = self.conn.execute(qiim).fetchall()
        i_rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(iim_rows) + len(i_rows), 4)
        # Verify we have 8 in shadow
        siim_rows = self.conn.execute(qsiim).fetchall()
        si_rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(siim_rows) + len(si_rows), 8)
        # Try to archive more, but there are no deleted rows left.
        db.archive_deleted_rows(self.context, max_rows=500)
        # Verify we have 4 total left in both main tables.
        iim_rows = self.conn.execute(qiim).fetchall()
        i_rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(iim_rows) + len(i_rows), 4)
        # Verify we have 8 in shadow
        siim_rows = self.conn.execute(qsiim).fetchall()
        si_rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(siim_rows) + len(si_rows), 8)
class InstanceGroupDBApiTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the instance group DB API: create/get/update/delete/list."""

    def setUp(self):
        super(InstanceGroupDBApiTestCase, self).setUp()
        self.user_id = 'fake_user'
        self.project_id = 'fake_project'
        self.context = context.RequestContext(self.user_id, self.project_id)

    def _get_default_values(self):
        """Return a minimal valid set of values for creating a group."""
        return {'name': 'fake_name',
                'user_id': self.user_id,
                'project_id': self.project_id}

    def _create_instance_group(self, context, values, policies=None,
                               metadata=None, members=None):
        """Thin wrapper around db.instance_group_create for brevity."""
        return db.instance_group_create(context, values, policies=policies,
                                        metadata=metadata, members=members)

    def test_instance_group_create_no_key(self):
        # A uuid must be auto-generated when none is supplied.
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self.assertTrue(uuidutils.is_uuid_like(result['uuid']))

    def test_instance_group_create_with_key(self):
        # A caller-supplied uuid must be stored verbatim.
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)

    def test_instance_group_create_with_same_key(self):
        # Creating two groups with the same uuid must raise.
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        self.assertRaises(exception.InstanceGroupIdExists,
                          self._create_instance_group, self.context, values)

    def test_instance_group_get(self):
        values = self._get_default_values()
        result1 = self._create_instance_group(self.context, values)
        result2 = db.instance_group_get(self.context, result1['uuid'])
        self._assertEqualObjects(result1, result2)

    def test_instance_group_update_simple(self):
        values = self._get_default_values()
        result1 = self._create_instance_group(self.context, values)
        values = {'name': 'new_name', 'user_id': 'new_user',
                  'project_id': 'new_project'}
        db.instance_group_update(self.context, result1['uuid'],
                                 values)
        result2 = db.instance_group_get(self.context, result1['uuid'])
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(result1['uuid'], result2['uuid'])
        ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result2, values, ignored_keys)

    def test_instance_group_delete(self):
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        db.instance_group_delete(self.context, result['uuid'])
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_get, self.context, result['uuid'])

    def test_instance_group_get_all(self):
        groups = db.instance_group_get_all(self.context)
        self.assertEqual(0, len(groups))
        value = self._get_default_values()
        result1 = self._create_instance_group(self.context, value)
        groups = db.instance_group_get_all(self.context)
        self.assertEqual(1, len(groups))
        value = self._get_default_values()
        result2 = self._create_instance_group(self.context, value)
        groups = db.instance_group_get_all(self.context)
        results = [result1, result2]
        self._assertEqualListsOfObjects(results, groups)

    def test_instance_group_get_all_by_project_id(self):
        groups = db.instance_group_get_all_by_project_id(self.context,
                                                         'invalid_project_id')
        self.assertEqual(0, len(groups))
        values = self._get_default_values()
        result1 = self._create_instance_group(self.context, values)
        groups = db.instance_group_get_all_by_project_id(self.context,
                                                         'fake_project')
        self.assertEqual(1, len(groups))
        values = self._get_default_values()
        values['project_id'] = 'new_project_id'
        result2 = self._create_instance_group(self.context, values)
        groups = db.instance_group_get_all(self.context)
        results = [result1, result2]
        self._assertEqualListsOfObjects(results, groups)
        # Each project should only see its own group.
        projects = [{'name': 'fake_project', 'value': [result1]},
                    {'name': 'new_project_id', 'value': [result2]}]
        for project in projects:
            groups = db.instance_group_get_all_by_project_id(self.context,
                                                             project['name'])
            self._assertEqualListsOfObjects(project['value'], groups)

    def test_instance_group_update(self):
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self.assertTrue(uuidutils.is_uuid_like(result['uuid']))
        # Don't shadow the builtin ``id``.
        group_id = result['uuid']
        values = self._get_default_values()
        values['name'] = 'new_fake_name'
        db.instance_group_update(self.context, group_id, values)
        result = db.instance_group_get(self.context, group_id)
        self.assertEqual(result['name'], 'new_fake_name')
        # update metadata
        values = self._get_default_values()
        metadataInput = {'key11': 'value1',
                         'key12': 'value2'}
        values['metadata'] = metadataInput
        db.instance_group_update(self.context, group_id, values)
        result = db.instance_group_get(self.context, group_id)
        metadata = result['metadetails']
        self._assertEqualObjects(metadata, metadataInput)
        # update members
        values = self._get_default_values()
        members = ['instance_id1', 'instance_id2']
        values['members'] = members
        db.instance_group_update(self.context, group_id, values)
        result = db.instance_group_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(result['members'], members)
        # update policies
        values = self._get_default_values()
        policies = ['policy1', 'policy2']
        values['policies'] = policies
        db.instance_group_update(self.context, group_id, values)
        result = db.instance_group_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(result['policies'], policies)
        # test invalid ID
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_update, self.context,
                          'invalid_id', values)
class InstanceGroupMetadataDBApiTestCase(InstanceGroupDBApiTestCase):
    """Tests for instance group metadata DB API operations."""

    def test_instance_group_metadata_on_create(self):
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        metadata = {'key11': 'value1',
                    'key12': 'value2'}
        result = self._create_instance_group(self.context, values,
                                             metadata=metadata)
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self._assertEqualObjects(metadata, result['metadetails'])

    def test_instance_group_metadata_add(self):
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        # Don't shadow the builtin ``id``.
        group_id = result['uuid']
        metadata = db.instance_group_metadata_get(self.context, group_id)
        self._assertEqualObjects(metadata, {})
        metadata = {'key1': 'value1',
                    'key2': 'value2'}
        db.instance_group_metadata_add(self.context, group_id, metadata)
        metadata2 = db.instance_group_metadata_get(self.context, group_id)
        self._assertEqualObjects(metadata, metadata2)

    def test_instance_group_update(self):
        # NOTE(review): shadows the parent-class test of the same name;
        # presumably intentional to re-focus on metadata — confirm.
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        metadata = {'key1': 'value1',
                    'key2': 'value2'}
        db.instance_group_metadata_add(self.context, group_id, metadata)
        metadata2 = db.instance_group_metadata_get(self.context, group_id)
        self._assertEqualObjects(metadata, metadata2)
        # check add with existing keys
        metadata = {'key1': 'value1',
                    'key2': 'value2',
                    'key3': 'value3'}
        db.instance_group_metadata_add(self.context, group_id, metadata)
        metadata3 = db.instance_group_metadata_get(self.context, group_id)
        self._assertEqualObjects(metadata, metadata3)

    def test_instance_group_delete(self):
        # NOTE(review): shadows the parent-class test of the same name.
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        metadata = {'key1': 'value1',
                    'key2': 'value2',
                    'key3': 'value3'}
        db.instance_group_metadata_add(self.context, group_id, metadata)
        metadata3 = db.instance_group_metadata_get(self.context, group_id)
        self._assertEqualObjects(metadata, metadata3)
        db.instance_group_metadata_delete(self.context, group_id, 'key1')
        metadata = db.instance_group_metadata_get(self.context, group_id)
        # assertNotIn gives a clearer failure message than assertTrue.
        self.assertNotIn('key1', metadata)
        db.instance_group_metadata_delete(self.context, group_id, 'key2')
        metadata = db.instance_group_metadata_get(self.context, group_id)
        self.assertNotIn('key2', metadata)

    def test_instance_group_metadata_invalid_ids(self):
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_metadata_get,
                          self.context, 'invalid')
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_metadata_delete, self.context,
                          'invalidid', 'key1')
        metadata = {'key1': 'value1',
                    'key2': 'value2'}
        db.instance_group_metadata_add(self.context, group_id, metadata)
        self.assertRaises(exception.InstanceGroupMetadataNotFound,
                          db.instance_group_metadata_delete,
                          self.context, group_id, 'invalidkey')
class InstanceGroupMembersDBApiTestCase(InstanceGroupDBApiTestCase):
    """Tests for instance group member DB API operations."""

    def test_instance_group_members_on_create(self):
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        members = ['instance_id1', 'instance_id2']
        result = self._create_instance_group(self.context, values,
                                             members=members)
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self._assertEqualListsOfPrimitivesAsSets(result['members'], members)

    def test_instance_group_members_add(self):
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        # Don't shadow the builtin ``id``.
        group_id = result['uuid']
        members = db.instance_group_members_get(self.context, group_id)
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(members, [])
        members2 = ['instance_id1', 'instance_id2']
        db.instance_group_members_add(self.context, group_id, members2)
        members = db.instance_group_members_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(members, members2)

    def test_instance_group_members_update(self):
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        members2 = ['instance_id1', 'instance_id2']
        db.instance_group_members_add(self.context, group_id, members2)
        members = db.instance_group_members_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(members, members2)
        # Adding an overlapping set must merge, not duplicate.
        members3 = ['instance_id1', 'instance_id2', 'instance_id3']
        db.instance_group_members_add(self.context, group_id, members3)
        members = db.instance_group_members_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(members, members3)

    def test_instance_group_members_delete(self):
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        members3 = ['instance_id1', 'instance_id2', 'instance_id3']
        db.instance_group_members_add(self.context, group_id, members3)
        members = db.instance_group_members_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(members, members3)
        # Remove one member at a time and verify the remainder each step.
        for instance_id in members3[:]:
            db.instance_group_member_delete(self.context, group_id, instance_id)
            members3.remove(instance_id)
            members = db.instance_group_members_get(self.context, group_id)
            self._assertEqualListsOfPrimitivesAsSets(members, members3)

    def test_instance_group_members_invalid_ids(self):
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_members_get,
                          self.context, 'invalid')
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_member_delete, self.context,
                          'invalidid', 'instance_id1')
        members = ['instance_id1', 'instance_id2']
        db.instance_group_members_add(self.context, group_id, members)
        self.assertRaises(exception.InstanceGroupMemberNotFound,
                          db.instance_group_member_delete,
                          self.context, group_id, 'invalid_id')
class InstanceGroupPoliciesDBApiTestCase(InstanceGroupDBApiTestCase):
    """Tests for instance group policy DB API operations."""

    def test_instance_group_policies_on_create(self):
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        policies = ['policy1', 'policy2']
        result = self._create_instance_group(self.context, values,
                                             policies=policies)
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self._assertEqualListsOfPrimitivesAsSets(result['policies'], policies)

    def test_instance_group_policies_add(self):
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        # Don't shadow the builtin ``id``.
        group_id = result['uuid']
        policies = db.instance_group_policies_get(self.context, group_id)
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(policies, [])
        policies2 = ['policy1', 'policy2']
        db.instance_group_policies_add(self.context, group_id, policies2)
        policies = db.instance_group_policies_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(policies, policies2)

    def test_instance_group_policies_update(self):
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        policies2 = ['policy1', 'policy2']
        db.instance_group_policies_add(self.context, group_id, policies2)
        policies = db.instance_group_policies_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(policies, policies2)
        # Adding an overlapping set must merge, not duplicate.
        policies3 = ['policy1', 'policy2', 'policy3']
        db.instance_group_policies_add(self.context, group_id, policies3)
        policies = db.instance_group_policies_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(policies, policies3)

    def test_instance_group_policies_delete(self):
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        policies3 = ['policy1', 'policy2', 'policy3']
        db.instance_group_policies_add(self.context, group_id, policies3)
        policies = db.instance_group_policies_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(policies, policies3)
        # Remove one policy at a time and verify the remainder each step.
        for policy in policies3[:]:
            db.instance_group_policy_delete(self.context, group_id, policy)
            policies3.remove(policy)
            policies = db.instance_group_policies_get(self.context, group_id)
            self._assertEqualListsOfPrimitivesAsSets(policies, policies3)

    def test_instance_group_policies_invalid_ids(self):
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_policies_get,
                          self.context, 'invalid')
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_policy_delete, self.context,
                          'invalidid', 'policy1')
        policies = ['policy1', 'policy2']
        db.instance_group_policies_add(self.context, group_id, policies)
        self.assertRaises(exception.InstanceGroupPolicyNotFound,
                          db.instance_group_policy_delete,
                          self.context, group_id, 'invalid_policy')
| {
"content_hash": "0df6fe6acae02d60fa77040df0edda16",
"timestamp": "",
"source": "github",
"line_count": 6425,
"max_line_length": 79,
"avg_line_length": 45.208560311284046,
"alnum_prop": 0.5661336133441206,
"repo_name": "plumgrid/plumgrid-nova",
"id": "d766181863a9604fb65f76ba326b70f7135c5f8f",
"size": "291260",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/tests/db/test_db_api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "11944269"
},
{
"name": "Shell",
"bytes": "17148"
}
],
"symlink_target": ""
} |
import datetime
from django.db import connections
from django.db.models import expressions
from django.db.models.sql import query as django_query
from django.db.models.sql.where import AND, OR, WhereNode
from django.test import TestCase
from django.utils import timezone
from ldapdb import escape_ldap_filter, models
from ldapdb.backends.ldap import compiler as ldapdb_compiler
from ldapdb.models import fields
# Timezone fixtures shared by the conversion tests below.
UTC = timezone.utc
UTC_PLUS_ONE = timezone.get_fixed_timezone(60)  # offset in minutes: UTC+01:00
UTC_MINUS_2_HALF = timezone.get_fixed_timezone(-150)  # UTC-02:30
class FakeModel(models.Model):
    """Minimal abstract LDAP model used as a compilation target in tests."""
    class Meta:
        abstract = True
    base_dn = 'ou=test,dc=example,dc=org'
    object_classes = ['inetOrgPerson']
    # Mapped to the LDAP 'cn' attribute.
    name = fields.CharField(db_column='cn')
class DateTimeTests(TestCase):
    """Checks fields.datetime_from_ldap() against known LDAP time strings."""

    # Maps raw LDAP GeneralizedTime strings to the datetime each should parse to.
    CONVERSIONS = {
        '': None,
        '20180102030405.067874Z': datetime.datetime(2018, 1, 2, 3, 4, 5, 67874, tzinfo=UTC),
        # Python drops sub-microsecond digits
        '20180102030405.067874846Z': datetime.datetime(2018, 1, 2, 3, 4, 5, 67874, tzinfo=UTC),
        # Precision below the hour may be omitted
        '2018010203Z': datetime.datetime(2018, 1, 2, 3, tzinfo=UTC),
        # Positive UTC offsets
        '201801020304+0100': datetime.datetime(2018, 1, 2, 3, 4, tzinfo=UTC_PLUS_ONE),
        # Offset minutes are optional
        '201801020304+01': datetime.datetime(2018, 1, 2, 3, 4, tzinfo=UTC_PLUS_ONE),
        # Negative offsets
        '201801020304-0230': datetime.datetime(2018, 1, 2, 3, 4, tzinfo=UTC_MINUS_2_HALF),
    }

    def test_conversions(self):
        for raw in sorted(self.CONVERSIONS):
            expected = self.CONVERSIONS[raw]
            actual = fields.datetime_from_ldap(raw)
            message = "Mismatch for %r: expected=%r, got=%r" % (raw, expected, actual)
            self.assertEqual(expected, actual, message)
class TimestampTests(TestCase):
    """Round-trips POSIX timestamps through the field conversion helpers."""

    # Maps epoch seconds to the aware UTC datetime each should convert to.
    CONVERSIONS = {
        0: datetime.datetime(1970, 1, 1, tzinfo=UTC),
        1530139989: datetime.datetime(2018, 6, 27, 22, 53, 9, tzinfo=UTC),
    }

    def test_conversions(self):
        for raw in sorted(self.CONVERSIONS):
            expected = self.CONVERSIONS[raw]
            forward = fields.datetime_from_timestamp(raw)
            self.assertEqual(
                expected,
                forward,
                "Mismatch for %r: expected=%r, got=%r" % (raw, expected, forward),
            )
            # Converting back must reproduce the original timestamp exactly.
            backward = fields.timestamp_from_datetime(forward)
            self.assertEqual(
                raw,
                backward,
                "Mismatch for %r: expected=%r, got=%r" % (raw, raw, backward),
            )
class WhereTestCase(TestCase):
    """Checks compilation of Django field lookups into LDAP filter strings."""
    def _build_lookup(self, field_name, lookup, value, field=fields.CharField):
        """Build a *lookup* instance for a fake column named *field_name*."""
        fake_field = field()
        fake_field.set_attributes_from_name(field_name)
        lhs = expressions.Col('faketable', fake_field, fake_field)
        lookup = lhs.get_lookup(lookup)
        return lookup(lhs, value)
    def _where_as_ldap(self, where):
        """Compile *where* and return the parenthesized LDAP filter string."""
        query = django_query.Query(model=FakeModel)
        compiler = ldapdb_compiler.SQLCompiler(
            query=query,
            connection=connections['ldap'],
            using=None,
        )
        pattern, params = compiler.compile(where)
        # Parameters are escaped before substitution into the filter pattern.
        return '(%s)' % (pattern % tuple(escape_ldap_filter(param) for param in params))
    def test_escape(self):
        """Filter metacharacters are hex-escaped; non-ASCII text is untouched."""
        self.assertEqual(escape_ldap_filter(u'fôöbàr'), u'fôöbàr')
        self.assertEqual(escape_ldap_filter('foo*bar'), 'foo\\2abar')
        self.assertEqual(escape_ldap_filter('foo(bar'), 'foo\\28bar')
        self.assertEqual(escape_ldap_filter('foo)bar'), 'foo\\29bar')
        self.assertEqual(escape_ldap_filter('foo\\bar'), 'foo\\5cbar')
        self.assertEqual(escape_ldap_filter('foo\\bar*wiz'), 'foo\\5cbar\\2awiz')
    def test_char_field_max_length(self):
        """max_length is carried through to the field instance."""
        self.assertEqual(fields.CharField(max_length=42).max_length, 42)
    def test_char_field_exact(self):
        """'exact' compiles to (attr=value), with escaping applied."""
        where = WhereNode()
        where.add(self._build_lookup('cn', 'exact', "test"), AND)
        self.assertEqual(self._where_as_ldap(where), "(cn=test)")
        where = WhereNode()
        where.add(self._build_lookup('cn', 'exact', "(test)"), AND)
        self.assertEqual(self._where_as_ldap(where), "(cn=\\28test\\29)")
    def test_char_field_in(self):
        """'in' compiles to an OR of equality tests."""
        where = WhereNode()
        where.add(self._build_lookup("cn", 'in', ["foo", "bar"]), AND)
        self.assertEqual(self._where_as_ldap(where), "(|(cn=foo)(cn=bar))")
        where = WhereNode()
        where.add(self._build_lookup("cn", 'in', ["(foo)", "(bar)"]), AND)
        self.assertEqual(self._where_as_ldap(where), "(|(cn=\\28foo\\29)(cn=\\28bar\\29))")
    def test_char_field_startswith(self):
        """'startswith' appends a wildcard; literal '*' in the value is escaped."""
        where = WhereNode()
        where.add(self._build_lookup("cn", 'startswith', "test"), AND)
        self.assertEqual(self._where_as_ldap(where), "(cn=test*)")
        where = WhereNode()
        where.add(self._build_lookup("cn", 'startswith', "te*st"), AND)
        self.assertEqual(self._where_as_ldap(where), "(cn=te\\2ast*)")
    def test_char_field_endswith(self):
        """'endswith' prepends a wildcard; literal '*' in the value is escaped."""
        where = WhereNode()
        where.add(self._build_lookup("cn", 'endswith', "test"), AND)
        self.assertEqual(self._where_as_ldap(where), "(cn=*test)")
        where = WhereNode()
        where.add(self._build_lookup("cn", 'endswith', "te*st"), AND)
        self.assertEqual(self._where_as_ldap(where), "(cn=*te\\2ast)")
    def test_char_field_contains(self):
        """'contains' wraps the value in wildcards on both sides."""
        where = WhereNode()
        where.add(self._build_lookup("cn", 'contains', "test"), AND)
        self.assertEqual(self._where_as_ldap(where), "(cn=*test*)")
        where = WhereNode()
        where.add(self._build_lookup("cn", 'contains', "te*st"), AND)
        self.assertEqual(self._where_as_ldap(where), "(cn=*te\\2ast*)")
    def test_integer_field(self):
        """Integer lookups compile to =, >=, <= and OR-of-= for 'in'."""
        where = WhereNode()
        where.add(self._build_lookup("uid", 'exact', 1, field=fields.IntegerField), AND)
        self.assertEqual(self._where_as_ldap(where), "(uid=1)")
        where = WhereNode()
        where.add(self._build_lookup("uid", 'gte', 1, field=fields.IntegerField), AND)
        self.assertEqual(self._where_as_ldap(where), "(uid>=1)")
        where = WhereNode()
        where.add(self._build_lookup("uid", 'lte', 1, field=fields.IntegerField), AND)
        self.assertEqual(self._where_as_ldap(where), "(uid<=1)")
        where = WhereNode()
        where.add(self._build_lookup("uid", 'in', [1, 2], field=fields.IntegerField), AND)
        self.assertEqual(self._where_as_ldap(where), "(|(uid=1)(uid=2))")
    def test_float_field(self):
        """Float lookups compile to =, >= and <= with the literal value."""
        where = WhereNode()
        where.add(self._build_lookup("uid", 'exact', 1.2, field=fields.FloatField), AND)
        self.assertEqual(self._where_as_ldap(where), "(uid=1.2)")
        where = WhereNode()
        where.add(self._build_lookup("uid", 'gte', 1.2, field=fields.FloatField), AND)
        self.assertEqual(self._where_as_ldap(where), "(uid>=1.2)")
        where = WhereNode()
        where.add(self._build_lookup("uid", 'lte', 1.2, field=fields.FloatField), AND)
        self.assertEqual(self._where_as_ldap(where), "(uid<=1.2)")
    def test_boolean_field(self):
        """Booleans (and 0/1) compile to the LDAP TRUE/FALSE literals."""
        where = WhereNode()
        where.add(self._build_lookup("isSuperuser", 'exact', True, field=fields.BooleanField), AND)
        self.assertEqual(self._where_as_ldap(where), "(isSuperuser=TRUE)")
        where = WhereNode()
        where.add(self._build_lookup("isSuperuser", 'exact', False, field=fields.BooleanField), AND)
        self.assertEqual(self._where_as_ldap(where), "(isSuperuser=FALSE)")
        where = WhereNode()
        where.add(self._build_lookup("isSuperuser", 'exact', 1, field=fields.BooleanField), AND)
        self.assertEqual(self._where_as_ldap(where), "(isSuperuser=TRUE)")
        where = WhereNode()
        where.add(self._build_lookup("isSuperuser", 'exact', 0, field=fields.BooleanField), AND)
        self.assertEqual(self._where_as_ldap(where), "(isSuperuser=FALSE)")
    def test_list_field_contains(self):
        """ListField 'contains' compiles to a plain equality test."""
        where = WhereNode()
        where.add(self._build_lookup("memberUid", 'contains', 'foouser', field=fields.ListField), AND)
        self.assertEqual(self._where_as_ldap(where), "(memberUid=foouser)")
        where = WhereNode()
        where.add(self._build_lookup("memberUid", 'contains', '(foouser)', field=fields.ListField), AND)
        self.assertEqual(self._where_as_ldap(where), "(memberUid=\\28foouser\\29)")
    def test_date_field(self):
        """DateField 'exact' compiles the ISO date string verbatim."""
        where = WhereNode()
        where.add(self._build_lookup("birthday", 'exact', '2013-09-03', field=fields.DateField), AND)
        self.assertEqual(self._where_as_ldap(where), "(birthday=2013-09-03)")
    def test_datetime_field(self):
        """DateTimeField lookups render as LDAP GeneralizedTime strings."""
        dt = datetime.datetime(2018, 6, 25, 20, 21, 22, tzinfo=UTC)
        where = WhereNode()
        where.add(self._build_lookup("modifyTimestamp", 'exact', dt, field=fields.DateTimeField,), AND)
        self.assertEqual(self._where_as_ldap(where), "(modifyTimestamp=20180625202122.000000Z)")
        where = WhereNode()
        where.add(self._build_lookup("modifyTimestamp", 'lte', dt, field=fields.DateTimeField,), AND)
        self.assertEqual(self._where_as_ldap(where), "(modifyTimestamp<=20180625202122.000000Z)")
        where = WhereNode()
        where.add(self._build_lookup("modifyTimestamp", 'gte', dt, field=fields.DateTimeField,), AND)
        self.assertEqual(self._where_as_ldap(where), "(modifyTimestamp>=20180625202122.000000Z)")
    def test_timestamp_field(self):
        """TimestampField lookups render the datetime as epoch seconds."""
        dt = datetime.datetime(2018, 6, 25, 20, 21, 22, tzinfo=UTC)
        where = WhereNode()
        where.add(self._build_lookup("shadowLastChange", 'exact', dt, field=fields.TimestampField), AND)
        self.assertEqual(self._where_as_ldap(where), "(shadowLastChange=1529958082)")
    def test_and(self):
        """Two AND-ed conditions compile to an LDAP (&...) filter."""
        where = WhereNode()
        where.add(self._build_lookup("cn", 'exact', "foo", field=fields.CharField), AND)
        where.add(self._build_lookup("givenName", 'exact', "bar", field=fields.CharField), AND)
        self.assertEqual(self._where_as_ldap(where), "(&(cn=foo)(givenName=bar))")
    def test_or(self):
        """An OR-ed condition compiles to an LDAP (|...) filter."""
        where = WhereNode()
        where.add(self._build_lookup("cn", 'exact', "foo", field=fields.CharField), AND)
        where.add(self._build_lookup("givenName", 'exact', "bar", field=fields.CharField), OR)
        self.assertEqual(self._where_as_ldap(where), "(|(cn=foo)(givenName=bar))")
| {
"content_hash": "eb3017285ffbbb9ba4213f57c674c22e",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 104,
"avg_line_length": 43.421487603305785,
"alnum_prop": 0.6230491054434717,
"repo_name": "django-ldapdb/django-ldapdb",
"id": "f2da2d4f37278ade03d1a78a1358dc62057e9669",
"size": "10647",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ldapdb/tests.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "1102"
},
{
"name": "Python",
"bytes": "55654"
}
],
"symlink_target": ""
} |
import pytest
import six
from pyocd.core.options_manager import OptionsManager
from pyocd.core.options import OPTIONS_INFO
@pytest.fixture(scope='function')
def mgr():
    """Provide a fresh OptionsManager for each test."""
    return OptionsManager()
@pytest.fixture(scope='function')
def layer1():
    """Option layer with three dummy keys and an 'auto_unlock' override."""
    return {
        'foo': 1,
        'bar': 2,
        'baz': 3,
        'auto_unlock': False,
    }
@pytest.fixture(scope='function')
def layer2():
    """Option layer overriding 'baz' and introducing 'dogcow'."""
    return {
        'baz': 33,
        'dogcow': 777,
    }
class TestOptionsManager(object):
    """Unit tests for OptionsManager layering, lookup, and notifications."""
    def test_defaults(self, mgr):
        """With no layers set, lookups fall back to OPTIONS_INFO defaults."""
        assert mgr.get('auto_unlock') == OPTIONS_INFO['auto_unlock'].default
        assert mgr['auto_unlock'] == OPTIONS_INFO['auto_unlock'].default
        # 'in' reflects explicitly-set options only, not defaults.
        assert 'auto_unlock' not in mgr
        assert mgr.get_default('auto_unlock') == OPTIONS_INFO['auto_unlock'].default
    def test_a(self, mgr, layer1):
        """A front layer's value overrides the built-in default."""
        mgr.add_front(layer1)
        assert 'auto_unlock' in mgr
        assert mgr.get('auto_unlock') == False
    def test_b(self, mgr, layer1):
        """A newer front layer wins over an earlier front layer."""
        mgr.add_front(layer1)
        mgr.add_front({'auto_unlock': True})
        assert 'auto_unlock' in mgr
        assert mgr.get('auto_unlock') == True
    def test_c(self, mgr, layer1):
        """A layer added at the back loses to an existing front layer."""
        mgr.add_front(layer1)
        mgr.add_back({'auto_unlock': True})
        assert 'auto_unlock' in mgr
        assert mgr.get('auto_unlock') == False
    def test_none_value(self, mgr):
        """A None value behaves as if the option were not set at all."""
        mgr.add_back({'auto_unlock': None})
        assert 'auto_unlock' not in mgr
        assert mgr.get('auto_unlock') == True
    def test_convert_double_underscore(self, mgr):
        """Double underscores in option names are converted to dots."""
        mgr.add_back({'debug__traceback': False})
        assert 'debug.traceback' in mgr
        assert mgr.get('debug.traceback') == False
    def test_set(self, mgr, layer1):
        """set() stores a value; a later front layer can still override it."""
        mgr.add_front(layer1)
        mgr.set('buzz', 1234)
        assert mgr['buzz'] == 1234
        mgr.add_front({'buzz': 4321})
        assert mgr.get('buzz') == 4321
    def test_update(self, mgr, layer1, layer2):
        """update() applies multiple values at once, with name conversion."""
        mgr.add_front(layer1)
        mgr.add_front(layer2)
        mgr.update({'foo': 888, 'debug__traceback': False})
        assert mgr['foo'] == 888
        assert mgr.get('debug.traceback') == False
    def test_notify_set(self, mgr, layer1):
        """Subscribers see old and new values when set() changes an option."""
        mgr.add_front(layer1)
        # Mutable cell so the closure can record that it was invoked.
        flag = [False]
        def cb(note):
            flag[0] = True
            assert note.event == 'foo'
            assert note.source == mgr
            assert note.data.new_value == 100 and note.data.old_value == 1
        mgr.subscribe(cb, 'foo')
        mgr.set('foo', 100)
        assert flag[0] == True
    def test_notify_layer(self, mgr, layer1, layer2):
        """Adding a front layer that changes a value notifies subscribers."""
        mgr.add_front(layer1)
        flag = [False]
        def cb(note):
            flag[0] = True
            assert note.event == 'baz'
            assert note.source == mgr
            assert note.data.new_value == 33 and note.data.old_value == 3
        mgr.subscribe(cb, 'baz')
        mgr.add_front(layer2)
        assert flag[0] == True
    def test_notify_back_layer(self, mgr, layer1, layer2):
        """A back layer that does not change the effective value is silent."""
        mgr.add_front(layer1)
        flag = [False]
        def cb(note):
            flag[0] = True
        mgr.subscribe(cb, 'baz')
        mgr.add_back(layer2)
        assert flag[0] == False
| {
"content_hash": "c7aace8df7b3a1fc7429c4d34bcb1a21",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 84,
"avg_line_length": 30.046296296296298,
"alnum_prop": 0.5651771956856703,
"repo_name": "flit/pyOCD",
"id": "518d2a96d9f1e492448212674ff8fa9764a0bb0d",
"size": "3880",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "test/unit/test_options_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1410"
},
{
"name": "Batchfile",
"bytes": "741"
},
{
"name": "C",
"bytes": "3904"
},
{
"name": "Makefile",
"bytes": "4391"
},
{
"name": "Python",
"bytes": "3603471"
}
],
"symlink_target": ""
} |
from . import domainresource
class Linkage(domainresource.DomainResource):
    """ Links records for 'same' item.
    Identifies two or more records (resource instances) that are referring to
    the same real-world "occurrence".
    """
    resource_type = "Linkage"
    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.
        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        self.active = None
        """ Whether this linkage assertion is active or not.
        Type `bool`. """
        self.author = None
        """ Who is responsible for linkages.
        Type `FHIRReference` referencing `Practitioner, Organization` (represented as `dict` in JSON). """
        self.item = None
        """ Item to be linked.
        List of `LinkageItem` items (represented as `dict` in JSON). """
        super(Linkage, self).__init__(jsondict=jsondict, strict=strict)
    def elementProperties(self):
        js = super(Linkage, self).elementProperties()
        # Tuples appear to be (name, json name, type, is_list, of_many,
        # not_optional) per the fhirabstractbase convention — confirm there.
        js.extend([
            ("active", "active", bool, False, None, False),
            ("author", "author", fhirreference.FHIRReference, False, None, False),
            ("item", "item", LinkageItem, True, None, True),
        ])
        return js
from . import backboneelement
class LinkageItem(backboneelement.BackboneElement):
    """ Item to be linked.
    Identifies one of the records that is considered to refer to the same real-
    world occurrence as well as how the items hould be evaluated within the
    collection of linked items.
    """
    resource_type = "LinkageItem"
    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.
        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        self.resource = None
        """ Resource being linked.
        Type `FHIRReference` (represented as `dict` in JSON). """
        self.type = None
        """ source | alternate | historical.
        Type `str`. """
        super(LinkageItem, self).__init__(jsondict=jsondict, strict=strict)
    def elementProperties(self):
        js = super(LinkageItem, self).elementProperties()
        # Tuples appear to be (name, json name, type, is_list, of_many,
        # not_optional) per the fhirabstractbase convention — confirm there.
        js.extend([
            ("resource", "resource", fhirreference.FHIRReference, False, None, True),
            ("type", "type", str, False, None, True),
        ])
        return js
import sys
try:
from . import fhirreference
except ImportError:
fhirreference = sys.modules[__package__ + '.fhirreference']
| {
"content_hash": "19a18a8e69e8a050c9a7ddaf37b098f8",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 106,
"avg_line_length": 34.40229885057471,
"alnum_prop": 0.6144336785833612,
"repo_name": "all-of-us/raw-data-repository",
"id": "68a21d3c84cb71bfe8920ac77eca7bb17324a697",
"size": "3169",
"binary": false,
"copies": "1",
"ref": "refs/heads/devel",
"path": "rdr_service/lib_fhir/fhirclient_3_0_0/models/linkage.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1866"
},
{
"name": "Mako",
"bytes": "1715"
},
{
"name": "Python",
"bytes": "17040924"
},
{
"name": "R",
"bytes": "2212"
},
{
"name": "Shell",
"bytes": "92213"
}
],
"symlink_target": ""
} |
from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
    """Return a random primary-key string of *length* lowercase hex characters.

    The key is a random slice of one or more UUID4 hex digests. Generalized:
    the original failed (ValueError from randint) for lengths above the 32
    characters of a single UUID hex digest; any non-negative length now works.

    :param length: number of hex characters to return (default 8)
    :return: a string of ``length`` lowercase hexadecimal characters
    """
    pool = uuid4().hex
    # Grow the pool so lengths beyond a single UUID (32 chars) also work.
    while len(pool) < length:
        pool += uuid4().hex
    start = randint(0, len(pool) - length)
    return pool[start:start + length]
def get_url_info(url):
    """Fetch name/type/size metadata for *url* via an HTTP HEAD request."""
    response = BasicHttp(url).HEAD()
    headers = response['header']
    return {
        'file_name': url.split('/')[-1],
        'content_type': headers['Content-Type'],
        'file_size': headers['Content-Length'],
    }
| {
"content_hash": "3fc1e66542d42e9211a5eeaaab80dd1e",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 54,
"avg_line_length": 18.56,
"alnum_prop": 0.5732758620689655,
"repo_name": "nachopro/followlink",
"id": "37cfc77c02d02b40eb454f0424307f74a68336b8",
"size": "464",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/links/functions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "PHP",
"bytes": "20852"
},
{
"name": "Python",
"bytes": "21217"
}
],
"symlink_target": ""
} |
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript, OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP
from binascii import unhexlify
import cStringIO
import time
def cltv_invalidate(tx):
    '''Modify the signature in vin 0 of the tx to fail CLTV

    Prepends -1 CLTV DROP in the scriptSig itself, which makes the
    CHECKLOCKTIMEVERIFY check fail once BIP65 rules are enforced.
    '''
    failing_prefix = [OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP]
    original_ops = list(CScript(tx.vin[0].scriptSig))
    tx.vin[0].scriptSig = CScript(failing_prefix + original_ops)
'''
This test is meant to exercise BIP65 (CHECKLOCKTIMEVERIFY)
Connect to a single node.
Mine 2 (version 3) blocks (save the coinbases for later).
Generate 98 more version 3 blocks, verify the node accepts.
Mine 749 version 4 blocks, verify the node accepts.
Check that the new CLTV rules are not enforced on the 750th version 4 block.
Check that the new CLTV rules are enforced on the 751st version 4 block.
Mine 199 new version blocks.
Mine 1 old-version block.
Mine 1 new version block.
Mine 1 old version block, see that the node rejects.
'''
class BIP65Test(ComparisonTestFramework):
    # Comparison test for BIP65 (OP_CHECKLOCKTIMEVERIFY) soft-fork
    # activation: versions 3/4 blocks are mined in the proportions needed
    # to cross the 750-of-1000 and 950-of-1000 supermajority thresholds.

    def __init__(self):
        # NOTE(review): the base-class __init__ is not called here;
        # presumably ComparisonTestFramework tolerates that -- confirm.
        self.num_nodes = 1

    def setup_network(self):
        # Must set the blockversion for this test
        self.nodes = start_nodes(1, self.options.tmpdir,
                                 extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=3']],
                                 binary=[self.options.testbinary])

    def run_test(self):
        test = TestManager(self, self.options.tmpdir)
        test.add_all_connections(self.nodes)
        NetworkThread().start()  # Start up network handling in another thread
        test.run()

    def create_transaction(self, node, coinbase, to_address, amount):
        # Build and sign a transaction spending output 0 of the given
        # coinbase block's coinbase tx; return it as a CTransaction.
        from_txid = node.getblock(coinbase)['tx'][0]
        inputs = [{ "txid" : from_txid, "vout" : 0}]
        outputs = { to_address : amount }
        rawtx = node.createrawtransaction(inputs, outputs)
        signresult = node.signrawtransaction(rawtx)
        tx = CTransaction()
        f = cStringIO.StringIO(unhexlify(signresult['hex']))
        tx.deserialize(f)
        return tx

    def get_tests(self):
        # Generator of TestInstance objects consumed by TestManager.
        self.coinbase_blocks = self.nodes[0].generate(2)
        height = 3  # height of the next block to build
        # Parse the best block hash as an integer (Python 2 long literal).
        self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
        self.nodeaddress = self.nodes[0].getnewaddress()
        self.last_block_time = time.time()

        ''' 98 more version 3 blocks '''
        test_blocks = []
        for i in xrange(98):
            block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
            block.nVersion = 3
            block.rehash()
            block.solve()
            test_blocks.append([block, True])
            self.last_block_time += 1
            self.tip = block.sha256
            height += 1
        yield TestInstance(test_blocks, sync_every_block=False)

        ''' Mine 749 version 4 blocks '''
        test_blocks = []
        for i in xrange(749):
            block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
            block.nVersion = 4
            block.rehash()
            block.solve()
            test_blocks.append([block, True])
            self.last_block_time += 1
            self.tip = block.sha256
            height += 1
        yield TestInstance(test_blocks, sync_every_block=False)

        # NOTE(review): the string below says "version 3 block" but the
        # block built here has nVersion = 4; the module docstring's
        # "750th version 4 block" appears to be the intended wording.
        '''
        Check that the new CLTV rules are not enforced in the 750th
        version 3 block.
        '''
        spendtx = self.create_transaction(self.nodes[0],
                self.coinbase_blocks[0], self.nodeaddress, 1.0)
        cltv_invalidate(spendtx)
        spendtx.rehash()
        # A CLTV-failing spend is still accepted: rules not yet enforced.
        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
        block.nVersion = 4
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()
        self.last_block_time += 1
        self.tip = block.sha256
        height += 1
        yield TestInstance([[block, True]])

        '''
        Check that the new CLTV rules are enforced in the 751st version 4
        block.
        '''
        spendtx = self.create_transaction(self.nodes[0],
                self.coinbase_blocks[1], self.nodeaddress, 1.0)
        cltv_invalidate(spendtx)
        spendtx.rehash()
        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
        block.nVersion = 4
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()
        self.last_block_time += 1
        # Invalid block: tip and height are intentionally not advanced.
        yield TestInstance([[block, False]])

        ''' Mine 199 new version blocks on last valid tip '''
        test_blocks = []
        for i in xrange(199):
            block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
            block.nVersion = 4
            block.rehash()
            block.solve()
            test_blocks.append([block, True])
            self.last_block_time += 1
            self.tip = block.sha256
            height += 1
        yield TestInstance(test_blocks, sync_every_block=False)

        ''' Mine 1 old version block '''
        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
        block.nVersion = 3
        block.rehash()
        block.solve()
        self.last_block_time += 1
        self.tip = block.sha256
        height += 1
        yield TestInstance([[block, True]])

        ''' Mine 1 new version block '''
        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
        block.nVersion = 4
        block.rehash()
        block.solve()
        self.last_block_time += 1
        self.tip = block.sha256
        height += 1
        yield TestInstance([[block, True]])

        ''' Mine 1 old version block, should be invalid '''
        # By now 950+ of the last 1000 blocks are version 4, so version 3
        # blocks must be rejected outright.
        block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
        block.nVersion = 3
        block.rehash()
        block.solve()
        self.last_block_time += 1
        yield TestInstance([[block, False]])
# Script entry point: run the BIP65 comparison test.
if __name__ == '__main__':
    BIP65Test().main()
| {
"content_hash": "30a0b697713e6e13d4ed1c3383a06bdc",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 100,
"avg_line_length": 36.82954545454545,
"alnum_prop": 0.6055229867324899,
"repo_name": "dragosbdi/bitcredit-2.0",
"id": "3a164f2511a8a330fccca82fc54ab26b218954e0",
"size": "6700",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qa/rpc-tests/bip65-cltv-p2p.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "759205"
},
{
"name": "C++",
"bytes": "4523312"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "3870"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2102"
},
{
"name": "M4",
"bytes": "153274"
},
{
"name": "Makefile",
"bytes": "102938"
},
{
"name": "Objective-C",
"bytes": "2168"
},
{
"name": "Objective-C++",
"bytes": "7248"
},
{
"name": "Protocol Buffer",
"bytes": "2316"
},
{
"name": "Python",
"bytes": "668870"
},
{
"name": "QMake",
"bytes": "2022"
},
{
"name": "Shell",
"bytes": "365867"
}
],
"symlink_target": ""
} |
from . import attributes, entity, player, mobile, archetype, feature, env, article, server
| {
"content_hash": "8a2a496dfcf299edc7293d5f34c2e80c",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 90,
"avg_line_length": 91,
"alnum_prop": 0.7692307692307693,
"repo_name": "genzgd/Lampost-Mud",
"id": "476c0d6903be519b52aae992cd9944784ddd9302",
"size": "91",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lampmud/lpmud/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17332"
},
{
"name": "HTML",
"bytes": "117576"
},
{
"name": "JavaScript",
"bytes": "217160"
},
{
"name": "Python",
"bytes": "121355"
}
],
"symlink_target": ""
} |
from .components import *
| {
"content_hash": "9648105a5348e52b083e1051dbac2eb5",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 25,
"avg_line_length": 26,
"alnum_prop": 0.7692307692307693,
"repo_name": "csdms/topoflow-bridge",
"id": "dfc3c948e0dff4ce15d45d21e656f6c59164ce55",
"size": "26",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "topoflow_bridge/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "29875"
}
],
"symlink_target": ""
} |
import logging
import numpy as np
from mxnet import context as ctx
from mxnet import ndarray as nd
from mxnet.io import DataDesc
from mxnet.executor_manager import _split_input_slice
def _load_general(data, targets, major_axis):
    """Copy a list of source arrays into matching target arrays.

    Each source is either copied directly into an NDArray target, or,
    when the source is itself a list/tuple, copied element-wise into the
    corresponding list of targets.  ``major_axis`` is accepted for
    interface compatibility but is not used here.
    """
    for src, dst in zip(data, targets):
        if isinstance(dst, nd.NDArray):
            src.copyto(dst)
        elif isinstance(src, (list, tuple)):
            for src_item, dst_item in zip(src, dst):
                src_item.copyto(dst_item)
        else:
            # Any other target layout is unsupported.
            raise NotImplementedError
def _load_data(batch, targets, major_axis):
    """Load data into sliced arrays.

    Thin wrapper over ``_load_general`` that copies ``batch.data`` into
    the per-device target arrays.
    """
    _load_general(batch.data, targets, major_axis)
def _load_label(batch, targets, major_axis):
    """Load label into sliced arrays.

    Thin wrapper over ``_load_general`` that copies ``batch.label`` into
    the per-device target arrays.
    """
    _load_general(batch.label, targets, major_axis)
def _merge_multi_context(outputs, major_axis):
    """Merge outputs that live on multiple contexts into single arrays,
    so that they look like they come from one context.

    For each output, ``major_axis`` gives the batch axis along which the
    per-device pieces are concatenated; a negative axis means there is no
    batch dimension and every device holds the same value.
    """
    merged = []
    for tensors, axis in zip(outputs, major_axis):
        if axis < 0:
            # No batch_size axis: all devices are expected to hold the
            # same result, so the first copy is taken without checking.
            merged.append(tensors[0])
        else:
            merged.append(nd.concatenate(tensors, axis=axis, always_copy=False))
    return merged
class DataParallelExecutorGroup(object):
    """DataParallelExecutorGroup is a group of executors that lives on a group of devices.
    This is a helper class used to implement data parallelization. Each mini-batch will
    be split and run on the devices.

    Parameters
    ----------
    symbol : Symbol
        The common symbolic computation graph for all executors.
    contexts : list
        A list of contexts.
    workload : list
        If not `None`, could be a list of numbers that specify the workload to be assigned
        to different context. Larger number indicate heavier workload.
    data_shapes : list
        Should be a list of (name, shape) tuples, for the shapes of data. Note the order is
        important and should be the same as the order that the `DataIter` provide the data.
    label_shapes : list
        Should be a list of (name, shape) tuples, for the shapes of label. Note the order is
        important and should be the same as the order that the `DataIter` provide the label.
    param_names : list
        A list of strings, indicating the names of parameters (e.g. weights, filters, etc.)
        in the computation graph.
    for_training : bool
        Indicate whether the executors should be bind for training. When not doing training,
        the memory for gradients will not be allocated.
    inputs_need_grad : bool
        Indicate whether the gradients for the input data should be computed. This is currently
        not used. It will be useful for implementing composition of modules.
    shared_group : DataParallelExecutorGroup
        Default is `None`. This is used in bucketing. When not `None`, it should be a executor
        group corresponding to a different bucket. In other words, it will correspond to a different
        symbol but with the same set of parameters (e.g. unrolled RNNs with different lengths).
        In this case, many memory will be shared.
    logger : Logger
        Default is `logging`.
    fixed_param_names: list of str
        Indicate parameters to be fixed during training. Parameters in this list will not allocate
        space for gradient, nor do gradient calculation.
    grad_req : str, list of str, dict of str to str
        Requirement for gradient accumulation. Can be 'write', 'add', or 'null'
        (default to 'write').
        Can be specified globally (str) or for each argument (list, dict).
    """
    def __init__(self, symbol, contexts, workload, data_shapes, label_shapes, param_names,
                 for_training, inputs_need_grad, shared_group=None, logger=logging,
                 fixed_param_names=None, grad_req='write', state_names=None):
        self.param_names = param_names
        self.arg_names = symbol.list_arguments()
        self.aux_names = symbol.list_auxiliary_states()
        self.symbol = symbol
        self.contexts = contexts
        self.workload = workload
        self.for_training = for_training
        self.inputs_need_grad = inputs_need_grad
        self.logger = logger
        #In the future we should have a better way to profile memory per device (haibin)
        # self._total_exec_bytes = 0
        self.fixed_param_names = fixed_param_names
        if self.fixed_param_names is None:
            self.fixed_param_names = []
        self.state_names = state_names
        if self.state_names is None:
            self.state_names = []
        if not for_training:
            # Inference-only binding never needs gradient buffers.
            grad_req = 'null'
        # data_shapes = [x if isinstance(x, DataDesc) else DataDesc(*x) for x in data_shapes]
        # if label_shapes is not None:
        #     label_shapes = [x if isinstance(x, DataDesc) else DataDesc(*x) for x in label_shapes]
        # NOTE(review): data_shapes is indexed with [0] here and per-device
        # in bind_exec, so it looks like a list of per-device shape lists
        # rather than a flat list of DataDesc -- confirm against callers.
        data_names = [x.name for x in data_shapes[0]]
        # Normalize grad_req (str | list/tuple | dict) into a per-argument dict.
        if isinstance(grad_req, str):
            self.grad_req = {}
            for k in self.arg_names:
                if k in self.param_names:
                    self.grad_req[k] = 'null' if k in self.fixed_param_names else grad_req
                elif k in data_names:
                    self.grad_req[k] = grad_req if self.inputs_need_grad else 'null'
                else:
                    self.grad_req[k] = 'null'
        elif isinstance(grad_req, (list, tuple)):
            assert len(grad_req) == len(self.arg_names)
            self.grad_req = dict(zip(self.arg_names, grad_req))
        elif isinstance(grad_req, dict):
            self.grad_req = {}
            for k in self.arg_names:
                if k in self.param_names:
                    self.grad_req[k] = 'null' if k in self.fixed_param_names else 'write'
                elif k in data_names:
                    self.grad_req[k] = 'write' if self.inputs_need_grad else 'null'
                else:
                    self.grad_req[k] = 'null'
            self.grad_req.update(grad_req)
        else:
            raise ValueError("grad_req must be one of str, list, tuple, or dict.")
        if shared_group is not None:
            # Bucketing: re-use data blobs from the shared group.
            self.shared_data_arrays = shared_group.shared_data_arrays
        else:
            self.shared_data_arrays = [{} for _ in contexts]
        # initialize some instance variables
        # NOTE(review): batch_size is set to len(data_shapes), i.e. the
        # number of per-device entries, not the sample count as in upstream
        # MXNet -- confirm intended.
        self.batch_size = len(data_shapes)
        self.slices = None
        self.execs = []
        self._default_execs = None
        self.data_arrays = None
        self.label_arrays = None
        self.param_arrays = None
        self.state_arrays = None
        self.grad_arrays = None
        self.aux_arrays = None
        self.input_grad_arrays = None
        self.data_shapes = None
        self.label_shapes = None
        self.data_layouts = None
        self.label_layouts = None
        # Batch axis of each output, derived from the symbol's __layout__ attr.
        self.output_layouts = [DataDesc.get_batch_axis(self.symbol[name].attr('__layout__'))
                               for name in self.symbol.list_outputs()]
        self.bind_exec(data_shapes, label_shapes, shared_group)

    def decide_slices(self, data_shapes):
        """Decide the slices for each context according to the workload.

        Parameters
        ----------
        data_shapes : list
            list of (name, shape) specifying the shapes for the input data or label.
        """
        assert len(data_shapes) > 0
        major_axis = [DataDesc.get_batch_axis(x.layout) for x in data_shapes]
        for (name, shape), axis in zip(data_shapes, major_axis):
            if axis == -1:
                # No batch axis for this input; nothing to slice.
                continue
            batch_size = shape[axis]
            if self.batch_size is not None:
                assert batch_size == self.batch_size, ("all data must have the same batch size: "
                                                       + ("batch_size = %d, but " % self.batch_size)
                                                       + ("%s has shape %s" % (name, shape)))
            else:
                self.batch_size = batch_size
        self.slices = _split_input_slice(self.batch_size, self.workload)
        return major_axis

    def _collect_arrays(self):
        """Collect internal arrays from executors."""
        # convenient data structures
        # self.data_arrays = [[(self.slices[i], e.arg_dict[name]) for i, e in enumerate(self.execs)]
        #                     for name, _ in self.data_shapes]
        # Per-executor lists of data arrays (one inner list per device).
        self.data_arrays = [[e.arg_dict[name] for name, _ in self.data_shapes[0]] for e in self.execs]
        self.state_arrays = [[e.arg_dict[name] for e in self.execs]
                             for name in self.state_names]
        if self.label_shapes is not None:
            # self.label_arrays = [[(self.slices[i], e.arg_dict[name])
            #                       for i, e in enumerate(self.execs)]
            #                      for name, _ in self.label_shapes]
            self.label_arrays = [[e.arg_dict[name] for name, _ in self.label_shapes[0]] for e in self.execs]
        else:
            self.label_arrays = None
        self.param_arrays = [[exec_.arg_arrays[i] for exec_ in self.execs]
                             for i, name in enumerate(self.arg_names)
                             if name in self.param_names]
        if self.for_training:
            self.grad_arrays = [[exec_.grad_arrays[i] for exec_ in self.execs]
                                for i, name in enumerate(self.arg_names)
                                if name in self.param_names]
        else:
            self.grad_arrays = None
        data_names = [x[0] for x in self.data_shapes]
        if self.inputs_need_grad:
            self.input_grad_arrays = [[exec_.grad_arrays[i] for exec_ in self.execs]
                                      for i, name in enumerate(self.arg_names)
                                      if name in data_names]
        else:
            self.input_grad_arrays = None
        self.aux_arrays = [[exec_.aux_arrays[i] for exec_ in self.execs]
                           for i in range(len(self.aux_names))]

    def bind_exec(self, data_shapes, label_shapes, shared_group=None, reshape=False):
        """Bind executors on their respective devices.

        Parameters
        ----------
        data_shapes : list
            Per-device lists of (name, shape) pairs.
        label_shapes : list
            Per-device lists of (name, shape) pairs, or None.
        shared_group : DataParallelExecutorGroup
        reshape : bool
            When True, reshape the previously-bound executors instead of
            binding fresh ones.
        """
        assert reshape or not self.execs
        # self.batch_size = None
        # calculate workload and bind executors
        # self.data_layouts = self.decide_slices(data_shapes)
        # if label_shapes is not None:
        #     # call it to make sure labels has the same batch size as data
        #     self.label_layouts = self.decide_slices(label_shapes)
        for i in range(len(self.contexts)):
            # data_shapes_i = self._sliced_shape(data_shapes, i, self.data_layouts)
            # Shapes for device i are supplied directly by the caller
            # (no slicing here, unlike upstream MXNet).
            data_shapes_i = data_shapes[i]
            if label_shapes is not None:
                label_shapes_i = label_shapes[i]
                # label_shapes_i = self._sliced_shape(label_shapes, i, self.label_layouts)
            else:
                label_shapes_i = []
            if reshape:
                self.execs[i] = self._default_execs[i].reshape(
                    allow_up_sizing=True, **dict(data_shapes_i + label_shapes_i))
            else:
                self.execs.append(self._bind_ith_exec(i, data_shapes_i, label_shapes_i,
                                                      shared_group))
        self.data_shapes = data_shapes
        self.label_shapes = label_shapes
        self._collect_arrays()

    def reshape(self, data_shapes, label_shapes):
        """Reshape executors.

        Parameters
        ----------
        data_shapes : list
        label_shapes : list
        """
        if self._default_execs is None:
            # Remember the originally-bound executors so later reshapes
            # always start from the default binding.
            self._default_execs = [i for i in self.execs]
        for i in range(len(self.contexts)):
            self.execs[i] = self._default_execs[i].reshape(
                allow_up_sizing=True, **dict(data_shapes[i] + (label_shapes[i] if label_shapes is not None else []))
            )
        self.data_shapes = data_shapes
        self.label_shapes = label_shapes
        self._collect_arrays()

    def set_params(self, arg_params, aux_params):
        """Assign, i.e. copy parameters to all the executors.

        Parameters
        ----------
        arg_params : dict
            A dictionary of name to `NDArray` parameter mapping.
        aux_params : dict
            A dictionary of name to `NDArray` auxiliary variable mapping.
        """
        for exec_ in self.execs:
            exec_.copy_params_from(arg_params, aux_params)

    def get_params(self, arg_params, aux_params):
        """ Copy data from each executor to `arg_params` and `aux_params`.

        Parameters
        ----------
        arg_params : list of NDArray
            target parameter arrays
        aux_params : list of NDArray
            target aux arrays

        Notes
        -----
        - This function will inplace update the NDArrays in arg_params and aux_params.
        """
        # Average each parameter across devices on CPU before copying out.
        for name, block in zip(self.param_names, self.param_arrays):
            weight = sum(w.copyto(ctx.cpu()) for w in block) / len(block)
            weight.astype(arg_params[name].dtype).copyto(arg_params[name])
        for name, block in zip(self.aux_names, self.aux_arrays):
            weight = sum(w.copyto(ctx.cpu()) for w in block) / len(block)
            weight.astype(aux_params[name].dtype).copyto(aux_params[name])

    def forward(self, data_batch, is_train=None):
        """Split `data_batch` according to workload and run forward on each devices.

        Parameters
        ----------
        data_batch : DataBatch
            Or could be any object implementing similar interface.
        is_train : bool
            The hint for the backend, indicating whether we are during training phase.
            Default is `None`, then the value `self.for_training` will be used.
        Returns
        -------
        """
        # self.data_layouts is ignored by _load_general, so None is fine here.
        _load_data(data_batch, self.data_arrays, self.data_layouts)
        if is_train is None:
            is_train = self.for_training
        if self.label_arrays is not None:
            assert not is_train or data_batch.label
            if data_batch.label:
                _load_label(data_batch, self.label_arrays, self.label_layouts)
        for exec_ in self.execs:
            exec_.forward(is_train=is_train)

    def get_outputs(self, merge_multi_context=True):
        """Get outputs of the previous forward computation.

        Parameters
        ----------
        merge_multi_context : bool
            Default is `True`. In the case when data-parallelism is used, the outputs
            will be collected from multiple devices. A `True` value indicate that we
            should merge the collected results so that they look like from a single
            executor.

        Returns
        -------
        If `merge_multi_context` is `True`, it is like `[out1, out2]`. Otherwise, it
        is like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`. All the output
        elements are `NDArray`.
        """
        outputs = [[exec_.outputs[i] for exec_ in self.execs]
                   for i in range(len(self.execs[0].outputs))]
        if merge_multi_context:
            outputs = _merge_multi_context(outputs, self.output_layouts)
        return outputs

    def get_states(self, merge_multi_context=True):
        """Get states from all devices

        Parameters
        ----------
        merge_multi_context : bool
            Default is `True`. In the case when data-parallelism is used, the states
            will be collected from multiple devices. A `True` value indicate that we
            should merge the collected results so that they look like from a single
            executor.

        Returns
        -------
        If `merge_multi_context` is `True`, it is like `[out1, out2]`. Otherwise, it
        is like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`. All the output
        elements are `NDArray`.
        """
        assert not merge_multi_context, \
            "merge_multi_context=True is not supported for get_states yet."
        return self.state_arrays

    def set_states(self, states=None, value=None):
        """Set value for states. Only one of states & value can be specified.

        Parameters
        ----------
        states : list of list of NDArrays
            source states arrays formatted like [[state1_dev1, state1_dev2],
            [state2_dev1, state2_dev2]].
        value : number
            a single scalar value for all state arrays.
        """
        if states is not None:
            assert value is None, "Only one of states & value can be specified."
            _load_general(states, self.state_arrays, (0,)*len(states))
        else:
            assert value is not None, "At least one of states & value must be specified."
            assert states is None, "Only one of states & value can be specified."
            for d_dst in self.state_arrays:
                for dst in d_dst:
                    dst[:] = value

    def get_input_grads(self, merge_multi_context=True):
        """Get the gradients with respect to the inputs of the module.

        Parameters
        ----------
        merge_multi_context : bool
            Default is `True`. In the case when data-parallelism is used, the outputs
            will be collected from multiple devices. A `True` value indicate that we
            should merge the collected results so that they look like from a single
            executor.

        Returns
        -------
        If `merge_multi_context` is `True`, it is like `[grad1, grad2]`. Otherwise, it
        is like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`. All the output
        elements are `NDArray`.
        """
        assert self.inputs_need_grad
        if merge_multi_context:
            return _merge_multi_context(self.input_grad_arrays, self.data_layouts)
        return self.input_grad_arrays

    def backward(self, out_grads=None):
        """Run backward on all devices. A backward should be called after
        a call to the forward function. Backward cannot be called unless
        `self.for_training` is `True`.

        Parameters
        ----------
        out_grads : NDArray or list of NDArray, optional
            Gradient on the outputs to be propagated back.
            This parameter is only needed when bind is called
            on outputs that are not a loss function.
        """
        assert self.for_training, 're-bind with for_training=True to run backward'
        if out_grads is None:
            out_grads = []
        # for i, (exec_, islice) in enumerate(zip(self.execs, self.slices)):
        for i, exec_ in enumerate(self.execs):
            # NOTE(review): an empty list is always passed here, so any
            # `out_grads` supplied by the caller is ignored -- confirm
            # this is intended for this modified executor group.
            out_grads_slice = []
            exec_.backward(out_grads=out_grads_slice)

    def update_metric(self, eval_metric, labels):
        """Accumulate the performance according to `eval_metric` on all devices.

        Parameters
        ----------
        eval_metric : EvalMetric
            The metric used for evaluation.
        labels : list of NDArray
            Typically comes from `label` of a `DataBatch`.
        """
        # NOTE: the loop variable rebinds (shadows) the `labels` parameter;
        # each element is the per-device label list.
        for texec, labels in zip(self.execs, labels):
            eval_metric.update(labels, texec.outputs)

    def _bind_ith_exec(self, i, data_shapes, label_shapes, shared_group):
        """Internal utility function to bind the i-th executor.
        """
        shared_exec = None if shared_group is None else shared_group.execs[i]
        context = self.contexts[i]
        shared_data_arrays = self.shared_data_arrays[i]
        input_shapes = dict(data_shapes)
        if label_shapes is not None:
            input_shapes.update(dict(label_shapes))
        arg_shapes, _, aux_shapes = self.symbol.infer_shape(**input_shapes)
        assert arg_shapes is not None, "shape inference failed"
        input_types = {x.name: x.dtype for x in data_shapes}
        if label_shapes is not None:
            input_types.update({x.name: x.dtype for x in label_shapes})
        arg_types, _, aux_types = self.symbol.infer_type(**input_types)
        assert arg_types is not None, "type inference failed"
        arg_arrays = []
        grad_arrays = {} if self.for_training else None

        def _get_or_reshape(name, shared_data_arrays, arg_shape, arg_type, context, logger):
            """Internal helper to get a memory block or re-use by re-shaping"""
            if name in shared_data_arrays:
                arg_arr = shared_data_arrays[name]
                if np.prod(arg_arr.shape) >= np.prod(arg_shape):
                    # nice, we can directly re-use this data blob
                    assert arg_arr.dtype == arg_type
                    arg_arr = arg_arr.reshape(arg_shape)
                else:
                    logger.warning(('bucketing: data "%s" has a shape %s' % (name, arg_shape)) +
                                   (', which is larger than already allocated ') +
                                   ('shape %s' % (arg_arr.shape,)) +
                                   ('. Need to re-allocate. Consider putting ') +
                                   ('default_bucket_key to') +
                                   (' be the bucket taking the largest input for better ') +
                                   ('memory sharing.'))
                    arg_arr = nd.zeros(arg_shape, context, dtype=arg_type)
                    # replace existing shared array because the new one is bigger
                    shared_data_arrays[name] = arg_arr
            else:
                arg_arr = nd.zeros(arg_shape, context, dtype=arg_type)
                shared_data_arrays[name] = arg_arr
            return arg_arr

        # create or borrow arguments and gradients
        for j in range(len(self.arg_names)):
            name = self.arg_names[j]
            if name in self.param_names:  # model parameters
                if shared_exec is None:
                    arg_arr = nd.zeros(arg_shapes[j], context, dtype=arg_types[j])
                    if self.grad_req[name] != 'null':
                        grad_arr = nd.zeros(arg_shapes[j], context, dtype=arg_types[j])
                        grad_arrays[name] = grad_arr
                else:
                    # Bucketing: parameters (and grads) are shared with the
                    # executor of the default bucket.
                    arg_arr = shared_exec.arg_dict[name]
                    assert arg_arr.shape == arg_shapes[j]
                    assert arg_arr.dtype == arg_types[j]
                    if self.grad_req[name] != 'null':
                        grad_arrays[name] = shared_exec.grad_dict[name]
            else:  # data, label, or states
                arg_arr = _get_or_reshape(name, shared_data_arrays, arg_shapes[j], arg_types[j],
                                          context, self.logger)
                # data might also need grad if inputs_need_grad is True
                if self.grad_req[name] != 'null':
                    grad_arrays[name] = _get_or_reshape('grad of ' + name, shared_data_arrays,
                                                        arg_shapes[j], arg_types[j], context,
                                                        self.logger)
            arg_arrays.append(arg_arr)
        # create or borrow aux variables
        if shared_exec is None:
            aux_arrays = [nd.zeros(s, context, dtype=t) for s, t in zip(aux_shapes, aux_types)]
        else:
            for j, arr in enumerate(shared_exec.aux_arrays):
                assert aux_shapes[j] == arr.shape
                assert aux_types[j] == arr.dtype
            aux_arrays = shared_exec.aux_arrays[:]
        executor = self.symbol.bind(ctx=context, args=arg_arrays,
                                    args_grad=grad_arrays, aux_states=aux_arrays,
                                    grad_req=self.grad_req, shared_exec=shared_exec)
        # Get the total bytes allocated for this executor
        # self._total_exec_bytes += int(executor.debug_str().split('\n')[-3].split()[1])
        return executor

    def _sliced_shape(self, shapes, i, major_axis):
        """Get the sliced shapes for the i-th executor.

        Parameters
        ----------
        shapes : list of (str, tuple)
            The original (name, shape) pairs.
        i : int
            Which executor we are dealing with.
        """
        sliced_shapes = []
        for desc, axis in zip(shapes, major_axis):
            shape = list(desc.shape)
            if axis >= 0:
                # Replace the batch axis with the size of this device's slice.
                shape[axis] = self.slices[i].stop - self.slices[i].start
            sliced_shapes.append(DataDesc(desc.name, tuple(shape), desc.dtype, desc.layout))
        return sliced_shapes

    def install_monitor(self, mon):
        """Install monitor on all executors"""
        for exe in self.execs:
            mon.install(exe)
| {
"content_hash": "edda605b899397e60bed2e20a7d1ad97",
"timestamp": "",
"source": "github",
"line_count": 595,
"max_line_length": 116,
"avg_line_length": 42.4218487394958,
"alnum_prop": 0.569430688166079,
"repo_name": "deepinsight/Deformable-ConvNets",
"id": "15c8469024f2e33d2068a4e751b92d1447752a51",
"size": "25556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deeplab/core/DataParallelExecutorGroup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "347"
},
{
"name": "C",
"bytes": "9632"
},
{
"name": "C++",
"bytes": "116678"
},
{
"name": "Cuda",
"bytes": "392494"
},
{
"name": "Makefile",
"bytes": "366"
},
{
"name": "Python",
"bytes": "2276169"
},
{
"name": "Shell",
"bytes": "4043"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # South schema migration: adds ResultTable.start_time (TimeField) with
    # a one-off default so that existing rows receive a value.

    def forwards(self, orm):
        # Adding field 'ResultTable.start_time'
        db.add_column(u'windberg_results_resulttable', 'start_time',
                      self.gf('django.db.models.fields.TimeField')(default=datetime.datetime(2013, 10, 31, 0, 0)),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'ResultTable.start_time'
        db.delete_column(u'windberg_results_resulttable', 'start_time')

    # Frozen ORM state used by South to rebuild fake model classes for this
    # migration; auto-generated -- do not edit by hand.
    models = {
        u'windberg_register.agegroup': {
            'Meta': {'object_name': 'AgeGroup'},
            'gender': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_detail': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_pseudo': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'max_age': ('django.db.models.fields.IntegerField', [], {}),
            'min_age': ('django.db.models.fields.IntegerField', [], {}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'short': ('django.db.models.fields.CharField', [], {'max_length': '40'})
        },
        u'windberg_register.run': {
            'Meta': {'ordering': "['distance']", 'object_name': 'Run'},
            'distance': ('django.db.models.fields.IntegerField', [], {}),
            'has_ages': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'possible_ages': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['windberg_register.AgeGroup']", 'symmetrical': 'False'})
        },
        u'windberg_register.start': {
            'Meta': {'object_name': 'Start'},
            'creation_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'runs': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['windberg_register.Run']", 'symmetrical': 'False'}),
            'start_time': ('django.db.models.fields.TimeField', [], {})
        },
        u'windberg_register.version': {
            'Meta': {'object_name': 'Version'},
            'date': ('django.db.models.fields.DateField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'net_end': ('django.db.models.fields.DateField', [], {}),
            'starts': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['windberg_register.Start']", 'symmetrical': 'False'})
        },
        u'windberg_results.resultentry': {
            'Meta': {'object_name': 'ResultEntry'},
            'age_group': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'birth_year': ('django.db.models.fields.DateField', [], {}),
            'club': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'gender': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'given': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'rank': ('django.db.models.fields.IntegerField', [], {}),
            'rank_age': ('django.db.models.fields.IntegerField', [], {})
        },
        u'windberg_results.resulttable': {
            'Meta': {'object_name': 'ResultTable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
            'start_time': ('django.db.models.fields.TimeField', [], {}),
            'version': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['windberg_register.Version']"})
        }
    }

    complete_apps = ['windberg_results']
"content_hash": "a1bfdcccab32eb475f9f3b74895eee8e",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 154,
"avg_line_length": 57.94805194805195,
"alnum_prop": 0.5502017032720753,
"repo_name": "janLo/Windberg-web",
"id": "214471a259a491ca0ec7f66d23fb36906f91fa06",
"size": "4486",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "windberg_results/migrations/0002_auto__add_field_resulttable_start_time.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "7891"
},
{
"name": "JavaScript",
"bytes": "5217"
},
{
"name": "Python",
"bytes": "134738"
}
],
"symlink_target": ""
} |
"""
Copyright (c) 2017-2022, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file LICENSE, distributed with this software.
Created on May 20, 2017
"""
# flake8: noqa F401
from .activity import Activity
from .activity_indicator import ActivityIndicator
from .app_bar_layout import AppBarLayout
from .auto_complete_text_view import AutoCompleteTextView
from .bottom_sheet_dialog import BottomSheetDialog
#: Controls
from .button import Button, FloatingActionButton, ImageButton
from .calendar_view import CalendarView
from .camera_view import CameraView
from .card_view import CardView
from .checkbox import CheckBox
from .chronometer import Chronometer
from .compound_button import CompoundButton
from .coordinator_layout import CoordinatorLayout
from .date_picker import DatePicker
#: Dialogs
from .dialog import Dialog
from .drawer_layout import DrawerLayout
from .edit_text import EditText
from .flexbox import Flexbox
from .fragment import Fragment
from .frame_layout import FrameLayout
from .grid_layout import GridLayout
from .iconify import Icon, IconButton, IconToggleButton
# from .view_animator import ViewAnimator
# from .view_switcher import ViewSwitcher
# from .text_switcher import TextSwitcher
from .image_view import ImageView
#: Layouts
from .linear_layout import LinearLayout
from .list_view import ListItem, ListView
from .notification import Notification
from .picker import Picker
from .popup_window import PopupWindow
#: Widgets
from .progress_bar import ProgressBar
from .radio_button import RadioButton
from .radio_group import RadioGroup
from .rating_bar import RatingBar
from .relative_layout import RelativeLayout
from .scroll_view import ScrollView
from .seek_bar import SeekBar
from .snackbar import Snackbar
from .spinner import Spinner
from .surface_view import SurfaceView
from .swipe_refresh_layout import SwipeRefreshLayout
from .switch import Switch
from .tab_layout import TabFragment, TabLayout
from .text_clock import TextClock
from .text_view import TextView
from .texture_view import TextureView
#: Pickers
from .time_picker import TimePicker
from .toast import Toast
from .toggle_button import ToggleButton
from .toolbar import Toolbar
from .video_view import VideoView
#: Views
from .view import View
from .view_group import ViewGroup
from .view_pager import PagerFragment, PagerTabStrip, PagerTitleStrip, ViewPager
from .web_view import WebView
from .window import Window
# from .map_view import MapView, MapMarker, MapCircle, MapPolyline, MapPolygon
# from .chart_view import DataSet, LineChart, BarChart, ScatterChart, PieChart
| {
"content_hash": "7cec39d622b9dea7d4b96c7e3c811eaf",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 80,
"avg_line_length": 30.905882352941177,
"alnum_prop": 0.818804720213171,
"repo_name": "codelv/enaml-native",
"id": "d4a740324b0497bb9f06570d8725a2d98eac3787",
"size": "2627",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/enamlnative/widgets/api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "12016"
},
{
"name": "Cython",
"bytes": "37131"
},
{
"name": "Java",
"bytes": "129792"
},
{
"name": "Makefile",
"bytes": "1341"
},
{
"name": "Objective-C",
"bytes": "31920"
},
{
"name": "Python",
"bytes": "669324"
},
{
"name": "Shell",
"bytes": "2048"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import print_function
from objects import TemporalObject, Link, PLink
class Axiom(TemporalObject):

    """A composition rule for closure: a link with relation ``inRel`` followed
    by a link with relation ``outRel`` entails a link with ``resultRel``."""

    def __init__(self, inRel, outRel, resultRel):
        self.inRel = inRel
        self.outRel = outRel
        self.resultRel = resultRel

    def __str__(self):
        return "<%s %s %s>" % (self.inRel, self.outRel, self.resultRel)
class Closure(object):
    # Computes the transitive closure of temporal links over a single graph,
    # driven by a table of composition axioms. Works over either interval
    # ("nodes") links or point ("points") links.
    def __init__(self,environment,closureType):
        """Initialize a closure object, there is a separate closure object for each graph
        that closure is run over. isConsistent is set to 0 if closure derives a link that
        is not consistent with an existing one. isClosed is only usefull if it is set to 0
        each time a link is added."""
        self.isConsistent = 1
        self.isClosed = 0
        self.debug = 0
        self.environment = environment
        self.closureType = closureType
        # Select the graph elements, axiom table, and link class for the
        # requested closure type.
        if closureType == 'nodes':
            self.nodes = environment.NODES
            self.links = environment.LINKS
            self.axioms = environment.AXIOMS
            self.linkType = Link
        elif closureType == 'points':
            self.nodes = environment.POINTS
            self.links = environment.PLINKS
            self.axioms = environment.POINT_AXIOMS
            self.linkType = PLink
        else:
            # Unknown type: the instance is left without nodes/links/axioms.
            print("ERROR: unknown closure type")
            return
    def computeClosure(self):
        """Main loop for closure algorithm. Find dirty nodes and and close them. Closure
        creates new links and link creation by default causes the begin and end nodes to
        be marked dirty. But only link creation outside of closure should introduce new
        dirtyness. Overwrite this by cleaning all nodes after closure."""
        for node in self.nodes:
            if node.isDirty: self.closeNode(node)
        # Reset dirtiness introduced by links created during closure itself.
        for node in self.nodes:
            node.isDirty = 0
        self.isClosed = 1
    def closeNode(self,node):
        # Compose every (inLink, outLink) pair through this node; when an
        # axiom matches the pair, derive a new link from inLink.begin to
        # outLink.end.
        if self.debug:
            print("Closing node %s" % (node.string))
            print(node.inLinks)
            print(node.outLinks)
        for inLink in node.inLinks:
            for outLink in node.outLinks:
                if self.debug: print(inLink.asPrettyString(),outLink.asPrettyString())
                axiom = self.findAxiom(inLink,outLink)
                if axiom:
                    self.printMessage1(node, axiom, inLink, outLink)
                    self.addLink(inLink,outLink,axiom.resultRel)
        # NOTE(review): this sets isDirty on the Closure instance, not on
        # `node` -- possibly intended as node.isDirty = 0. computeClosure
        # cleans all nodes afterwards anyway, so it appears harmless; confirm.
        self.isDirty = 0
    def addLink(self,inlink,outlink,relation):
        # Add the derived link unless an equivalent or conflicting link
        # already exists; conflicts clear the isConsistent flag.
        node1 = inlink.begin
        node2 = outlink.end
        # find link from begin to end
        existingLink = self.findLink(node1,relation,node2)
        if existingLink:
            #print "CLOSURE, existing link", existingLink
            # existingLink either has < or =, check whether it is the same as
            # the new link, if not, give inconsistency warning and return
            if existingLink.relation != relation:
                self.isConsistent = 0
                #self.printMessage2(existingLink,relation,inlink,outlink)
            return
        # find reversed link from end to begin
        existingLink = self.findLink(node2,relation,node1)
        if existingLink:
            #print "CLOSURE, existing reversed link", existingLink
            # existing link can be < or =, < will clash with new relation,
            # so only allowed combination is to have two ='s
            if existingLink.relation == '<' or relation == '<':
                self.isConsistent = 0
                #self.printMessage2(existingLink,relation,inlink,outlink)
            return
        # Use stored link type so code can abstract away from
        # node versus point distinction
        newLink = self.linkType(self.environment,node1,relation,node2)
        # BUT, that didn't work in PythonWin 2.2.3, it did not allow using
        # __init__ on the superclass, therefore,
        # NOTE(review): the 'xpoints'/'xnodes' branches below can never match
        # the 'nodes'/'points' types accepted by __init__, so they appear to
        # be disabled workaround code.
        if self.closureType == 'xpoints':
            newLink = PLink(self.environment,node1,relation,node2)
        if self.closureType == 'xnodes':
            newLink = Link(self.environment,node1,relation,node2)
        #print "CLOSURE, adding link", newLink
        newLink.history = "closure"
    def findAxiom(self,link1,link2):
        """Quick and dirty way, really want to index the axioms to speed
        this up for large axiom set."""
        for axiom in self.axioms:
            if link1.relation == axiom.inRel and link2.relation == axiom.outRel:
                return axiom
        return None
    def findLink(self,node1,rel,node2):
        """Quick and dirty, index links to speed this up."""
        #print "Finding link:", node1, rel, node2
        # NOTE(review): `rel` is not compared here; any link between the two
        # nodes is returned regardless of relation -- confirm intended.
        for link in self.links:
            if link.begin == node1 and link.end == node2:
                return link
        return None
    def debugOn(self): self.debug = 1
    def debugOff(self): self.debug = 0
    def printMessage1(self,node,axiom,inlink,outlink):
        # Debug trace of one composition step.
        if self.debug:
            print("Closing:..")
            print("   ", node)
            print("   ", axiom)
            print("   ", inlink)
            print("   ", outlink)
    def printMessage2(self,existingLink,relation,inlink,outlink):
        # Warning trace for a derived link that conflicts with an existing one.
        print("\nWARNING: link already exists")
        print("   %s" % (existingLink))
        print("   %s" % (relation.upper()))
        print("   %s" % (inlink))
        print("   %s" % (outlink))
| {
"content_hash": "bebc4d90b17c78a0f34547aab30974b4",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 90,
"avg_line_length": 40.68888888888889,
"alnum_prop": 0.5996723102129984,
"repo_name": "tarsqi/ttk",
"id": "e61fb6715c8e5d11d7ca71a844144a1de8f67b4c",
"size": "5494",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "components/merging/sputlink/rules/closure.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2624"
},
{
"name": "Perl",
"bytes": "237870"
},
{
"name": "Python",
"bytes": "1320210"
},
{
"name": "Shell",
"bytes": "5580"
}
],
"symlink_target": ""
} |
"""
============================
Faces dataset decompositions
============================
This example applies to :ref:`olivetti_faces` different unsupervised
matrix decomposition (dimension reduction) methods from the module
:py:mod:`sklearn.decomposition` (see the documentation chapter
:ref:`decompositions`) .
"""
print(__doc__)
# Authors: Vlad Niculae, Alexandre Gramfort
# License: BSD 3 clause
import logging
from time import time
import matplotlib.pyplot as plt
from numpy.random import RandomState
from sklearn import decomposition
from sklearn.cluster import MiniBatchKMeans
from sklearn.datasets import fetch_olivetti_faces
# Display progress logs on stdout
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(levelname)s %(message)s')
# Gallery layout: 2 rows x 3 columns = 6 components per figure.
n_row, n_col = 2, 3
n_components = n_row * n_col
# Olivetti faces are 64x64 grayscale images, flattened to 4096 features.
image_shape = (64, 64)
rng = RandomState(0)
###############################################################################
# Load faces data
dataset = fetch_olivetti_faces(shuffle=True, random_state=rng)
faces = dataset.data
n_samples, n_features = faces.shape
# global centering
faces_centered = faces - faces.mean(axis=0)
# local centering
faces_centered -= faces_centered.mean(axis=1).reshape(n_samples, -1)
print("Dataset consists of %d faces" % n_samples)
###############################################################################
def plot_gallery(title, images, n_col=n_col, n_row=n_row):
    """Plot a titled grid of component images on a symmetric gray color scale."""
    plt.figure(figsize=(2. * n_col, 2.26 * n_row))
    plt.suptitle(title, size=16)
    for idx, component in enumerate(images):
        plt.subplot(n_row, n_col, idx + 1)
        # Symmetric color limits so positive/negative weights are comparable.
        limit = max(component.max(), -component.min())
        plt.imshow(component.reshape(image_shape), cmap=plt.cm.gray,
                   interpolation='nearest',
                   vmin=-limit, vmax=limit)
        plt.xticks(())
        plt.yticks(())
    plt.subplots_adjust(0.01, 0.05, 0.99, 0.93, 0.04, 0.)
###############################################################################
# List of the different estimators, whether to center and transpose the
# problem, and whether the transformer uses the clustering API.
# Each tuple is (display title, estimator instance, use-centered-data flag).
estimators = [
    ('Eigenfaces - RandomizedPCA',
     decomposition.RandomizedPCA(n_components=n_components, whiten=True),
     True),
    ('Non-negative components - NMF',
     decomposition.NMF(n_components=n_components, init='nndsvda', tol=5e-3),
     False),
    ('Independent components - FastICA',
     decomposition.FastICA(n_components=n_components, whiten=True),
     True),
    ('Sparse comp. - MiniBatchSparsePCA',
     decomposition.MiniBatchSparsePCA(n_components=n_components, alpha=0.8,
                                      n_iter=100, batch_size=3,
                                      random_state=rng),
     True),
    ('MiniBatchDictionaryLearning',
     decomposition.MiniBatchDictionaryLearning(n_components=15, alpha=0.1,
                                               n_iter=50, batch_size=3,
                                               random_state=rng),
     True),
    ('Cluster centers - MiniBatchKMeans',
     MiniBatchKMeans(n_clusters=n_components, tol=1e-3, batch_size=20,
                     max_iter=50, random_state=rng),
     True),
    ('Factor Analysis components - FA',
     decomposition.FactorAnalysis(n_components=n_components, max_iter=2),
     True),
]
###############################################################################
# Plot a sample of the input data
plot_gallery("First centered Olivetti faces", faces_centered[:n_components])
###############################################################################
# Do the estimation and plot it
for name, estimator, center in estimators:
    print("Extracting the top %d %s..." % (n_components, name))
    t0 = time()
    data = faces
    if center:
        data = faces_centered
    estimator.fit(data)
    train_time = (time() - t0)
    print("done in %0.3fs" % train_time)
    # Clustering estimators expose cluster_centers_; decompositions expose
    # components_.
    if hasattr(estimator, 'cluster_centers_'):
        components_ = estimator.cluster_centers_
    else:
        components_ = estimator.components_
    # FactorAnalysis additionally estimates a per-pixel noise variance.
    if hasattr(estimator, 'noise_variance_'):
        plot_gallery("Pixelwise variance",
                     estimator.noise_variance_.reshape(1, -1), n_col=1,
                     n_row=1)
    plot_gallery('%s - Train time %.1fs' % (name, train_time),
                 components_[:n_components])
plt.show()
| {
"content_hash": "811cc41ce3ddfe9da74505d62bfa9b78",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 79,
"avg_line_length": 33.41984732824427,
"alnum_prop": 0.5744632252169941,
"repo_name": "DailyActie/Surrogate-Model",
"id": "dffbc2c3c2df67ed717839a643666f15b13814d0",
"size": "4378",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "01-codes/scikit-learn-master/examples/decomposition/plot_faces_decomposition.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "345"
},
{
"name": "Batchfile",
"bytes": "18746"
},
{
"name": "C",
"bytes": "13004913"
},
{
"name": "C++",
"bytes": "14692003"
},
{
"name": "CMake",
"bytes": "72831"
},
{
"name": "CSS",
"bytes": "303488"
},
{
"name": "Fortran",
"bytes": "7339415"
},
{
"name": "HTML",
"bytes": "854774"
},
{
"name": "Java",
"bytes": "38854"
},
{
"name": "JavaScript",
"bytes": "2432846"
},
{
"name": "Jupyter Notebook",
"bytes": "829689"
},
{
"name": "M4",
"bytes": "1379"
},
{
"name": "Makefile",
"bytes": "48708"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "PHP",
"bytes": "93585"
},
{
"name": "Pascal",
"bytes": "1449"
},
{
"name": "Perl",
"bytes": "1152272"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "34668203"
},
{
"name": "Roff",
"bytes": "5925"
},
{
"name": "Ruby",
"bytes": "92498"
},
{
"name": "Shell",
"bytes": "94698"
},
{
"name": "TeX",
"bytes": "156540"
},
{
"name": "TypeScript",
"bytes": "41691"
}
],
"symlink_target": ""
} |
"""
Test cases for twisted.reflect module.
"""
import weakref
# Twisted Imports
from twisted.trial import unittest
from twisted.python import reflect
class SettableTest(unittest.TestCase):
    """Tests for reflect.Settable: calling the instance with keyword
    arguments should assign matching attributes."""
    def setUp(self):
        self.setter = reflect.Settable()
    def tearDown(self):
        del self.setter
    def testSet(self):
        self.setter(a=1, b=2)
        self.failUnlessEqual(self.setter.a, 1)
        self.failUnlessEqual(self.setter.b, 2)
class AccessorTester(reflect.Accessor):
    """Accessor subclass exercising the set_/get_/del_ hook methods."""
    def set_x(self, x):
        # Assigning x also mirrors the value into y before really storing x.
        self.y = x
        self.reallySet('x',x)
    def get_z(self):
        # Reading z has the side effect of creating attribute q.
        self.q = 1
        return 1
    def del_z(self):
        # Deleting z really removes the q attribute created by get_z.
        self.reallyDel("q")
class AccessorTest(unittest.TestCase):
    """Tests for reflect.Accessor's attribute set/get/delete dispatch."""
    def setUp(self):
        self.tester = AccessorTester()
    def testSet(self):
        self.tester.x = 1
        self.failUnlessEqual(self.tester.x, 1)
        self.failUnlessEqual(self.tester.y, 1)
    def testGet(self):
        self.failUnlessEqual(self.tester.z, 1)
        self.failUnlessEqual(self.tester.q, 1)
    def testDel(self):
        # Evaluate z for its side effect of creating q.
        self.tester.z
        self.failUnlessEqual(self.tester.q, 1)
        del self.tester.z
        self.failUnlessEqual(hasattr(self.tester, "q"), 0)
        self.tester.x = 1
        del self.tester.x
        self.failUnlessEqual(hasattr(self.tester, "x"), 0)
class LookupsTestCase(unittest.TestCase):
    """Test lookup methods."""
    def testClassLookup(self):
        # namedClass resolves a dotted name to the class object itself.
        self.assertEquals(reflect.namedClass("twisted.python.reflect.Summer"), reflect.Summer)
    def testModuleLookup(self):
        # namedModule resolves a dotted name to the module object itself.
        self.assertEquals(reflect.namedModule("twisted.python.reflect"), reflect)
class LookupsTestCaseII(unittest.TestCase):
    """Tests for reflect.namedAny: resolving packages, modules, classes,
    attributes, and the errors raised for invalid or failing names."""
    def testPackageLookup(self):
        import twisted.python
        self.failUnlessIdentical(reflect.namedAny("twisted.python"),
                                 twisted.python)
    def testModuleLookup(self):
        self.failUnlessIdentical(reflect.namedAny("twisted.python.reflect"),
                                 reflect)
    def testClassLookup(self):
        self.failUnlessIdentical(reflect.namedAny("twisted.python."
                                                  "reflect.Summer"),
                                 reflect.Summer)
    def testAttributeLookup(self):
        # Note - not failUnlessIdentical because unbound method lookup
        # creates a new object every time. This is a foolishness of
        # Python's object implementation, not a bug in Twisted.
        self.failUnlessEqual(reflect.namedAny("twisted.python."
                                              "reflect.Summer.reallySet"),
                             reflect.Summer.reallySet)
    def testSecondAttributeLookup(self):
        self.failUnlessIdentical(reflect.namedAny("twisted.python."
                                                  "reflect.Summer."
                                                  "reallySet.__doc__"),
                                 reflect.Summer.reallySet.__doc__)
    def testExceptionHandling(self):
        # If the namedAny causes a module to be imported, errors in the
        # import should not be masked.
        self.assertRaises(
            ZeroDivisionError,
            reflect.namedAny, "twisted.test.reflect_helper_ZDE")
        self.assertRaises(
            ValueError,
            reflect.namedAny, "twisted.test.reflect_helper_VE")
        # And attributes that don't exist should raise an AttributeError
        self.assertRaises(
            AttributeError,
            reflect.namedAny, "twisted.nosuchmoduleintheworld")
        self.assertRaises(
            AttributeError,
            reflect.namedAny, "twisted.python.reflect.Summer.nosuchattributeintheworld")
        # Finally, invalid module names should raise a ValueError
        self.assertRaises(
            ValueError,
            reflect.namedAny, "")
        self.assertRaises(
            ValueError,
            reflect.namedAny, "12345")
        self.assertRaises(
            ValueError,
            reflect.namedAny, "@#$@(#.!@(#!@#")
        self.assertRaises(
            ValueError,
            reflect.namedAny, "tcelfer.nohtyp.detsiwt")
class ObjectGrep(unittest.TestCase):
    """Tests for reflect.objgrep: finding the path(s) by which a target
    object is reachable from a root object."""
    def testDictionary(self):
        # "[k]" marks a dict value; "{k}" marks a dict key.
        o = object()
        d1 = {None: o}
        d2 = {o: None}
        self.assertIn("[None]", reflect.objgrep(d1, o, reflect.isSame))
        self.assertIn("{None}", reflect.objgrep(d2, o, reflect.isSame))
    def testList(self):
        o = object()
        L = [None, o]
        self.assertIn("[1]", reflect.objgrep(L, o, reflect.isSame))
    def testTuple(self):
        o = object()
        T = (o, None)
        self.assertIn("[0]", reflect.objgrep(T, o, reflect.isSame))
    def testInstance(self):
        # ".attr" marks an instance attribute.
        class Dummy:
            pass
        o = object()
        d = Dummy()
        d.o = o
        self.assertIn(".o", reflect.objgrep(d, o, reflect.isSame))
    def testWeakref(self):
        # "()" marks a dereferenced weakref.
        class Dummy:
            pass
        o = Dummy()
        w1 = weakref.ref(o)
        self.assertIn("()", reflect.objgrep(w1, o, reflect.isSame))
    def testBoundMethod(self):
        # Bound methods are traversed via im_self/im_class/im_func (Python 2).
        class Dummy:
            def dummy(self):
                pass
        o = Dummy()
        m = o.dummy
        self.assertIn(".im_self", reflect.objgrep(m, m.im_self, reflect.isSame))
        self.assertIn(".im_class", reflect.objgrep(m, m.im_class, reflect.isSame))
        self.assertIn(".im_func", reflect.objgrep(m, m.im_func, reflect.isSame))
    def testEverything(self):
        # One deep chain combining weakref, method, attribute, dict, list,
        # tuple, and dict-key path segments.
        class Dummy:
            def method(self):
                pass
        o = Dummy()
        D1 = {(): "baz", None: "Quux", o: "Foosh"}
        L = [None, (), D1, 3]
        T = (L, {}, Dummy())
        D2 = {0: "foo", 1: "bar", 2: T}
        i = Dummy()
        i.attr = D2
        m = i.method
        w = weakref.ref(m)
        self.assertIn("().im_self.attr[2][0][2]{'Foosh'}", reflect.objgrep(w, o, reflect.isSame))
    def testDepthLimit(self):
        # maxDepth bounds how many path segments objgrep will descend.
        a = []
        b = [a]
        c = [a, b]
        d = [a, c]
        self.assertEquals(['[0]'], reflect.objgrep(d, a, reflect.isSame, maxDepth=1))
        self.assertEquals(['[0]', '[1][0]'], reflect.objgrep(d, a, reflect.isSame, maxDepth=2))
        self.assertEquals(['[0]', '[1][0]', '[1][1][0]'], reflect.objgrep(d, a, reflect.isSame, maxDepth=3))
class GetClass(unittest.TestCase):
    """Tests for reflect.getClass on old-style and new-style classes."""
    def testOld(self):
        class OldClass:
            pass
        old = OldClass()
        # Old-style class objects report as 'class'/'classobj' depending on
        # the Python version.
        self.assertIn(reflect.getClass(OldClass).__name__, ('class', 'classobj'))
        self.assertEquals(reflect.getClass(old).__name__, 'OldClass')
    def testNew(self):
        class NewClass(object):
            pass
        new = NewClass()
        self.assertEquals(reflect.getClass(NewClass).__name__, 'type')
        self.assertEquals(reflect.getClass(new).__name__, 'NewClass')
class Breakable(object):
    """Object whose __str__ / __repr__ can be switched to raise on demand.

    Used to exercise the error handling in reflect.safe_str/safe_repr.
    """

    breakRepr = False
    breakStr = False

    def __str__(self):
        if not self.breakStr:
            return '<Breakable>'
        raise self

    def __repr__(self):
        if not self.breakRepr:
            return 'Breakable()'
        raise self
class BrokenType(Breakable, type):
    """Metaclass whose __name__ lookup can be switched to raise."""
    breakName = False
    def get___name__(self):
        if self.breakName:
            raise RuntimeError("no name")
        return 'BrokenType'
    # Expose the getter as the class's __name__ attribute.
    __name__ = property(get___name__)
class BTBase(Breakable):
    """Base class with the BrokenType metaclass and broken str/repr by default."""
    # NOTE(review): `__metaclass__` is the Python 2 metaclass hook; under
    # Python 3 this assignment has no effect.
    __metaclass__ = BrokenType
    breakRepr = True
    breakStr = True
class NoClassAttr(object):
    # __class__ access is routed through a property that reads self.not_class,
    # which is never set, so looking up __class__ raises AttributeError.
    __class__ = property(lambda x: x.not_class)
class SafeRepr(unittest.TestCase):
    """Tests that reflect.safe_repr never raises, even for objects whose
    __repr__, __str__, __class__ or __name__ are broken."""
    def testWorkingRepr(self):
        # For well-behaved objects safe_repr matches repr.
        x = [1,2,3]
        self.assertEquals(reflect.safe_repr(x), repr(x))
    def testBrokenRepr(self):
        b = Breakable()
        b.breakRepr = True
        reflect.safe_repr(b)
    def testBrokenStr(self):
        b = Breakable()
        b.breakStr = True
        reflect.safe_repr(b)
    def testBrokenClassRepr(self):
        class X(BTBase):
            breakRepr = True
        reflect.safe_repr(X)
        reflect.safe_repr(X())
    def testBrokenClassStr(self):
        class X(BTBase):
            breakStr = True
        reflect.safe_repr(X)
        reflect.safe_repr(X())
    def testBroken__Class__Attr(self):
        reflect.safe_repr(NoClassAttr())
    def testBroken__Class__Name__Attr(self):
        class X(BTBase):
            breakName = True
        reflect.safe_repr(X())
class SafeStr(unittest.TestCase):
    """Tests that reflect.safe_str never raises, even for objects whose
    __str__, __repr__, __class__ or __name__ are broken."""
    def testWorkingStr(self):
        # For well-behaved objects safe_str matches str.
        x = [1,2,3]
        self.assertEquals(reflect.safe_str(x), str(x))
    def testBrokenStr(self):
        b = Breakable()
        b.breakStr = True
        reflect.safe_str(b)
    def testBrokenRepr(self):
        b = Breakable()
        b.breakRepr = True
        reflect.safe_str(b)
    def testBrokenClassStr(self):
        class X(BTBase):
            breakStr = True
        reflect.safe_str(X)
        reflect.safe_str(X())
    def testBrokenClassRepr(self):
        class X(BTBase):
            breakRepr = True
        reflect.safe_str(X)
        reflect.safe_str(X())
    def testBroken__Class__Attr(self):
        reflect.safe_str(NoClassAttr())
    def testBroken__Class__Name__Attr(self):
        class X(BTBase):
            breakName = True
        reflect.safe_str(X())
| {
"content_hash": "edf074265a027a97c23f36014c31283a",
"timestamp": "",
"source": "github",
"line_count": 327,
"max_line_length": 108,
"avg_line_length": 28.434250764525995,
"alnum_prop": 0.5680791568079157,
"repo_name": "santisiri/popego",
"id": "6423f2e0d8b2320ca6d2f513a3a2b2233f92dfc3",
"size": "9383",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "envs/ALPHA-POPEGO/lib/python2.5/site-packages/twisted/test/test_reflect.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1246"
},
{
"name": "C",
"bytes": "504141"
},
{
"name": "C++",
"bytes": "26125"
},
{
"name": "CSS",
"bytes": "342653"
},
{
"name": "FORTRAN",
"bytes": "4872"
},
{
"name": "GAP",
"bytes": "13267"
},
{
"name": "Genshi",
"bytes": "407"
},
{
"name": "Groff",
"bytes": "17116"
},
{
"name": "HTML",
"bytes": "383181"
},
{
"name": "JavaScript",
"bytes": "1090769"
},
{
"name": "Makefile",
"bytes": "2441"
},
{
"name": "Mako",
"bytes": "376944"
},
{
"name": "Python",
"bytes": "20895618"
},
{
"name": "Ruby",
"bytes": "3380"
},
{
"name": "Shell",
"bytes": "23581"
},
{
"name": "Smarty",
"bytes": "522"
},
{
"name": "TeX",
"bytes": "35712"
}
],
"symlink_target": ""
} |
import os
import re
from time import sleep
from opensextant import Place
from opensextant.gazetteer import DB, estimate_name_bias, GazetteerIndex, get_default_db
from opensextant.utility import replace_diacritics, load_list, get_list
def filter_out_feature(pl: Place, feats):
    """
    Return True when this place should be filtered out (not indexed).

    A place is filtered out when its name is trivially short (< 2 chars), or
    when its "CLASS/CODE" feature designation matches one of the exclusion
    patterns.  Places with no feature code are always kept.

    NOTE(review): an earlier description claimed long names (> 20 chars and/or
    > 2 words) are exempt from feature filtering; the code below does not
    implement that exemption -- confirm which behavior is intended.

    :param pl: Place object
    :param feats: iterable of compiled regex patterns matched against "CLASS/CODE"
    :return: True if the place should be omitted.
    """
    if not pl.feature_code:
        return False
    # Ignore trivial names:
    plen = len(pl.name)
    if plen < 2:
        return True
    fc = f"{pl.feature_class}/{pl.feature_code}"
    for feat_filter in feats:
        if feat_filter.match(fc):
            return True
    return False
def filter_in_feature(pl: Place, feats):
    """
    Return True when the place's "CLASS/CODE" designation matches any
    inclusion pattern.  An empty/None filter list includes everything.

    :param pl: Place
    :param feats: feature filters (compiled regex)
    :return: bool
    """
    if not feats:
        return True
    designation = f"{pl.feature_class}/{pl.feature_code}"
    return any(pattern.match(designation) for pattern in feats)
def oddball_omissions(pl: Place):
    """Return True for oddball entries to omit: multi-word RGNE features
    whose final token is a short (<= 3 chars) all-caps code."""
    if pl.feature_code == "RGNE" and " " in pl.name:
        final_token = pl.name.split(" ")[-1]
        return final_token.isupper() and len(final_token) <= 3
    # Awaiting other omission clauses here.
    return False
class Finalizer:
    """Post-processing for the master gazetteer SQLite DB: place-ID repair,
    name-bias adjustment, de-duplication, and indexing into Solr."""
    def __init__(self, dbf, debug=False):
        # dbf: path to the gazetteer SQLite database file.
        self.db = DB(dbf)
        self.debug = debug
        # Seconds to pause between countries during indexing.
        self.inter_country_delay = 2
    def adjust_place_id(self):
        """Repair rows with the placeholder place_id 'N-1' by borrowing the ID
        of a matching feature nearby (same country/ADM1/feature, shrinking
        geohash prefix).  Only unambiguous single-candidate matches are used."""
        self.db.create_indices()
        decisions = {}
        for pl in self.db.list_places(criteria=" where place_id = 'N-1'"):
            if not pl.geohash:
                continue
            # NE outputs place id of NULL or "-1". This should rectify that by pulling in a decent place ID
            # Geohash is a horrible way to filter data -- certain states just sit on the boundary of certain boxes.
            for ghlen in [3, 2, 1, 0]:
                gh = pl.geohash[0:ghlen]
                key = f"{pl.country_code}#{pl.adm1}#{pl.feature_class}#{pl.feature_code}#{gh}"
                distinct_ids = set([])
                if key in decisions:
                    distinct_ids.add(decisions[key])
                else:
                    # Avoid too much SQL queries ... record decisions made
                    criteria = f""" and feat_code='{pl.feature_code}' 
                                    and adm1 = '{pl.adm1}' 
                                    and place_id != 'N-1'
                                    and geohash like '{gh}%'
                                 """
                    for model_pl in self.db.list_places(cc=pl.country_code, fc=pl.feature_class, criteria=criteria):
                        distinct_ids.add(model_pl.place_id)
                #
                print("IDS for:", pl.name, distinct_ids)
                if len(distinct_ids) == 1:
                    # Exactly one candidate: record and apply it, stop widening.
                    plid = distinct_ids.pop()
                    decisions[key] = plid
                    self.db.update_place_id(pl.id, plid)
                    break
                elif len(distinct_ids) > 1:
                    print("Ambiguous place ID resolution - no adjustment: ", pl.name, pl.country_code)
        self.db.commit()
    def adjust_bias(self):
        """Re-estimate name_bias for significant short names (capitals,
        ADM1s, countries) so they are not excluded as 'too common'."""
        self.db.create_indices()
        print("Adjust Biasing")
        # Fix significant place names: When loading gazetteer data for the first time,
        # you do not have a global awareness of names/geography -- so things like biasing "common words" in wordstats
        # leads us to mark valid common city names as "too common" and they are filtered out.
        # Example: if you encounter "Beijing" (P/PPLX) is seen first and is marked as a common word
        # and then "Beijing" (P/PPLC) is seen and exempted. You have two conflicting conclusions on the name "Beijing".
        # The result being that only the captial Beijing will ever be used in tagging.
        # FIX: loop through all names fc = "A" and P/PPLC, major cities population 200,000 or greater.
        # re-mark the name_bias to positive if determined to be common words previously.
        names_done = set([])
        count_adjusted = 0
        # admin_names = self.db.list_admin_names()
        flag_fix_major_place_names = True
        flag_fix_admin_codes = False  # Addressed in-line through PlaceHueristics
        if flag_fix_major_place_names:
            # ============================================
            # Find Names < 30 chars in general name_group where the names represent significant features.
            # Recode those feature/names so ANY row by that name is not excluded by the "too common" judgement
            sql_clause = """ where 
                    source in ('U', 'N', 'G') 
                    and name_type='N' 
                    and name_group=''
                    and LENGTH(name) < 30 
                    and feat_class in ('A', 'P') 
                    and feat_code in ('ADM1', 'PPLC', 'PCL', 'PCLI') 
                    and name NOT like '% %' order by name
                """
            for pl in self.db.list_places(criteria=sql_clause):
                # Consider both the raw name and its ASCII-folded variant.
                names = {pl.name, replace_diacritics(pl.name)}
                for name in names:
                    if name.lower() in names_done:
                        continue
                    if len(name) < 4:
                        continue
                    print(f"ADJUST: {name}")
                    name_bias = estimate_name_bias(name)
                    # ADJUSTED here is an approximation.
                    count_adjusted += 1
                    # For each name, remark each name and it variants.
                    self.db.update_bias_by_name(name_bias, name)
                    names_done.add(name.lower())
            self.db.commit()
        if flag_fix_admin_codes:
            # ============================================
            # Flip search_only for ADM1/ADM2 codes not in the ignore list.
            flip_ids = []
            non_place_codes = os.path.join('etc', 'gazetteer', 'filters', 'non-placenames,admin-codes.csv')
            IGNORE_CODES = set(load_list(non_place_codes))
            for pl in self.db.list_places(fc="A",
                                          criteria=" and name_type='C' and feat_code in ('ADM1','ADM2') and search_only=1"):
                if pl.name in IGNORE_CODES:
                    continue
                flip_ids.append(pl.id)
                print(pl)
            # Any rows found -- flip their search_only status.
            self.db.update_bias(10, flip_ids)
            self.db.commit()
    def deduplicate(self):
        """
        Finalize the gazetteer database to include any cleanup, deduplication, etc.
        :return:
        """
        #
        # Finalize reviews places by country to identify distinct features to promote as primary
        # entries and any "duplicates" can be marked as duplicate(dup=1). Primary entries are those such as:
        #
        # - Unique entries
        # - Having attributes such as non-zero id or name bias
        # - NGA & USGS gazetteers by default -- other gazetteers will be overlaid where name variants are offered
        #   for same feature / point ID / location
        #
        # Easiest way to break down gazetteer is:
        # - by Country
        # - by feature class or empty non-feature. ... Resolve any low-quality entries with empty feature.
        self.db.create_indices()
        cc_list = self.db.list_countries()
        BASE_SOURCES = {"OA", "OG", "U", "UF", "N", "NF", "ISO"}
        for cc in cc_list:
            # Collect entries with
            print(f"Country '{cc}'")
            # base sources via OpenSextant gazetteer: OA, OG, U, UF, N, NF
            base_sources = ",".join([f'"{src}"' for src in BASE_SOURCES])
            keys = set([])
            duplicates = []
            # Collect all duplicate names within USGS and NGA base layers
            sql = f"""select id, feat_class, source, geohash, adm1, name, place_id 
                       from placenames where cc='{cc}' and source in ({base_sources}) and duplicate=0"""
            self._collect_duplicates(sql, keys, duplicates, label="Base")
            # De-duplicate other sources that leverage USGS/NGA as base sources.
            sql = f"""select id, feat_class, source, geohash, adm1, name, place_id 
                       from placenames where cc='{cc}' and source not in ({base_sources}) and duplicate=0"""
            self._collect_duplicates(sql, keys, duplicates, label="Other Sources")
            self.db.mark_duplicates(duplicates)
        print("Complete De-duplicating")
    def _collect_duplicates(self, sql, keys, dups, label="NA"):
        """
        specialized sql row is dictionary of "id, feat_class, source, geohash, adm1, name, place_id "
        Appends row ids of duplicates to `dups`; `keys` tracks seen
        feature/location/admin/name combinations across calls.
        :param sql:
        :param keys:
        :param dups:
        :param label:
        :return:
        """
        for row in self.db.conn.execute(sql):
            fc = row["feat_class"]
            loc = row["geohash"]
            nm = row["name"].lower()
            a1 = row["adm1"]
            # 5-char geohash prefix groups nearby features as one location.
            k = f"{fc}/{loc[0:5]}/{a1}/{nm}"
            if k in keys:
                if self.debug: print(f"{label} dup: ", row["id"])
                dups.append(row["id"])
            else:
                # Unique entry
                keys.add(k)
    def index(self, url, features=None, ignore_features=None, ignore_func=None,
              ignore_digits=True, ignore_names=False, limit=-1, countries=[]):
        """
        :param url: Gazetteer URL
        :param features: features to index
        :param ignore_features: features to ignore
        :param ignore_func: filter function
        :param ignore_digits: True if indexer should ignore purely numeric names
        :param ignore_names: True if indexer should ignore name_type=N, e.g,. postal
        :param limit:
        :param countries: array of country codes.
        """
        # NOTE(review): `countries=[]` is a mutable default argument; it is
        # only read (never mutated) here, but None would be the safer default.
        print("Xponents Gazetteer Finalizer: INDEX")
        indexer = GazetteerIndex(url)
        # indexer.commit_rate = 100000
        indexer.commit_rate = -1
        #
        # Compile exclusion and inclusion feature patterns once, up front.
        filters = []
        if ignore_features:
            for f in ignore_features:
                filters.append(re.compile(f))
        inclusion_filters = []
        if features:
            for f in features:
                inclusion_filters.append(re.compile(f))
        default_criteria = " and duplicate=0"
        if ignore_names:
            default_criteria = " and duplicate=0 and name_type!='N'"
        # For each row in DB, index to Solr. Maybe organize batches by row ID where dup=0.
        cc_list = countries or self.db.list_countries()
        for cc in cc_list:
            print(f"Country '{cc}'")
            for pl in self.db.list_places(cc=cc, criteria=default_criteria, limit=limit):
                if ignore_func:
                    if ignore_func(pl):
                        continue
                if filter_out_feature(pl, filters):
                    continue
                if ignore_digits and pl.name.isdigit():
                    continue
                if not filter_in_feature(pl, inclusion_filters):
                    continue
                indexer.add(pl)
            sleep(self.inter_country_delay)
            # Done with country -- flush/commit after each country.
            indexer.save(done=True)
        print(f"Indexed {indexer.count}")
        indexer.save(done=True)
    def index_codes(self, url):
        """Index only non-duplicate, non-N name rows (codes/abbreviations)
        into Solr, with no feature or digit filtering."""
        print("Xponents Gazetteer Finalizer: INDEX CODES, ABBREV")
        indexer = GazetteerIndex(url)
        indexer.commit_rate = -1
        default_criteria = " where duplicate=0 and name_type!='N'"
        for pl in self.db.list_places(criteria=default_criteria):
            indexer.add(pl)
        print(f"Indexed {indexer.count}")
        indexer.save(done=True)
class PostalIndexer(Finalizer):
    """Finalizer variant for postal-code data: shorter inter-country delay,
    no finalization pass, and no numeric-name filtering when indexing."""

    def __init__(self, dbf, **kwargs):
        super().__init__(dbf, **kwargs)
        self.inter_country_delay = 1

    def finalize(self, limit=-1):
        """No-op: postal codes are not optimized before indexing."""
        pass

    def index(self, url, ignore_digits=False, **kwargs):
        """Index postal data as-is; purely numeric names are kept."""
        super().index(url, ignore_digits=False, **kwargs)
if __name__ == "__main__":
    from argparse import ArgumentParser
    # CLI driver: run one finalization operation against the gazetteer DB.
    ap = ArgumentParser()
    ap.add_argument("operation", help="adjust-id, dedup, adjust-bias, index -- pick one. Run all three in that order")
    ap.add_argument("--db", default=get_default_db())
    ap.add_argument("--max", help="maximum rows to process for testing", default=-1)
    ap.add_argument("--debug", action="store_true", default=False)
    ap.add_argument("--solr", help="Solr URL")
    ap.add_argument("--optimize", action="store_true", default=False)
    ap.add_argument("--postal", action="store_true", default=False)
    ap.add_argument("--countries", help="list of country codes CC,CC,...")
    args = ap.parse_args()
    gaz = None
    if args.operation == "index" and args.solr:
        cclist = []
        if args.countries:
            cclist = get_list(args.countries)
        # Features not as present in general data include: WELLS, STREAMS, SPRINGS, HILLS.
        #
        if args.postal:
            # Postal Codes
            gaz = PostalIndexer(args.db, debug=args.debug)
            gaz.stop_filters = None
            gaz.index(args.solr, ignore_digits=False, limit=int(args.max), countries=cclist)
        else:
            # General gazetteer: omit minor hydrographic/terrain features.
            gaz = Finalizer(args.db, debug=args.debug)
            gaz.index(args.solr, ignore_digits=True, limit=int(args.max), countries=cclist,
                      ignore_func=oddball_omissions,
                      ignore_features={"H/WLL.*",
                                       "H/STM[ABCDHIQSBX]+",
                                       "H/SPNG.*",
                                       "T/HLL.*"})
    elif args.operation == "adjust-id":
        gaz = Finalizer(args.db, debug=args.debug)
        gaz.adjust_place_id()
    elif args.operation == "adjust-bias":
        gaz = Finalizer(args.db, debug=args.debug)
        gaz.adjust_bias()
    elif args.operation == "dedup":
        gaz = Finalizer(args.db, debug=args.debug)
        gaz.deduplicate()
    # Finish up.
    if gaz:
        if args.optimize:
            gaz.db.optimize()
        gaz.db.close()
| {
"content_hash": "54135b4d612c21c5077becf85c9c420d",
"timestamp": "",
"source": "github",
"line_count": 358,
"max_line_length": 124,
"avg_line_length": 40.857541899441344,
"alnum_prop": 0.5528132904901893,
"repo_name": "OpenSextant/Xponents",
"id": "6e74cdec0891ef9029156715ca5347c6c46d30ed",
"size": "14627",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "solr/script/gaz_finalize.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "72623"
},
{
"name": "Java",
"bytes": "1344792"
},
{
"name": "Python",
"bytes": "343525"
},
{
"name": "Shell",
"bytes": "106459"
}
],
"symlink_target": ""
} |
"""
This is the main module, the main interface classes and functions
are available in the top level hid package
"""
from __future__ import absolute_import
from __future__ import print_function
import sys
import ctypes
import threading
import collections
if sys.version_info >= (3,):
import winreg
else:
import _winreg as winreg
from ctypes import c_ubyte, c_ulong, c_ushort, c_wchar, byref, sizeof, \
create_unicode_buffer
from ctypes.wintypes import DWORD
#local modules
from . import helpers
HIDError = helpers.HIDError
from . import winapi
setup_api = winapi.setup_api
hid_dll = winapi.hid_dll
HidP_Input = winapi.HidP_Input
HidP_Output = winapi.HidP_Output
HidP_Feature = winapi.HidP_Feature
HidStatus = winapi.HidStatus
# Upper bound used for HID string descriptor buffers
MAX_HID_STRING_LENGTH = 128

if not hasattr(threading.Thread, "is_alive"):
    # in python <2.6 is_alive was called isAlive
    threading.Thread.is_alive = threading.Thread.isAlive

# HID usage values are 16-bit unsigned integers at the Windows API level
USAGE = c_ushort
INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value

# Event kinds accepted by HidDevice.add_event_handler; the list-unpack
# assigns 0..6 to the names while also keeping them grouped in USAGE_EVENTS.
USAGE_EVENTS = [
    HID_EVT_NONE,
    HID_EVT_ALL,
    HID_EVT_CHANGED,
    HID_EVT_PRESSED,
    HID_EVT_RELEASED,
    HID_EVT_SET,
    HID_EVT_CLEAR,
] = list(range(7))
def get_full_usage_id(page_id, usage_id):
    """Pack a 16-bit usage page and a 16-bit usage id into one 32-bit value."""
    high_bits = page_id << 16
    return high_bits | usage_id
def get_usage_page_id(full_usage_id):
    """Extract the high 16 bits (usage page) from a composite usage id."""
    shifted = full_usage_id >> 16
    return shifted & 0xffff
def get_short_usage_id(full_usage_id):
    """Extract the low 16 bits (usage id) from a composite usage id."""
    low_mask = 0xffff
    return full_usage_id & low_mask
def hid_device_path_exists(device_path, guid = None):
    """Return True when *device_path* still resolves to a connected HID
    interface (i.e. the device is still plugged into the host)."""
    # default to the HID device-class GUID
    if not guid:
        guid = winapi.GetHidGuid()
    info_data = winapi.SP_DEVINFO_DATA()
    info_data.cb_size = sizeof(winapi.SP_DEVINFO_DATA)
    with winapi.DeviceInterfaceSetInfo(guid) as h_info:
        # scan every interface of the class, stop on the first path match
        return any(
            winapi.get_device_path(h_info, interface_data, byref(info_data))
                == device_path
            for interface_data in winapi.enum_device_interfaces(h_info, guid))
def find_all_hid_devices():
    """Finds all HID devices connected to the system.

    Returns a list of HidDevice objects, omitting devices whose
    attributes could not be read (vendor_id stays 0 in that case).
    """
    #
    # From DDK documentation (finding and Opening HID collection):
    # After a user-mode application is loaded, it does the following sequence
    # of operations:
    #
    #   * Calls HidD_GetHidGuid to obtain the system-defined GUID for HIDClass
    #     devices.
    #
    #   * Calls SetupDiGetClassDevs to obtain a handle to an opaque device
    #     information set that describes the device interfaces supported by all
    #     the HID collections currently installed in the system. The
    #     application should specify DIGCF_PRESENT and DIGCF_INTERFACEDEVICE
    #     in the Flags parameter passed to SetupDiGetClassDevs.
    #
    #   * Calls SetupDiEnumDeviceInterfaces repeatedly to retrieve all the
    #     available interface information.
    #
    #   * Calls SetupDiGetDeviceInterfaceDetail to format interface information
    #     for each collection as a SP_INTERFACE_DEVICE_DETAIL_DATA structure.
    #     The device_path member of this structure contains the user-mode name
    #     that the application uses with the Win32 function CreateFile to
    #     obtain a file handle to a HID collection.
    #
    # get HID device class guid
    guid = winapi.GetHidGuid()

    # retrieve all the available interface information.
    results = []
    required_size = DWORD()
    info_data = winapi.SP_DEVINFO_DATA()
    info_data.cb_size = sizeof(winapi.SP_DEVINFO_DATA)

    with winapi.DeviceInterfaceSetInfo(guid) as h_info:
        for interface_data in winapi.enum_device_interfaces(h_info, guid):
            device_path = winapi.get_device_path(h_info,
                    interface_data,
                    byref(info_data))
            parent_device = c_ulong()
            #get parent instance id (so we can discriminate on port)
            if setup_api.CM_Get_Parent(byref(parent_device),
                    info_data.dev_inst, 0) != 0: #CR_SUCCESS = 0
                parent_device.value = 0 #null
            #get unique instance id string
            # first call only queries the required buffer size
            required_size.value = 0
            winapi.SetupDiGetDeviceInstanceId(h_info, byref(info_data),
                    None, 0,
                    byref(required_size) )
            device_instance_id = create_unicode_buffer(required_size.value)
            if required_size.value > 0:
                winapi.SetupDiGetDeviceInstanceId(h_info, byref(info_data),
                        device_instance_id, required_size,
                        byref(required_size) )
                hid_device = HidDevice(device_path,
                        parent_device.value, device_instance_id.value )
            else:
                hid_device = HidDevice(device_path, parent_device.value )
            # add device to results, if not protected
            # (vendor_id == 0 means HidDevice.__init__ could not open /
            # query the device, e.g. access denied)
            if hid_device.vendor_id:
                results.append(hid_device)
    return results
class HidDeviceFilter(object):
    """This class allows searching for HID devices currently connected to
    the system, it also allows to search for specific devices (by filtering).

    Filter parameters are given as keyword arguments and matched against
    HidDevice attributes listed in HidDevice.filter_attributes. Two
    modifiers are supported per attribute:
      * ``<attr>_mask``     -- compare only the masked bits of the value
      * ``<attr>_includes`` -- substring/membership test on the value
    """
    def __init__(self, **kwrds):
        """Initialize filter from a named target parameters.
        I.e. product_id=0x0123
        """
        self.filter_params = kwrds

    def get_devices_by_parent(self, hid_filter=None):
        """Group devices returned from filter query by device parent id.

        Returns a dict mapping parent instance id -> list of HidDevice.
        """
        all_devs = self.get_devices(hid_filter)
        dev_group = dict()
        for hid_device in all_devs:
            #keep a list of known devices matching parent device Ids
            parent_id = hid_device.get_parent_instance_id()
            device_set = dev_group.get(parent_id, [])
            device_set.append(hid_device)
            if parent_id not in dev_group:
                #add new
                dev_group[parent_id] = device_set
        return dev_group

    def get_devices(self, hid_filter = None):
        """Filter a HID device list by current object parameters. Devices
        must match all of the filtering parameters.

        :param hid_filter: candidate device list; None queries all
            currently connected devices, an empty list is returned as-is.
        """
        if not hid_filter: #empty list or called without any parameters
            # distinguish "no argument" (None) from an explicit empty list
            if hid_filter is None:
                #request to query connected devices
                hid_filter = find_all_hid_devices()
            else:
                return hid_filter
        #initially all accepted; dict keyed by device preserves order
        results = dict.fromkeys(hid_filter)
        #the filter parameters
        validating_attributes = list(self.filter_params.keys())
        #first filter out restricted access devices
        if not results:
            return {}
        for device in list(results.keys()):
            if not device.is_active():
                del results[device]
        if not results:
            return {}
        #filter out
        for item in validating_attributes:
            # normalize "<attr>_includes"/"<attr>_mask" to the bare attribute
            if item.endswith("_includes"):
                item = item[:-len("_includes")]
            elif item.endswith("_mask"):
                item = item[:-len("_mask")]
            elif item +"_mask" in self.filter_params or item + "_includes" \
                    in self.filter_params:
                continue # value mask or string search is being queried
            elif item not in HidDevice.filter_attributes:
                continue # field does not exist sys.error.write(...)
            #start filtering out
            for device in list(results.keys()):
                if not hasattr(device, item):
                    del results[device]
                elif item + "_mask" in validating_attributes:
                    #masked value: compare only the bits selected by the mask
                    if getattr(device, item) & self.filter_params[item + \
                            "_mask"] != self.filter_params[item] \
                            & self.filter_params[item + "_mask"]:
                        del results[device]
                elif item + "_includes" in validating_attributes:
                    #subset item (substring / membership)
                    if self.filter_params[item + "_includes"] not in \
                            getattr(device, item):
                        del results[device]
                else:
                    #plain comparison
                    if getattr(device, item) != self.filter_params[item]:
                        del results[device]
        #
        return list(results.keys())
# Buffer size (characters) for CM_Get_Device_ID instance id strings
MAX_DEVICE_ID_LEN = 200 + 1 #+EOL (just in case)
class HidDeviceBaseClass(object):
    """Common ancestor for HID device classes.

    Holds the single lock used to serialize raw-report processing
    across all device instances.
    """
    # shared across every instance: raw report handling is globally serialized
    _raw_reports_lock = threading.Lock()

    def __init__(self):
        """No per-instance state is needed at this level."""
class HidDevice(HidDeviceBaseClass):
    """This class is the main interface to physical HID devices"""
    # Win32 HID string buffer sizes, in characters (value + null terminator)
    MAX_MANUFACTURER_STRING_LEN = 128 #it's actually 126 + 1 (null)
    MAX_PRODUCT_STRING_LEN = 128 #it's actually 126 + 1 (null)
    MAX_SERIAL_NUMBER_LEN = 64
    # attribute names that HidDeviceFilter is allowed to match against
    filter_attributes = ["vendor_id", "product_id", "version_number",
        "product_name", "vendor_name"]
def get_parent_instance_id(self):
"""Retreive system instance id (numerical value)"""
return self.parent_instance_id
def get_parent_device(self):
"""Retreive parent device string id"""
if not self.parent_instance_id:
return ""
dev_buffer_type = winapi.c_tchar * MAX_DEVICE_ID_LEN
dev_buffer = dev_buffer_type()
try:
if winapi.CM_Get_Device_ID(self.parent_instance_id, byref(dev_buffer),
MAX_DEVICE_ID_LEN, 0) == 0: #success
return dev_buffer.value
return ""
finally:
del dev_buffer
del dev_buffer_type
    def __init__(self, device_path, parent_instance_id = 0, instance_id=""):
        """Interface for HID device as referenced by device_path parameter.

        Opens the device briefly to snapshot its attributes (vendor/product
        ids, strings); on any failure the ids stay 0, which callers treat
        as "inaccessible" (see find_all_hid_devices / is_active).
        """
        #allow safe access (and object browsing)
        self.__open_status = False
        self.__input_report_templates = dict()

        #initialize hardware related vars
        self.__button_caps_storage = list()
        self.report_set = dict()
        self.__evt_handlers = dict()
        self.__reading_thread = None
        self.__input_processing_thread = None
        self.__raw_handler = None
        self._input_report_queue = None
        self.hid_caps = None
        self.ptr_preparsed_data = None
        self.hid_handle = None
        self.usages_storage = dict()

        self.device_path = device_path
        self.instance_id = instance_id
        self.parent_instance_id = parent_instance_id
        self.product_name = ""
        self.vendor_name = ""
        self.serial_number = ""
        self.vendor_id = 0
        self.product_id = 0
        self.version_number = 0
        HidDeviceBaseClass.__init__(self)

        # HID device handle first; opened read/write only to query metadata
        h_hid = INVALID_HANDLE_VALUE
        try:
            h_hid = int( winapi.CreateFile(device_path,
                winapi.GENERIC_READ | winapi.GENERIC_WRITE,
                winapi.FILE_SHARE_READ | winapi.FILE_SHARE_WRITE,
                None, winapi.OPEN_EXISTING, 0, 0))
        except:
            pass

        if h_hid == INVALID_HANDLE_VALUE:
            return

        try:
            # get device attributes (vendor/product/version)
            hidd_attributes = winapi.HIDD_ATTRIBUTES()
            hidd_attributes.cb_size = sizeof(hidd_attributes)
            if not hid_dll.HidD_GetAttributes(h_hid, byref(hidd_attributes)):
                del hidd_attributes
                return #can't read attributes

            #set local references
            self.vendor_id = hidd_attributes.vendor_id
            self.product_id = hidd_attributes.product_id
            self.version_number = hidd_attributes.version_number
            del hidd_attributes

            # manufacturer string
            vendor_string_type = c_wchar * self.MAX_MANUFACTURER_STRING_LEN
            vendor_name = vendor_string_type()
            if not hid_dll.HidD_GetManufacturerString(h_hid,
                    byref(vendor_name),
                    sizeof(vendor_name)) or not len(vendor_name.value):
                # would be any possibility to get a vendor id table?,
                # maybe not worth it
                self.vendor_name = "Unknown manufacturer"
            else:
                self.vendor_name = vendor_name.value
            del vendor_name
            del vendor_string_type

            # string buffer for product string
            product_name_type = c_wchar * self.MAX_PRODUCT_STRING_LEN
            product_name = product_name_type()
            if not hid_dll.HidD_GetProductString(h_hid,
                    byref(product_name),
                    sizeof(product_name)) or not len(product_name.value):
                # alternate method, refer to windows registry for product
                # information (device_path encodes the Enum registry key parts)
                path_parts = device_path[len("\\\\.\\"):].split("#")
                h_register = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
                    "SYSTEM\\CurrentControlSet\\Enum\\" + \
                    path_parts[0] + "\\" + \
                    path_parts[1] + "\\" + \
                    path_parts[2] )
                self.product_name, other = winreg.QueryValueEx(h_register,
                        "DeviceDesc")
                winreg.CloseKey(h_register)
            else:
                self.product_name = product_name.value
            del product_name
            del product_name_type

            # serial number string
            serial_number_string = c_wchar * self.MAX_SERIAL_NUMBER_LEN
            serial_number = serial_number_string()
            if not hid_dll.HidD_GetSerialNumberString(h_hid,
                    byref(serial_number),
                    sizeof(serial_number)) or not len(serial_number.value):
                self.serial_number = ""
            else:
                self.serial_number = serial_number.value
            del serial_number
            del serial_number_string
        finally:
            # clean up: the metadata handle is never kept; open() creates
            # the long-lived overlapped handle later
            winapi.CloseHandle(h_hid)
def is_active(self):
"""Poll if device is still valid"""
if not self.vendor_id:
return False
return True
    def open(self, output_only = False, shared = True):
        """Open HID device and obtain 'Collection Information'.
        It effectively prepares the HidDevice object for reading and writing.

        :param output_only: skip creating the input-report queue/threads
        :param shared: open with FILE_SHARE_READ|FILE_SHARE_WRITE
        :raises HIDError: if already opened or any Win32 setup step fails
        """
        if self.is_opened():
            raise HIDError("Device already opened")
        sharing_flags = 0
        if shared:
            sharing_flags = winapi.FILE_SHARE_READ | winapi.FILE_SHARE_WRITE
        # overlapped flag: reads/writes on this handle are asynchronous
        hid_handle = winapi.CreateFile(
            self.device_path,
            winapi.GENERIC_READ | winapi.GENERIC_WRITE,
            sharing_flags,
            None, # no security
            winapi.OPEN_EXISTING,
            winapi.FILE_ATTRIBUTE_NORMAL | winapi.FILE_FLAG_OVERLAPPED,
            0 )

        if not hid_handle or hid_handle == INVALID_HANDLE_VALUE:
            raise HIDError("Error opening HID device: %s\n"%self.product_name)

        #get pre parsed data
        ptr_preparsed_data = ctypes.c_void_p()
        if not hid_dll.HidD_GetPreparsedData(int(hid_handle),
                byref(ptr_preparsed_data)):
            winapi.CloseHandle(int(hid_handle))
            raise HIDError("Failure to get HID pre parsed data")

        self.ptr_preparsed_data = ptr_preparsed_data
        self.hid_handle = hid_handle

        #get top level capabilities
        self.hid_caps = winapi.HIDP_CAPS()
        HidStatus( hid_dll.HidP_GetCaps(ptr_preparsed_data,
            byref(self.hid_caps)) )

        #proceed with button capabilities
        caps_length = c_ulong()

        # (report kind, ctypes struct, item count, API getter) for every
        # button/value capability table the device exposes
        all_items = [\
            (HidP_Input, winapi.HIDP_BUTTON_CAPS,
                self.hid_caps.number_input_button_caps,
                hid_dll.HidP_GetButtonCaps
            ),
            (HidP_Input, winapi.HIDP_VALUE_CAPS,
                self.hid_caps.number_input_value_caps,
                hid_dll.HidP_GetValueCaps
            ),
            (HidP_Output, winapi.HIDP_BUTTON_CAPS,
                self.hid_caps.number_output_button_caps,
                hid_dll.HidP_GetButtonCaps
            ),
            (HidP_Output, winapi.HIDP_VALUE_CAPS,
                self.hid_caps.number_output_value_caps,
                hid_dll.HidP_GetValueCaps
            ),
            (HidP_Feature, winapi.HIDP_BUTTON_CAPS,
                self.hid_caps.number_feature_button_caps,
                hid_dll.HidP_GetButtonCaps
            ),
            (HidP_Feature, winapi.HIDP_VALUE_CAPS,
                self.hid_caps.number_feature_value_caps,
                hid_dll.HidP_GetValueCaps
            ),
        ]

        for report_kind, struct_kind, max_items, get_control_caps in all_items:
            if not int(max_items):
                continue #nothing here
            #create storage for control/data
            ctrl_array_type = struct_kind * max_items
            ctrl_array_struct = ctrl_array_type()

            #target max size for API function
            caps_length.value = max_items
            HidStatus( get_control_caps(\
                report_kind,
                byref(ctrl_array_struct),
                byref(caps_length),
                ptr_preparsed_data) )
            #keep reference of usages
            for idx in range(caps_length.value):
                usage_item = HidPUsageCaps( ctrl_array_struct[idx] )
                #by report type
                if report_kind not in self.usages_storage:
                    self.usages_storage[report_kind] = list()
                self.usages_storage[report_kind].append( usage_item )
                #also add report_id to known reports set
                if report_kind not in self.report_set:
                    self.report_set[report_kind] = set()
                self.report_set[report_kind].add( usage_item.report_id )
            del ctrl_array_struct
            del ctrl_array_type

        # now is the time to consider the device opened, as report
        # handling threads enforce it
        self.__open_status = True

        #now prepare the input report handler
        self.__input_report_templates = dict()
        if not output_only and self.hid_caps.input_report_byte_length and \
                HidP_Input in self.report_set:
            #first make templates for easy parsing input reports
            for report_id in self.report_set[HidP_Input]:
                self.__input_report_templates[report_id] = \
                    HidReport( self, HidP_Input, report_id )
            #prepare input reports handlers
            self._input_report_queue = HidDevice.InputReportQueue( \
                self.max_input_queue_size,
                self.hid_caps.input_report_byte_length)
            self.__input_processing_thread = \
                HidDevice.InputReportProcessingThread(self)
            self.__reading_thread = HidDevice.InputReportReaderThread( \
                self, self.hid_caps.input_report_byte_length)
        # clean up
def get_physical_descriptor(self):
"""Returns physical HID device descriptor
"""
raw_data_type = c_ubyte * 1024
raw_data = raw_data_type()
if hid_dll.HidD_GetPhysicalDescriptor(self.hid_handle,
byref(raw_data), 1024 ):
return [x for x in raw_data]
return []
def send_output_report(self, data):
"""Send input/output/feature report ID = report_id, data should be a
c_ubyte object with included the required report data
"""
assert( self.is_opened() )
#make sure we have c_ubyte array storage
if not ( isinstance(data, ctypes.Array) and \
issubclass(data._type_, c_ubyte) ):
raw_data_type = c_ubyte * len(data)
raw_data = raw_data_type()
for index in range( len(data) ):
raw_data[index] = data[index]
else:
raw_data = data
#
# Adding a lock when writing (overlapped writes)
over_write = winapi.OVERLAPPED()
over_write.h_event = winapi.CreateEvent(None, 0, 0, None)
if over_write.h_event:
try:
overlapped_write = over_write
winapi.WriteFile(int(self.hid_handle), byref(raw_data), len(raw_data),
None, byref(overlapped_write)) #none overlapped
error = ctypes.GetLastError()
if error == winapi.ERROR_IO_PENDING:
# overlapped operation in progress
result = error
elif error == 1167:
raise HIDError("Error device disconnected before write")
else:
raise HIDError("Error %d when trying to write to HID "\
"device: %s"%(error, ctypes.FormatError(error)) )
result = winapi.WaitForSingleObject(overlapped_write.h_event, 10000 )
if result != winapi.WAIT_OBJECT_0:
# If the write times out make sure to
# cancel it, otherwise memory could
# get corrupted if the async write
# completes after this functions returns
winapi.CancelIo( int(self.hid_handle) )
raise HIDError("Write timed out")
finally:
# Make sure the event is closed so resources aren't leaked
winapi.CloseHandle(over_write.h_event)
else:
return winapi.WriteFile(int(self.hid_handle), byref(raw_data),
len(raw_data),
None, None) #none overlapped
return True #completed
def send_feature_report(self, data):
"""Send input/output/feature report ID = report_id, data should be a
c_byte object with included the required report data
"""
assert( self.is_opened() )
#make sure we have c_ubyte array storage
if not ( isinstance(data, ctypes.Array) and issubclass(data._type_,
c_ubyte) ):
raw_data_type = c_ubyte * len(data)
raw_data = raw_data_type()
for index in range( len(data) ):
raw_data[index] = data[index]
else:
raw_data = data
return hid_dll.HidD_SetFeature(int(self.hid_handle), byref(raw_data),
len(raw_data))
    def __reset_vars(self):
        """Reset vars (for init or gc); called from __init__-time setup and
        close() to drop references so resources can be collected."""
        self.__button_caps_storage = list()
        self.usages_storage = dict()
        self.report_set = dict()
        self.ptr_preparsed_data = None
        self.hid_handle = None

        #don't clean up the report queue because the
        #consumer & producer threads might needed it
        self.__evt_handlers = dict()
        #other
        self.__reading_thread = None
        self.__input_processing_thread = None
        self._input_report_queue = None
        #
def is_plugged(self):
"""Check if device still plugged to USB host"""
return self.device_path and hid_device_path_exists(self.device_path)
    def is_opened(self):
        """True after a successful open() and until close() runs."""
        return self.__open_status
    def close(self):
        """Release system resources.

        Shutdown order matters: flip open-status first (threads poll it),
        abort the reader, unblock the queue, abort the processor, free the
        preparsed data, join the reader before closing the handle it reads
        from, then join the processor and drop references.
        """
        # free parsed data
        if not self.is_opened():
            return
        self.__open_status = False

        # abort all running threads first
        if self.__reading_thread and self.__reading_thread.is_alive():
            self.__reading_thread.abort()

        #avoid posting new reports
        if self._input_report_queue:
            self._input_report_queue.release_events()

        if self.__input_processing_thread and \
                self.__input_processing_thread.is_alive():
            self.__input_processing_thread.abort()

        #properly close API handlers and pointers
        if self.ptr_preparsed_data:
            ptr_preparsed_data = self.ptr_preparsed_data
            self.ptr_preparsed_data = None
            hid_dll.HidD_FreePreparsedData(ptr_preparsed_data)

        # wait for the reading thread to complete before closing device handle
        if self.__reading_thread:
            self.__reading_thread.join()

        if self.hid_handle:
            winapi.CloseHandle(self.hid_handle)

        # make sure report procesing thread is closed
        if self.__input_processing_thread:
            self.__input_processing_thread.join()

        #reset vars (for GC)
        button_caps_storage = self.__button_caps_storage
        self.__reset_vars()

        while button_caps_storage:
            item = button_caps_storage.pop()
            del item
def __find_reports(self, report_type, usage_page, usage_id = 0):
"Find input report referencing HID usage control/data item"
if not self.is_opened():
raise HIDError("Device must be opened")
#
results = list()
if usage_page:
for report_id in self.report_set.get( report_type, set() ):
#build report object, gathering usages matching report_id
report_obj = HidReport(self, report_type, report_id)
if get_full_usage_id(usage_page, usage_id) in report_obj:
results.append( report_obj )
else:
#all (any one)
for report_id in self.report_set.get(report_type, set()):
report_obj = HidReport(self, report_type, report_id)
results.append( report_obj )
return results
def count_all_feature_reports(self):
"""Retreive total number of available feature reports"""
return self.hid_caps.number_feature_button_caps + \
self.hid_caps.number_feature_value_caps
    def find_input_reports(self, usage_page = 0, usage_id = 0):
        """Find input reports referencing HID usage item.
        With the defaults (0, 0) every input report is returned."""
        return self.__find_reports(HidP_Input, usage_page, usage_id)
    def find_output_reports(self, usage_page = 0, usage_id = 0):
        """Find output report referencing HID usage control/data item.
        With the defaults (0, 0) every output report is returned."""
        return self.__find_reports(HidP_Output, usage_page, usage_id)
    def find_feature_reports(self, usage_page = 0, usage_id = 0):
        """Find feature report referencing HID usage control/data item.
        With the defaults (0, 0) every feature report is returned."""
        return self.__find_reports(HidP_Feature, usage_page, usage_id)
def find_any_reports(self, usage_page = 0, usage_id = 0):
"""Find any report type referencing HID usage control/data item.
Results are returned in a dictionary mapping report_type to usage
lists.
"""
items = [
(HidP_Input, self.find_input_reports(usage_page, usage_id)),
(HidP_Output, self.find_output_reports(usage_page, usage_id)),
(HidP_Feature, self.find_feature_reports(usage_page, usage_id)),
]
return dict([(t, r) for t, r in items if r])
max_input_queue_size = 20
evt_decision = {
#a=old_value, b=new_value
HID_EVT_NONE: lambda a,b: False,
HID_EVT_ALL: lambda a,b: True, #usage in report
HID_EVT_CHANGED: lambda a,b: a != b,
HID_EVT_PRESSED: lambda a,b: b and not a,
HID_EVT_RELEASED: lambda a,b: a and not b,
HID_EVT_SET: lambda a,b: bool(b),
HID_EVT_CLEAR: lambda a,b: not b,
}
    @helpers.synchronized(HidDeviceBaseClass._raw_reports_lock)
    def _process_raw_report(self, raw_report):
        """Default raw input report data handler.

        Dispatches either to the externally-set raw handler (bypassing
        per-usage events) or to the event-decision machinery driven by the
        pre-parsed report templates. Serialized by the class-wide lock.
        """
        if not self.is_opened():
            return
        if not self.__evt_handlers and not self.__raw_handler:
            return

        if not raw_report[0] and \
                (raw_report[0] not in self.__input_report_templates):
            # windows sends an empty array when disconnecting
            # but, this might have a collision with report_id = 0
            if not hid_device_path_exists(self.device_path):
                #windows XP sends empty report when disconnecting
                self.__reading_thread.abort() #device disconnected
            return

        if self.__raw_handler:
            #this might slow down data throughput, but at the expense of safety
            self.__raw_handler(helpers.ReadOnlyList(raw_report))
            return

        # using pre-parsed report templates, by report id
        report_template = self.__input_report_templates[raw_report[0]]
        # old condition snapshot (usage -> previous value)
        old_values = report_template.get_usages()
        # parse incoming data
        report_template.set_raw_data(raw_report)
        # and compare it
        event_applies = self.evt_decision
        evt_handlers = self.__evt_handlers
        for key in report_template.keys():
            if key not in evt_handlers:
                continue
            #check if event handler exist!
            for event_kind, handlers in evt_handlers[key].items():
                #key=event_kind, values=handler set
                new_value = report_template[key].value
                if not event_applies[event_kind](old_values[key], new_value):
                    continue
                #decision applies, call handlers
                for function_handler in handlers:
                    #check if the application wants some particular parameter
                    if handlers[function_handler]:
                        function_handler(new_value,
                                event_kind, handlers[function_handler])
                    else:
                        function_handler(new_value, event_kind)
    def set_raw_data_handler(self, funct):
        """Set external raw data handler, set to None to restore default.

        While set, incoming raw reports go to *funct* and the per-usage
        event handlers are bypassed (see _process_raw_report).
        """
        self.__raw_handler = funct
def find_input_usage(self, full_usage_id):
"""Check if full usage Id included in input reports set
Parameters:
full_usage_id Full target usage, use get_full_usage_id
Returns:
Report ID as integer value, or None if report does not exist with
target usage. Nottice that report ID 0 is a valid report.
"""
for report_id, report_obj in self.__input_report_templates.items():
if full_usage_id in report_obj:
return report_id
return None #report_id might be 0
def add_event_handler(self, full_usage_id, handler_function,
event_kind = HID_EVT_ALL, aux_data = None):
"""Add event handler for usage value/button changes,
returns True if the handler function was updated"""
report_id = self.find_input_usage(full_usage_id)
if report_id != None:
# allow first zero to trigger changes and releases events
self.__input_report_templates[report_id][full_usage_id].__value = None
if report_id == None or not handler_function:
# do not add handler
return False
assert(isinstance(handler_function, collections.Callable)) # must be a function
# get dictionary for full usages
top_map_handler = self.__evt_handlers.get(full_usage_id, dict())
event_handler_set = top_map_handler.get(event_kind, dict())
# update handler
event_handler_set[handler_function] = aux_data
if event_kind not in top_map_handler:
top_map_handler[event_kind] = event_handler_set
if full_usage_id not in self.__evt_handlers:
self.__evt_handlers[full_usage_id] = top_map_handler
return True
    class InputReportQueue(object):
        """Multi-threaded queue. Allows to queue reports from reading thread.

        Two internal lists: 'fresh' holds posted-but-unprocessed reports,
        'used' recycles consumed buffers to avoid reallocating ctypes
        arrays. release_events() poisons the queue for shutdown.
        """
        def __init__(self, max_size, report_size):
            self.__locked_down = False
            self.max_size = max_size
            self.repport_buffer_type = c_ubyte * report_size
            self.used_queue = []
            self.fresh_queue = []
            self.used_lock = threading.Lock()
            self.fresh_lock = threading.Lock()
            # set while fresh_queue is non-empty; consumers wait on it
            self.posted_event = threading.Event()

        #@logging_decorator
        def get_new(self):
            """Allocates storage for input report (recycled when possible)."""
            if self.__locked_down:
                return None
            self.used_lock.acquire()
            if len(self.used_queue):
                #we can reuse items
                empty_report = self.used_queue.pop(0)
                self.used_lock.release()
                # zero out the recycled buffer before handing it back
                ctypes.memset(empty_report, 0, sizeof(empty_report))
            else:
                self.used_lock.release()
                #create brand new storage
                #auto initialized to '0' by ctypes
                empty_report = self.repport_buffer_type()
            return empty_report

        def reuse(self, raw_report):
            """Reuse not posted report."""
            if self.__locked_down:
                return
            if not raw_report:
                return
            self.used_lock.acquire()
            #we can reuse this item
            self.used_queue.append(raw_report)
            self.used_lock.release()

        #@logging_decorator
        def post(self, raw_report):
            """Used by reading thread to post a new input report."""
            if self.__locked_down:
                # during shutdown still wake any waiting consumer
                self.posted_event.set()
                return
            self.fresh_lock.acquire()
            self.fresh_queue.append( raw_report )
            self.posted_event.set()
            self.fresh_lock.release()

        #@logging_decorator
        def get(self):
            """Used to retreive one report from the queue (blocking)."""
            if self.__locked_down:
                return None
            #wait for data
            self.posted_event.wait()
            self.fresh_lock.acquire()
            # re-check after waking: release_events() may have fired
            if self.__locked_down:
                self.fresh_lock.release()
                return None
            item = self.fresh_queue.pop(0)
            if not self.fresh_queue:
                # emtpy: consumers must wait again
                self.posted_event.clear()
            self.fresh_lock.release()
            return item

        def release_events(self):
            """Release thread locks: poison the queue so blocked consumers
            wake up and see None."""
            self.__locked_down = True
            self.posted_event.set()
    class InputReportProcessingThread(threading.Thread):
        """Input reports handler helper class.

        Daemon consumer: pulls raw reports from the device's queue and
        feeds them to _process_raw_report; starts itself on construction.
        """
        def __init__(self, hid_object):
            threading.Thread.__init__(self)
            self.__abort = False
            self.hid_object = hid_object
            self.daemon = True
            self.start()

        def abort(self):
            """Cancel processing."""
            self.__abort = True

        def run(self):
            """Start collecting input reports and post it to subscribed
            Hid device"""
            hid_object = self.hid_object
            report_queue = hid_object._input_report_queue
            while not self.__abort and hid_object.is_opened():
                # blocking get(); returns None when the queue is released
                raw_report = report_queue.get()
                if not raw_report or self.__abort:
                    break
                hid_object._process_raw_report(raw_report)
                # reuse the report (avoid allocating new memory)
                report_queue.reuse(raw_report)
    class InputReportReaderThread(threading.Thread):
        """Helper to receive input reports.

        Daemon producer: performs overlapped ReadFile calls against the
        device handle and posts each filled buffer to the input queue.
        abort() must make any pending overlapped read complete (via the
        read event) before the thread can exit.
        """
        def __init__(self, hid_object, raw_report_size):
            threading.Thread.__init__(self)
            self.__abort = False
            self.__active = False
            self.hid_object = hid_object
            self.report_queue = hid_object._input_report_queue
            hid_handle = int( hid_object.hid_handle )
            self.raw_report_size = raw_report_size
            self.__h_read_event = None
            self.__abort_lock = threading.RLock()
            if hid_object and hid_handle and self.raw_report_size \
                    and self.report_queue:
                #only if input reports are available
                self.daemon = True
                self.start()
            else:
                hid_object.close()

        def abort(self):
            """Stop collectiong reports."""
            with self.__abort_lock:
                if not self.__abort:
                    # The abort variable must be set to true
                    # before sending the event, otherwise
                    # the reader thread might skip
                    # CancelIo
                    self.__abort = True
                    if self.__h_read_event:
                        # force overlapped events competition
                        winapi.SetEvent(self.__h_read_event)

        def is_active(self):
            """main reading loop is running (bool)"""
            return bool(self.__active)

        def run(self):
            if not self.raw_report_size:
                # don't raise any error as the hid object can still be used
                # for writing reports
                # NOTE(review): inherited inconsistency -- the comment above
                # says "don't raise" but a raise follows; kept as-is.
                raise HIDError("Attempting to read input reports on non "\
                    "capable HID device")

            over_read = winapi.OVERLAPPED()
            self.__h_read_event = winapi.CreateEvent(None, 0, 0, None)
            over_read.h_event = self.__h_read_event
            if not over_read.h_event:
                raise HIDError("Error when create hid event resource")
            try:
                bytes_read = c_ulong()
                #
                hid_object = self.hid_object
                input_report_queue = self.report_queue
                report_len = self.raw_report_size
                #main loop active
                self.__active = True
                while not self.__abort:
                    #get storage (None means the queue was shut down)
                    buf_report = input_report_queue.get_new()
                    if not buf_report or self.__abort:
                        break
                    bytes_read.value = 0
                    with self.__abort_lock:
                        # Call to ReadFile must only be done if
                        # abort isn't set.
                        if self.__abort:
                            break
                        # async read from device
                        result = winapi.ReadFile(hid_object.hid_handle,
                            byref(buf_report), report_len, byref(bytes_read),
                            byref(over_read) )
                    if not result:
                        error = ctypes.GetLastError()
                        if error == winapi.ERROR_IO_PENDING:
                            # overlapped operation in progress
                            result = error
                        elif error == 1167:
                            # device disconnected
                            break
                        else:
                            raise HIDError("Error %d when trying to read from HID "\
                                "device: %s"%(error, ctypes.FormatError(error)) )
                    if result == winapi.ERROR_IO_PENDING:
                        #wait for event
                        result = winapi.WaitForSingleObject( \
                            over_read.h_event,
                            winapi.INFINITE )
                        if result != winapi.WAIT_OBJECT_0 or self.__abort: #success
                            #Cancel the ReadFile call. The read must not be in
                            #progress when run() returns, since the buffers used
                            #in the call will go out of scope and get freed. If
                            #new data arrives (the read finishes) after these
                            #buffers have been freed then this can cause python
                            #to crash.
                            winapi.CancelIo( hid_object.hid_handle )
                            break #device has being disconnected
                    # signal raw data already read
                    input_report_queue.post( buf_report )
            finally:
                #clean up
                self.__active = False
                self.__abort = True
                self.__h_read_event = None #delete read event so it isn't be used by abort()
                winapi.CloseHandle(over_read.h_event)
                del over_read
def __repr__(self):
return "HID device (vID=0x%04x, pID=0x%04x, v=0x%04x); %s; %s, " \
"Path: %s" % (self.vendor_id, self.product_id, self.version_number,\
self.vendor_name, self.product_name, self.device_path)
class ReportItem(object):
    """Represents a single usage field (button or value) in a HID report.

    Wraps the usage page/id, report id and, for value usages, the packed
    storage.  The plain value is exposed through the ``value`` property;
    value arrays are additionally accessible by index (``item[n]``).
    """
    def __init__(self, hid_report, caps_record, usage_id = 0):
        """Build the usage item.

        hid_report:  parent HidReport instance (gives access to hid_object).
        caps_record: HidPUsageCaps-like capability record for this usage.
        usage_id:    actual usage id when caps_record describes a range.
        """
        # from here we can get the parent hid_object
        self.hid_report = hid_report
        self.__is_button = caps_record.is_button
        self.__is_value = caps_record.is_value
        self.__is_value_array = bool(self.__is_value and \
            caps_record.report_count > 1)
        self.__bit_size = 1
        self.__report_count = 1
        if not caps_record.is_range:
            self.usage_id = caps_record.usage
        else:
            self.usage_id = usage_id
        self.__report_id_value = caps_record.report_id
        self.page_id = caps_record.usage_page
        self.__value = 0
        if caps_record.is_range:
            # reference to a single usage within a usage range
            offset = usage_id - caps_record.usage_min
            self.data_index = caps_record.data_index_min + offset
            self.string_index = caps_record.string_min + offset
            self.designator_index = caps_record.designator_min + offset
        else:
            # straight (non-range) reference
            self.data_index = caps_record.data_index
            self.string_index = caps_record.string_index
            self.designator_index = caps_record.designator_index
        # pre-allocate packed byte storage if the item is a value array
        if self.__is_value:
            if self.__is_value_array:
                byte_size = (caps_record.bit_size * caps_record.report_count) // 8
                if (caps_record.bit_size * caps_record.report_count) % 8:
                    # TODO: This seems not supported by Windows
                    byte_size += 1
                value_type = c_ubyte * byte_size
                self.__value = value_type()
            self.__bit_size = caps_record.bit_size
            self.__report_count = caps_record.report_count
    def __len__(self):
        """Number of elements (1 for plain usages, N for value arrays)."""
        return self.__report_count
    def __setitem__(self, index, value):
        """Set one element of a value usage array by index."""
        if not self.__is_value_array:
            raise ValueError("Report item is not value usage array")
        if index < self.__report_count:
            byte_index = (index * self.__bit_size) // 8
            bit_index = (index * self.__bit_size) % 8
            bit_mask = ((1 << self.__bit_size) - 1)
            # clear the slot, then store the masked new value
            self.__value[byte_index] &= ~(bit_mask << bit_index)
            self.__value[byte_index] |= (value & bit_mask) << bit_index
        else:
            raise IndexError
    def __getitem__(self, index):
        """Get one element of a value usage array by index."""
        if not self.__is_value_array:
            raise ValueError("Report item is not value usage array")
        if index < self.__report_count:
            byte_index = (index * self.__bit_size) // 8
            bit_index = (index * self.__bit_size) % 8
            return ((self.__value[byte_index] >> bit_index) & \
                    ((1 << self.__bit_size) - 1) )
        else:
            raise IndexError
    def set_value(self, value):
        """Set usage value within report (list for value arrays)."""
        if self.__is_value_array:
            if len(value) == self.__report_count:
                for index, item in enumerate(value):
                    self.__setitem__(index, item)
            else:
                raise ValueError("Value size should match report item size "\
                    "length" )
        else:
            self.__value = value & ((1 << self.__bit_size) - 1) #valid bits only
    def get_value(self):
        """Retrieve usage value within report (list for value arrays)."""
        if self.__is_value_array:
            if self.__bit_size == 8: #matching c_ubyte, no unpacking needed
                return list(self.__value)
            else:
                result = []
                for i in range(self.__report_count):
                    result.append(self.__getitem__(i))
                return result
        else:
            return self.__value
    #value property
    value = property(get_value, set_value)
    @property
    def value_array(self):
        """Retrieve raw usage value storage (read only, c_ubyte array)."""
        return self.__value
    def key(self):
        """Return unique combined usage page & usage id long value."""
        return (self.page_id << 16) | self.usage_id
    def is_value(self):
        """Validate if usage is value (not 'button')"""
        return self.__is_value
    def is_button(self):
        """Validate if usage is button (not value)"""
        return self.__is_button
    def is_value_array(self):
        """Validate if usage was described as value array"""
        return self.__is_value_array
    def get_usage_string(self):
        """Returns usage representation string (as embedded in HID device
        if available)
        """
        if self.string_index:
            usage_string_type = c_wchar * MAX_HID_STRING_LENGTH
            # 128 max string length
            abuffer = usage_string_type()
            hid_dll.HidD_GetIndexedString(
                self.hid_report.get_hid_object().hid_handle,
                self.string_index,
                byref(abuffer), MAX_HID_STRING_LENGTH-1 )
            return abuffer.value
        return ""
    #read only properties
    @property
    def report_id(self):
        """Retrieve Report Id numeric value"""
        return self.__report_id_value
    def __repr__(self):
        res = []
        if self.string_index:
            res.append( self.get_usage_string() )
        res.append( "page_id=%s"%hex(self.page_id) )
        res.append( "usage_id=%s"%hex(self.usage_id) )
        # fixed: identity comparison against None (was `!= None`)
        if self.__value is not None:
            res.append( "value=%s" % str(self.get_value()))
        else:
            res.append( "value=[None])" )
        usage_type = ""
        if self.is_button():
            usage_type = "Button"
        elif self.is_value():
            usage_type = "Value"
        return usage_type + "Usage item, %s (" % hex(get_full_usage_id ( \
            self.page_id, self.usage_id)) + ', '.join(res) + ')'
# class ReportItem finishes ***********************
class HidReport(object):
    """This class interfaces an actual HID physical report, providing a wrapper
    that exposes specific usages (usage page and usage ID) as a usage_id value
    map (dictionary).
    Example: A HID device might have an output report ID = 0x01, with the
    following usages; 0x20 as a boolean (button), and 0x21 as a 3 bit value,
    then querying the HID object for the output report (by using
    hid_object.get_output_report(0x01))
    """
    #
    def __init__(self, hid_object, report_type, report_id):
        """Wrap one physical report of `hid_object`.

        report_type: HidP_Input, HidP_Output or HidP_Feature.
        report_id:   numeric report id within that report type.
        Raises HIDError for unsupported report types.
        """
        hid_caps = hid_object.hid_caps
        if report_type == HidP_Input:
            self.__raw_report_size = hid_caps.input_report_byte_length
        elif report_type == HidP_Output:
            self.__raw_report_size = hid_caps.output_report_byte_length
        elif report_type == HidP_Feature:
            self.__raw_report_size = hid_caps.feature_report_byte_length
        else:
            raise HIDError("Unsupported report type")
        self.__report_kind = report_type  #target report type
        self.__value_array_items = list() #array of usages items
        self.__hid_object = hid_object    #parent hid object
        self.__report_id = c_ubyte(report_id) #target report Id
        self.__items = dict()     #access items by 'full usage' key
        self.__idx_items = dict() #access internal items by HID DLL usage index
        self.__raw_data = None    #buffer storage (if needed)
        self.__usage_data_list = None #hid API HIDP_DATA array (if allocated)
        #build report items list, browse parent hid object for report items
        for item in hid_object.usages_storage.get(report_type, []):
            if item.report_id == report_id:
                if not item.is_range:
                    #regular 'single' usage
                    report_item = ReportItem(self, item)
                    self.__items[report_item.key()] = report_item
                    self.__idx_items[report_item.data_index] = report_item
                    #keep track of value arrays, they need special handling
                    if report_item.is_value_array():
                        self.__value_array_items.append(report_item)
                else:
                    # expand usage ranges into individual items
                    for usage_id in range(item.usage_min,
                            item.usage_max+1):
                        report_item = ReportItem(self, item, usage_id)
                        self.__items[report_item.key()] = report_item
                        self.__idx_items[report_item.data_index] = report_item
    #
    __report_kind_dict = {
        HidP_Input: "Input",
        HidP_Output: "Output",
        HidP_Feature: "Feature",
    }
    #read only properties
    @property
    def report_id(self):
        """Retrieve associated report Id value"""
        return self.__report_id.value
    @property
    def report_type(self):
        """Retrieve report type as a string ("Input", "Output", "Feature")"""
        return self.__report_kind_dict[self.__report_kind]
    @property
    def hid_object(self):
        """Retrieve associated HID device instance"""
        return self.__hid_object
    def __repr__(self):
        return "HID report object (%s report, id=0x%02x), %d items included" \
            % (self.report_type, self.__report_id.value, len(self.__items) )
    def __getitem__(self, key):
        """Get a ReportItem by full usage key (or by another ReportItem)."""
        if isinstance(key, ReportItem):
            key = key.key()
        return self.__items[key]
    def __setitem__(self, key, value):
        """set report item value"""
        item = self.__getitem__(key)
        item.value = value
    def __contains__(self, key):
        if isinstance(key, ReportItem):
            key = key.key()
        return key in self.__items
    def __len__(self):
        return len(self.__items)
    def has_key(self, key):
        """Test for key (as standard dicts)"""
        return self.__contains__(key)
    def items(self):
        """Return key, value pairs (as standard dicts)"""
        return list(self.__items.items())
    def keys(self):
        """Return stored element keys (as standard dicts)"""
        return self.__items.keys()
    def values(self):
        """Return stored elements (as standard dicts)"""
        return self.__items.values()
    def get_hid_object(self):
        """Retrieve reference to parent HID device"""
        return self.__hid_object
    def get_usages(self):
        "Return a dictionary mapping full usages Ids to plain values"
        result = dict()
        for key, usage in self.items():
            result[key] = usage.value
        return result
    def __alloc_raw_data(self, initial_values=None):
        """Pre-allocate reusable raw buffer memory, optionally initializing
        it with `initial_values` (an indexable byte sequence).
        """
        #allocate c_ubyte storage
        if self.__raw_data is None:  #first time only, create storage
            raw_data_type = c_ubyte * self.__raw_report_size
            self.__raw_data = raw_data_type()
        elif initial_values is self.__raw_data:
            # already our own internal storage, nothing to copy
            # (ctypes arrays have no __eq__, so the former `==` check was
            # an identity test anyway)
            return
        else:
            #re-initialize the reused buffer
            ctypes.memset(self.__raw_data, 0, len(self.__raw_data))
        if initial_values:
            for index in range(len(initial_values)):
                self.__raw_data[index] = initial_values[index]
    def set_raw_data(self, raw_data):
        """Set usage values based on given raw data, item[0] is report_id,
        length should match 'raw_data_length' value, best performance if
        raw_data is c_ubyte ctypes array object type
        """
        #pre-parsed data should exist
        assert(self.__hid_object.is_opened())
        #valid length
        if len(raw_data) != self.__raw_report_size:
            raise HIDError( "Report size has to be %d elements (bytes)" \
                % self.__raw_report_size )
        # copy to internal storage
        self.__alloc_raw_data(raw_data)
        if not self.__usage_data_list: # create HIDP_DATA buffer
            max_items = hid_dll.HidP_MaxDataListLength(self.__report_kind,
                self.__hid_object.ptr_preparsed_data)
            data_list_type = winapi.HIDP_DATA * max_items
            self.__usage_data_list = data_list_type()
        #reference HIDP_DATA buffer
        data_list = self.__usage_data_list
        data_len = c_ulong(len(data_list))
        #reset old values
        for item in self.values():
            if item.is_value_array():
                item.value = [0, ]*len(item)
            else:
                item.value = 0
        #ready, parse raw data
        HidStatus( hid_dll.HidP_GetData(self.__report_kind,
            byref(data_list), byref(data_len),
            self.__hid_object.ptr_preparsed_data,
            byref(self.__raw_data), len(self.__raw_data)) )
        #set values on internal report item objects
        for idx in range(data_len.value):
            value_item = data_list[idx]
            report_item = self.__idx_items.get(value_item.data_index)
            if not report_item:
                # This is not expected to happen
                continue
            if report_item.is_value():
                report_item.value = value_item.value.raw_value
            elif report_item.is_button():
                report_item.value = value_item.value.on
            else:
                pass # HID API should give us either, at least one of 'em
        #get values of array items
        for item in self.__value_array_items:
            #ask hid API to parse
            HidStatus( hid_dll.HidP_GetUsageValueArray(self.__report_kind,
                item.page_id,
                0, #link collection
                item.usage_id, #short usage
                byref(item.value_array), #output data (c_ubyte storage)
                len(item.value_array), self.__hid_object.ptr_preparsed_data,
                byref(self.__raw_data), len(self.__raw_data)) )
    #
    def __prepare_raw_data(self):
        "Format internal __raw_data storage according to usages setting"
        #pre-parsed data should exist
        if not self.__hid_object.ptr_preparsed_data:
            raise HIDError("HID object close or unable to request pre parsed "\
                "report data")
        # make sure pre-memory allocation already done
        self.__alloc_raw_data()
        try:
            HidStatus( hid_dll.HidP_InitializeReportForID(self.__report_kind,
                self.__report_id, self.__hid_object.ptr_preparsed_data,
                byref(self.__raw_data), self.__raw_report_size) )
        except HIDError:
            # fall back to just tagging the report id byte
            # (fixed: store the plain int value, not the c_ubyte instance)
            self.__raw_data[0] = self.__report_id.value
        #check if we have pre-allocated usage storage
        if not self.__usage_data_list: # create HIDP_DATA buffer
            max_items = hid_dll.HidP_MaxDataListLength(self.__report_kind,
                self.__hid_object.ptr_preparsed_data)
            if not max_items:
                raise HIDError("Internal error while requesting usage length")
            data_list_type = winapi.HIDP_DATA * max_items
            self.__usage_data_list = data_list_type()
        #reference HIDP_DATA buffer
        data_list = self.__usage_data_list
        #set buttons and values usages first
        n_total_usages = 0
        single_usage = USAGE()
        single_usage_len = c_ulong()
        for data_index, report_item in self.__idx_items.items():
            if (not report_item.is_value_array()) and \
                    report_item.value is not None:
                #set by user, include in request
                if report_item.is_button() and report_item.value:
                    # windows just can't handle button arrays!, we just don't
                    # know if usage is button array or plain single usage, so
                    # we set all usages at once
                    single_usage.value = report_item.usage_id
                    single_usage_len.value = 1
                    HidStatus( hid_dll.HidP_SetUsages(self.__report_kind,
                        report_item.page_id, 0,
                        byref(single_usage), byref(single_usage_len),
                        self.__hid_object.ptr_preparsed_data,
                        byref(self.__raw_data), self.__raw_report_size) )
                    continue
                elif report_item.is_value() and \
                        not report_item.is_value_array():
                    data_list[n_total_usages].value.raw_value = report_item.value
                else:
                    continue #do nothing
                data_list[n_total_usages].reserved = 0 #reset
                data_list[n_total_usages].data_index = data_index #reference
                n_total_usages += 1
        #set data if any usage is not 'none' (and not any value array)
        if n_total_usages:
            #some usages set
            usage_len = c_ulong(n_total_usages)
            HidStatus( hid_dll.HidP_SetData(self.__report_kind,
                byref(data_list), byref(usage_len),
                self.__hid_object.ptr_preparsed_data,
                byref(self.__raw_data), self.__raw_report_size) )
        #set values based on value arrays
        for report_item in self.__value_array_items:
            HidStatus( hid_dll.HidP_SetUsageValueArray(self.__report_kind,
                report_item.page_id,
                0, #all link collections
                report_item.usage_id,
                byref(report_item.value_array),
                len(report_item.value_array),
                self.__hid_object.ptr_preparsed_data, byref(self.__raw_data),
                len(self.__raw_data)) )
    def get_raw_data(self):
        """Get raw HID report based on internal report item settings,
        creates new c_ubytes storage
        """
        if self.__report_kind != HidP_Output \
                and self.__report_kind != HidP_Feature:
            raise HIDError("Only for output or feature reports")
        self.__prepare_raw_data()
        #return read-only object for internal storage
        return helpers.ReadOnlyList(self.__raw_data)
    def send(self, raw_data = None):
        """Prepare HID raw report (unless raw_data is provided) and send
        it to HID device
        """
        if self.__report_kind != HidP_Output \
                and self.__report_kind != HidP_Feature:
            raise HIDError("Only for output or feature reports")
        #valid length
        if raw_data and (len(raw_data) != self.__raw_report_size):
            raise HIDError("Report size has to be %d elements (bytes)" \
                % self.__raw_report_size)
        #should be valid report id
        if raw_data and raw_data[0] != self.__report_id.value:
            #hint, raw_data should be a plain list of integer values
            raise HIDError("Not matching report id")
        # (removed an unreachable duplicate of the report-kind check above)
        if not raw_data:
            # we'll construct the raw report from the usage items
            self.__prepare_raw_data()
            # fixed: reference the freshly prepared internal buffer,
            # otherwise a plain `send()` would forward None
            raw_data = self.__raw_data
        elif not ( isinstance(raw_data, ctypes.Array) and \
                issubclass(raw_data._type_, c_ubyte) ):
            # pre-memory allocation for performance: copy the caller's
            # sequence into our reusable c_ubyte buffer
            self.__alloc_raw_data(raw_data)
            #reference proper object
            raw_data = self.__raw_data
        # else: caller already provided a proper c_ubyte ctypes array,
        # use it directly (zero copy)
        if self.__report_kind == HidP_Output:
            return self.__hid_object.send_output_report(raw_data)
        elif self.__report_kind == HidP_Feature:
            return self.__hid_object.send_feature_report(raw_data)
        else:
            pass #can't get here (yet)
    def get(self, do_process_raw_report = True):
        "Read report from device"
        assert(self.__hid_object.is_opened())
        if self.__report_kind != HidP_Input and \
                self.__report_kind != HidP_Feature:
            raise HIDError("Only for input or feature reports")
        # pre-alloc raw data
        self.__alloc_raw_data()
        # now use it
        raw_data = self.__raw_data
        # tag the buffer with the report id we want to read
        # (fixed: store the plain int value, not the c_ubyte instance)
        raw_data[0] = self.__report_id.value
        read_function = None
        if self.__report_kind == HidP_Feature:
            read_function = hid_dll.HidD_GetFeature
        elif self.__report_kind == HidP_Input:
            read_function = hid_dll.HidD_GetInputReport
        if read_function and read_function(int(self.__hid_object.hid_handle),
                byref(raw_data), len(raw_data)):
            #success
            if do_process_raw_report:
                self.set_raw_data(raw_data)
                self.__hid_object._process_raw_report(raw_data)
            return helpers.ReadOnlyList(raw_data)
        return helpers.ReadOnlyList([])
#class HIDReport finishes ***********************
class HidPUsageCaps(object):
    """Flat, windows-type-agnostic container for usage capability data.

    Copies every non-reserved field of a HIDP_BUTTON_CAPS or HIDP_VALUE_CAPS
    structure (including the fields of the active range/not_range union
    member) into plain integer attributes.
    """
    def __init__(self, caps):
        # keep pylint happy
        self.report_id = 0
        # copy the plain fields of the caps structure
        for fname, _ftype in caps._fields_:
            if fname.startswith('reserved'):
                continue
            if fname == 'union':
                continue
            setattr(self, fname, int(getattr(caps, fname)))
        # then flatten the active member of the range/not_range union
        if caps.is_range:
            range_struct = caps.union.range
        else:
            range_struct = caps.union.not_range
        for fname, _ftype in range_struct._fields_:
            if fname.startswith('reserved'):
                continue
            if fname == 'union':
                continue
            setattr(self, fname, int(getattr(range_struct, fname)))
        # tag the record kind based on the source structure type
        self.is_value = False
        self.is_button = False
        if isinstance(caps, winapi.HIDP_BUTTON_CAPS):
            self.is_button = True
        elif isinstance(caps, winapi.HIDP_VALUE_CAPS):
            self.is_value = True
        else:
            pass
    def inspect(self):
        """Retrieve dictionary of 'Field: Value' attributes.

        Private attributes and callables are skipped.
        """
        results = {}
        for fname in dir(self):
            if not fname.startswith('_'):
                value = getattr(self, fname)
                # fixed: use the builtin callable(); collections.Callable
                # was removed in Python 3.10 (moved to collections.abc)
                if callable(value):
                    continue
                results[fname] = value
        return results
def show_hids(target_vid = 0, target_pid = 0, output = None):
    """Check all HID class devices connected to PC hosts and print details.

    target_vid: if non-zero, restrict the scan to this vendor id.
    target_pid: if non-zero (with target_vid), restrict to this product id.
    output:     writable stream, defaults to sys.stdout.
    """
    # first be kind with local encodings
    if not output:
        # beware your script should manage encodings
        output = sys.stdout
    # then the big cheese...
    from . import tools
    all_hids = None
    if target_vid:
        if target_pid:
            # both vendor and product Id provided
            device_filter = HidDeviceFilter(vendor_id = target_vid,
                    product_id = target_pid)
        else:
            # only vendor id
            device_filter = HidDeviceFilter(vendor_id = target_vid)
        all_hids = device_filter.get_devices()
    else:
        all_hids = find_all_hid_devices()
    if all_hids:
        # fixed typo: "writting" -> "writing"
        print("Found HID class devices!, writing details...")
        for dev in all_hids:
            device_name = str(dev)
            output.write(device_name)
            output.write('\n\n  Path:      %s\n' % dev.device_path)
            output.write('\n  Instance:  %s\n' % dev.instance_id)
            output.write('\n  Port (ID): %s\n' % dev.get_parent_instance_id())
            output.write('\n  Port (str):%s\n' % str(dev.get_parent_device()))
            # dump full usage documentation, always closing the device
            try:
                dev.open()
                tools.write_documentation(dev, output)
            finally:
                dev.close()
        print("done!")
    else:
        print("There's not any non system HID class device available")
#
| {
"content_hash": "e45eaf49ce15a7cd61f872eb38723ce6",
"timestamp": "",
"source": "github",
"line_count": 1609,
"max_line_length": 92,
"avg_line_length": 41.26600372902424,
"alnum_prop": 0.5405515309426631,
"repo_name": "rene-aguirre/pywinusb",
"id": "c98501ab81223db81852175ea9c32060dd3d4e7d",
"size": "66447",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pywinusb/hid/core.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "149947"
}
],
"symlink_target": ""
} |
"""
The different agnocomplete classes to be discovered
"""
from copy import copy
from six import with_metaclass
from abc import abstractmethod, ABCMeta
import logging
from django.db.models import Q
from django.core.exceptions import ImproperlyConfigured
from django.utils.encoding import force_text as text
from django.conf import settings
import requests
from .constants import AGNOCOMPLETE_DEFAULT_PAGESIZE
from .constants import AGNOCOMPLETE_MIN_PAGESIZE
from .constants import AGNOCOMPLETE_MAX_PAGESIZE
from .constants import AGNOCOMPLETE_DEFAULT_QUERYSIZE
from .constants import AGNOCOMPLETE_MIN_QUERYSIZE
from .exceptions import AuthenticationRequiredAgnocompleteException
from .exceptions import SkipItem
from .exceptions import ItemNotFound
logger = logging.getLogger(__name__)
class ClassPropertyDescriptor(object):
    """
    Descriptor implementing a property that can be read on the class itself.
    """
    def __init__(self, fget, fset=None):
        self.fget = fget
        self.fset = fset
    def __get__(self, obj, klass=None):
        # When accessed through an instance, derive the class from it
        target_class = klass if klass is not None else type(obj)
        bound_getter = self.fget.__get__(obj, target_class)
        return bound_getter()
    def __set__(self, obj, value):
        if not self.fset:
            raise AttributeError("can't set attribute")
        bound_setter = self.fset.__get__(obj, type(obj))
        return bound_setter(value)
    def setter(self, func):
        """
        Setter: the decorated method will become a class property.
        """
        if isinstance(func, (classmethod, staticmethod)):
            self.fset = func
        else:
            self.fset = classmethod(func)
        return self
def classproperty(func):
    """
    Decorator: the given function will become a class property.
    e.g::
        class SafeClass(object):
            @classproperty
            def safe(cls):
                return True
        class UnsafeClass(object):
            @classproperty
            def safe(cls):
                return False
    """
    if isinstance(func, (classmethod, staticmethod)):
        wrapped = func
    else:
        wrapped = classmethod(func)
    return ClassPropertyDescriptor(wrapped)
def load_settings_sizes():
    """
    Load page/query sizes from the Django settings, falling back to the
    module constants when a setting is unset (or falsy).
    """
    def _setting_or(name, fallback):
        # a missing/None/zero setting falls back to the module constant
        return getattr(settings, name, None) or fallback
    page_size = _setting_or(
        'AGNOCOMPLETE_DEFAULT_PAGESIZE', AGNOCOMPLETE_DEFAULT_PAGESIZE)
    page_size_min = _setting_or(
        'AGNOCOMPLETE_MIN_PAGESIZE', AGNOCOMPLETE_MIN_PAGESIZE)
    page_size_max = _setting_or(
        'AGNOCOMPLETE_MAX_PAGESIZE', AGNOCOMPLETE_MAX_PAGESIZE)
    # Query sizes
    query_size = _setting_or(
        'AGNOCOMPLETE_DEFAULT_QUERYSIZE', AGNOCOMPLETE_DEFAULT_QUERYSIZE)
    query_size_min = _setting_or(
        'AGNOCOMPLETE_MIN_QUERYSIZE', AGNOCOMPLETE_MIN_QUERYSIZE)
    return (
        page_size, page_size_min, page_size_max,
        query_size, query_size_min,
    )
class AgnocompleteBase(with_metaclass(ABCMeta, object)):
    """
    Common abstract ancestor for every Agnocomplete tool.

    Concrete subclasses must provide `get_choices`, `items` and `selected`.
    """
    # To be overridden by settings, or constructor arguments
    page_size = None
    page_size_max = None
    page_size_min = None
    query_size = None
    query_size_min = None
    url = None
    def __init__(self, user=None, page_size=None, url=None):
        # Keep the user context at hand
        self.user = user
        # Settings (or module constants) provide the baseline sizes
        (settings_page, settings_page_min, settings_page_max,
            settings_query, settings_query_min) = load_settings_sizes()
        # Class attributes take precedence over settings/constants
        self._conf_page_size = self.page_size or settings_page
        self._conf_page_size_min = self.page_size_min or settings_page_min
        self._conf_page_size_max = self.page_size_max or settings_page_max
        # The constructor argument wins, unless it is out of bounds
        candidate = page_size or self._conf_page_size
        if not (self._conf_page_size_min <= candidate
                <= self._conf_page_size_max):
            candidate = self._conf_page_size
        self._page_size = candidate
        # Query sizes
        self._query_size = self.query_size or settings_query
        self._query_size_min = self.query_size_min or settings_query_min
        # Eventual custom URL
        self._url = url
    def set_agnocomplete_field(self, field):
        self.agnocomplete_field = field
    @classproperty
    def slug(cls):
        """
        Return the key used in the register, used as a slug for the URL.
        You can override this by adding a class property.
        """
        return cls.__name__
    def get_url(self):
        return self._url or self.url
    def get_page_size(self):
        """
        Return the computed page size.
        Precedence: constructor argument (when within bounds), then class
        attribute, then settings, then module constants.
        """
        return self._page_size
    def get_query_size(self):
        """
        Return the computed default query size.
        Precedence: class attribute, then settings, then module constants.
        """
        return self._query_size
    def get_query_size_min(self):
        """
        Return the computed minimum query size.
        Precedence: class attribute, then settings, then module constants.
        """
        return self._query_size_min
    @abstractmethod
    def get_choices(self):
        pass
    @abstractmethod
    def items(self, query=None, **kwargs):
        pass
    @abstractmethod
    def selected(self, ids):
        """
        Return the values (as a tuple of pairs) for the ids provided
        """
        pass
    def is_valid_query(self, query):
        """
        Return True if the search query is valid, i.e. non-empty and at
        least as long as the minimum query size.
        """
        if not query:
            return False
        return len(query) >= self.get_query_size_min()
class AgnocompleteChoices(AgnocompleteBase):
    """
    Autocomplete source backed by a static ``choices`` tuple.

    Usage Example::
        class AgnocompleteColor(AgnocompleteChoices):
            choices = (
                ('red', 'Red'),
                ('green', 'Green'),
                ('blue', 'Blue'),
            )
    """
    choices = ()
    def get_choices(self):
        return self.choices
    def item(self, current_item):
        """Shape a (value, label) pair as a result dict."""
        value, label = current_item
        return {'value': value, 'label': label}
    def items(self, query=None, **kwargs):
        """Return choices whose label starts with `query`, paginated."""
        if not self.is_valid_query(query):
            return []
        matching = tuple(
            choice for choice in self.choices
            if choice[1].lower().startswith(query)
        )
        # Slicing before rendering
        page = matching[:self.get_page_size()]
        return [self.item(choice) for choice in page]
    def selected(self, ids):
        """
        Return the selected options as a list of tuples
        """
        return [choice for choice in self.choices if choice[0] in ids]
class AgnocompleteModelBase(with_metaclass(ABCMeta, AgnocompleteBase)):
    """Abstract ancestor for model-backed Agnocomplete classes."""
    model = None
    requires_authentication = False
    @abstractmethod
    def get_queryset(self):
        pass
    @property
    def fields(self):
        raise NotImplementedError(
            "Integrator: You must have a `fields` property")
    def get_model(self):
        """
        Return the class Model used by this Agnocomplete
        """
        model = getattr(self, 'model', None)
        if model:
            return model
        # Fall back on an empty queryset to discover the model class
        try:
            return self.get_queryset().none().model
        except Exception:
            raise ImproperlyConfigured(
                "Integrator: Unable to determine the model with this queryset."
                " Please add a `model` property")
    def get_model_queryset(self):
        """
        Return an unfiltered complete model queryset.
        To be used for the select Input initialization
        """
        return self.get_model().objects.all()
    get_choices = get_model_queryset
    def get_field_name(self):
        """
        Return the model field name to be used as a value, or 'pk' if unset
        """
        field = getattr(self, 'agnocomplete_field', None)
        if field is not None and hasattr(field, 'to_field_name'):
            return field.to_field_name or 'pk'
        return 'pk'
class AgnocompleteModel(AgnocompleteModelBase):
    """
    Model-backed autocomplete, searching over the configured `fields`.

    Example::
        class AgnocompletePeople(AgnocompleteModel):
            model = People
            fields = ['first_name', 'last_name']
        class AgnocompletePersonQueryset(AgnocompleteModel):
            fields = ['first_name', 'last_name']
            def get_queryset(self):
                return People.objects.filter(email__contains='example.com')
    """
    def __init__(self, *args, **kwargs):
        super(AgnocompleteModel, self).__init__(*args, **kwargs)
        self.__final_queryset = None
    def _construct_qs_filter(self, field_name):
        """
        Using a field name optionally prefixed by `^`, `=`, `@`, return a
        case-insensitive filter condition name usable as a queryset `filter()`
        keyword argument.
        """
        if field_name.startswith('^'):
            return "%s__istartswith" % field_name[1:]
        elif field_name.startswith('='):
            return "%s__iexact" % field_name[1:]
        elif field_name.startswith('@'):
            return "%s__search" % field_name[1:]
        else:
            return "%s__icontains" % field_name
    def get_queryset(self):
        """
        Return the base queryset; requires a `model` attribute unless
        overridden in a subclass.
        """
        if not hasattr(self, 'model') or not self.model:
            raise NotImplementedError(
                "Integrator: You must either have a `model` property "
                "or a `get_queryset()` method"
            )
        return self.model.objects.all()
    def get_queryset_filters(self, query):
        """
        Return the query filter conditions (a Q object OR-ing all fields).
        """
        conditions = Q()
        for field_name in self.fields:
            conditions |= Q(**{
                self._construct_qs_filter(field_name): query
            })
        return conditions
    def paginate(self, qs):
        """
        Paginate a given Queryset
        """
        return qs[:self.get_page_size()]
    @property
    def _final_queryset(self):
        """
        Paginated final queryset (None until a search has been run).
        """
        if self.__final_queryset is None:
            return None
        return self.paginate(self.__final_queryset)
    # final_queryset alias
    final_queryset = _final_queryset
    @property
    def final_raw_queryset(self):
        return self.__final_queryset
    def serialize(self, queryset):
        """Shape the paginated queryset as a list of item dicts."""
        result = []
        for item in self.paginate(queryset):
            result.append(self.item(item))
        return result
    def item(self, current_item):
        """
        Return the current item.
        @param current_item: Current item
        @type param: django.models
        @return: Value and label of the current item
        @rtype : dict
        """
        return {
            'value': text(getattr(current_item, self.get_field_name())),
            'label': self.label(current_item)
        }
    def label(self, current_item):
        """
        Return a label for the current item.
        @param current_item: Current item
        @type param: django.models
        @return: Label of the current item
        @rtype : text
        """
        return text(current_item)
    def build_extra_filtered_queryset(self, queryset, **kwargs):
        """
        Apply eventual queryset filters, based on the optional extra arguments
        passed to the query.
        By default, this method returns the queryset "verbatim". You can
        override or overwrite this to perform custom filter on this QS.
        * `queryset`: it's the final queryset build using the search terms.
        * `kwargs`: this dictionary contains the extra arguments passed to the
          agnocomplete class.
        """
        # By default, we're ignoring these arguments and return verbatim QS
        return queryset
    def build_filtered_queryset(self, query, **kwargs):
        """
        Build and return the fully-filtered queryset
        """
        # Take the basic queryset
        qs = self.get_queryset()
        # filter it via the query conditions
        qs = qs.filter(self.get_queryset_filters(query))
        return self.build_extra_filtered_queryset(qs, **kwargs)
    def items(self, query=None, **kwargs):
        """
        Return the items to be sent to the client
        """
        # Consistency fix: reuse the base-class query validation instead of
        # duplicating the empty/too-short checks (identical behavior).
        if not self.is_valid_query(query):
            self.__final_queryset = self.get_model().objects.none()
            return self.serialize(self.__final_queryset)
        if self.requires_authentication:
            if not self.user:
                raise AuthenticationRequiredAgnocompleteException(
                    "Authentication is required to use this autocomplete"
                )
            if not self.user.is_authenticated:
                raise AuthenticationRequiredAgnocompleteException(
                    "Authentication is required to use this autocomplete"
                )
        qs = self.build_filtered_queryset(query, **kwargs)
        # The final queryset is the paginated queryset
        self.__final_queryset = qs
        return self.serialize(qs)
    def selected(self, ids):
        """
        Return the selected options as a list of tuples
        """
        # Cleanup the ID list
        if self.get_field_name() == 'pk':
            ids = filter(lambda x: "{}".format(x).isdigit(), copy(ids))
        else:
            ids = filter(lambda x: len("{}".format(x)) > 0, copy(ids))
        # Prepare the QS
        # TODO: not contextually filtered, check if it's possible at some point
        qs = self.get_model_queryset().filter(
            **{'{}__in'.format(self.get_field_name()): ids})
        result = []
        for item in qs:
            item_repr = self.item(item)
            result.append(
                (item_repr['value'], item_repr['label'])
            )
        return result
class AgnocompleteUrlProxy(with_metaclass(ABCMeta, AgnocompleteBase)):
    """
    This class serves as a proxy between your application and a 3rd party
    URL (typically a REST HTTP API).

    Integrators must provide at least :meth:`get_search_url` (or a
    ``search_url`` property) and :meth:`get_item_url`.
    """
    # Payload keys used to extract each item's value/label.
    value_key = 'value'
    label_key = 'label'
    # HTTP verb used for the search call; anything other than 'get'
    # sends the arguments in the request body instead of the query string.
    method = 'get'
    # Key of the payload entry that holds the list of result items.
    data_key = 'data'

    def get_search_url(self):
        """Return the search endpoint URL; integrators must override this."""
        raise NotImplementedError(
            "Integrator: You must implement a `get_search_url` method"
            " or have a `search_url` property in this class.")

    @property
    def search_url(self):
        # Convenience property wrapping get_search_url().
        return self.get_search_url()

    def get_item_url(self, pk):
        """Return the URL for a single item lookup; integrators must override."""
        raise NotImplementedError(
            "Integrator: You must implement a `get_item_url` method")

    def get_choices(self):
        # URL-backed autocompletes have no static choice list.
        return []

    def get_http_method_arg_name(self):
        """
        Return the HTTP function to call and the params/data argument name
        """
        # requests.get takes `params=`; other verbs take `data=`.
        if self.method == 'get':
            arg_name = 'params'
        else:
            arg_name = 'data'
        return getattr(requests, self.method), arg_name

    def http_call(self, url=None, **kwargs):
        """
        Call the target URL via HTTP and return the JSON result

        ``kwargs`` are both interpolated into the URL template (via
        ``str.format``) and sent as the request params/data.
        Raises :class:`requests.HTTPError` on any non-200 response.
        """
        if not url:
            url = self.search_url
        http_func, arg_name = self.get_http_method_arg_name()
        # Build the argument dictionary to pass in the http function
        _kwargs = {
            arg_name: kwargs,
        }
        # The actual HTTP call
        response = http_func(
            url=url.format(**kwargs),
            headers=self.get_http_headers(),
            **_kwargs
        )
        # Error handling
        if response.status_code != 200:
            logger.warning('Invalid Request for `%s`', response.url)
            # Raising a "requests" exception
            response.raise_for_status()
        return response.json()

    def item(self, current_item):
        # Reshape one raw payload item into the {'value', 'label'} dict
        # expected by the views. May raise SkipItem in subclasses.
        return dict(
            value=text(current_item[self.value_key]),
            label=text(current_item[self.label_key]),
        )

    def get_http_headers(self):
        """
        Return a dictionary that will be added to the HTTP request to the API

        You can overwrite this method, that return an empty dict by default.
        """
        return {}

    def get_http_result(self, http_result):
        """
        Return an iterable with all the result items in.

        You can override/overwrite this method to adapt it to the payload
        returned by the 3rd party API.
        """
        return http_result.get(self.data_key, [])

    def get_http_call_kwargs(self, query, **kwargs):
        """
        Return the HTTP query arguments.

        You can override this method to pass further arguments corresponding
        to your search_url.
        """
        return {'q': query}

    def items(self, query=None, **kwargs):
        """Search the remote API and return the reshaped result items."""
        if not self.is_valid_query(query):
            return []
        # Call to search URL
        http_result = self.http_call(
            **self.get_http_call_kwargs(query, **kwargs)
        )
        # In case of error, on the API side, the error is raised and handled
        # in the view.
        http_result = self.get_http_result(http_result)
        result = []
        for item in http_result:
            # Eventual result reshaping.
            try:
                result.append(self.item(item))
            except SkipItem:
                continue
        return result

    def selected(self, ids):
        """Return (value, label) tuples for the given IDs, one HTTP call per ID."""
        data = []
        # Filter out "falsy IDs" (empty string, None, 0...)
        ids = filter(lambda x: x, ids)
        for _id in ids:
            if _id:
                # Call to the item URL
                result = self.http_call(url=self.get_item_url(pk=_id))
                if self.data_key in result and len(result[self.data_key]):
                    for item in result[self.data_key]:
                        data.append(
                            (
                                text(item[self.value_key]),
                                text(item[self.label_key])
                            )
                        )
        return data

    def validate(self, value):
        """
        From a value available on the remote server, the method returns the
        complete item matching the value.

        If case the value is not available on the server side or filtered
        through :meth:`item`, the class:`agnocomplete.exceptions.ItemNotFound`
        is raised.
        """
        url = self.get_item_url(value)
        try:
            data = self.http_call(url=url)
        except requests.HTTPError:
            raise ItemNotFound()
        data = self.get_http_result(data)
        # NOTE(review): get_http_result() normally yields a *list* of items,
        # while item() indexes by key — this assumes the single-item endpoint
        # returns a dict under data_key; confirm against the target API.
        try:
            self.item(data)
        except SkipItem:
            raise ItemNotFound()
        return value
| {
"content_hash": "c9ad27ad3503821c86928958f50d91a5",
"timestamp": "",
"source": "github",
"line_count": 669,
"max_line_length": 79,
"avg_line_length": 29.965620328849027,
"alnum_prop": 0.581433630967227,
"repo_name": "novafloss/django-agnocomplete",
"id": "4fbcd5062c8b70c63520b075ea216a0f5bca83a8",
"size": "20047",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "agnocomplete/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "766"
},
{
"name": "Python",
"bytes": "42627"
}
],
"symlink_target": ""
} |
import os
from django.db import models
from django.db.models.signals import post_delete
from django.dispatch import receiver
from .base import Pessoa
from djangosige.apps.login.models import Usuario
from djangosige.configs.settings import MEDIA_ROOT
def logo_directory_path(instance, filename):
    """Build the upload path for a company logo.

    The stored name embeds the company's ``nome_razao_social`` and its id,
    and keeps the uploaded file's original extension.
    """
    _, extension = os.path.splitext(filename)
    return f'imagens/empresas/logo_{instance.nome_razao_social}_{instance.id}{extension}'
class Empresa(Pessoa):
    """Company record, extending the base ``Pessoa`` with a logo and tax ids."""

    # Company logo; falls back to the shared default image.
    logo_file = models.ImageField(
        upload_to=logo_directory_path, default='imagens/logo.png', blank=True, null=True)
    # CNAE: national economic-activity classification code.
    cnae = models.CharField(max_length=10, blank=True, null=True)
    # IEST: substitute-taxpayer state registration.
    iest = models.CharField(max_length=32, null=True, blank=True)

    class Meta:
        verbose_name = "Empresa"

    @property
    def caminho_completo_logo(self):
        """Absolute filesystem path of the logo, or '' when it is the default."""
        if self.logo_file.name != 'imagens/logo.png':
            return os.path.join(MEDIA_ROOT, self.logo_file.name)
        else:
            return ''

    def save(self, *args, **kwargs):
        # Delete the previously stored logo when it is being replaced,
        # but never the shared default image.
        try:
            obj = Empresa.objects.get(id=self.id)
            if obj.logo_file != self.logo_file and obj.logo_file != 'imagens/logo.png':
                obj.logo_file.delete(save=False)
        except Empresa.DoesNotExist:
            # First save (or id not yet assigned): nothing to clean up.
            # Narrowed from a bare `except:` so real errors are not swallowed.
            pass
        super(Empresa, self).save(*args, **kwargs)

    def __unicode__(self):
        return u'%s' % self.nome_razao_social

    def __str__(self):
        return u'%s' % self.nome_razao_social
# Remove the logo file from disk when the company row is deleted.
@receiver(post_delete, sender=Empresa)
def logo_post_delete_handler(sender, instance, **kwargs):
    """Delete the company's logo file after the Empresa record is removed."""
    # Never delete the shared default image 'logo.png'.
    if instance.logo_file == 'imagens/logo.png':
        return
    instance.logo_file.delete(False)
class MinhaEmpresa(models.Model):
    # Associates a user with the company ("minha empresa") they operate as.
    # Company side of the link; optional.
    m_empresa = models.ForeignKey(
        Empresa, on_delete=models.CASCADE, related_name='minha_empresa', blank=True, null=True)
    # User side of the link; required.
    m_usuario = models.ForeignKey(
        Usuario, on_delete=models.CASCADE, related_name='empresa_usuario')
| {
"content_hash": "4f7f5250b021c36a93ee28c4ed326a31",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 104,
"avg_line_length": 32.55555555555556,
"alnum_prop": 0.666991711360312,
"repo_name": "thiagopena/djangoSIGE",
"id": "dad1a8de5ee8de72b6f64a27a1748f857e46a923",
"size": "2076",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "djangosige/apps/cadastro/models/empresa.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "62842"
},
{
"name": "Dockerfile",
"bytes": "400"
},
{
"name": "HTML",
"bytes": "387328"
},
{
"name": "JavaScript",
"bytes": "188596"
},
{
"name": "Makefile",
"bytes": "313"
},
{
"name": "Python",
"bytes": "805518"
}
],
"symlink_target": ""
} |
import theano.tensor as tt
import pymc3 as pm
class HotBinomial(pm.Binomial):
    """Tempered ("hot") Binomial distribution.

    Behaves exactly like ``pm.Binomial`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotBinomial, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotBinomial, self).logp(value)
        return self.beta_temp * cold_logp
class HotBetaBinomial(pm.BetaBinomial):
    """Tempered ("hot") BetaBinomial distribution.

    Behaves exactly like ``pm.BetaBinomial`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotBetaBinomial, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotBetaBinomial, self).logp(value)
        return self.beta_temp * cold_logp
class HotBernoulli(pm.Bernoulli):
    """Tempered ("hot") Bernoulli distribution.

    Behaves exactly like ``pm.Bernoulli`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotBernoulli, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotBernoulli, self).logp(value)
        return self.beta_temp * cold_logp
class HotDiscreteWeibull(pm.DiscreteWeibull):
    """Tempered ("hot") DiscreteWeibull distribution.

    Behaves exactly like ``pm.DiscreteWeibull`` except that its
    log-probability is multiplied by the inverse temperature ``beta_temp``
    (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotDiscreteWeibull, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotDiscreteWeibull, self).logp(value)
        return self.beta_temp * cold_logp
class HotPoisson(pm.Poisson):
    """Tempered ("hot") Poisson distribution.

    Behaves exactly like ``pm.Poisson`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotPoisson, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotPoisson, self).logp(value)
        return self.beta_temp * cold_logp
class HotNegativeBinomial(pm.NegativeBinomial):
    """Tempered ("hot") NegativeBinomial distribution.

    Behaves exactly like ``pm.NegativeBinomial`` except that its
    log-probability is multiplied by the inverse temperature ``beta_temp``
    (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotNegativeBinomial, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotNegativeBinomial, self).logp(value)
        return self.beta_temp * cold_logp
class HotConstantDist(pm.ConstantDist):
    """Tempered ("hot") ConstantDist distribution.

    Behaves exactly like ``pm.ConstantDist`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotConstantDist, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotConstantDist, self).logp(value)
        return self.beta_temp * cold_logp
class HotConstant(pm.Constant):
    """Tempered ("hot") Constant distribution.

    Behaves exactly like ``pm.Constant`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotConstant, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotConstant, self).logp(value)
        return self.beta_temp * cold_logp
class HotZeroInflatedPoisson(pm.ZeroInflatedPoisson):
    """Tempered ("hot") ZeroInflatedPoisson distribution.

    Behaves exactly like ``pm.ZeroInflatedPoisson`` except that its
    log-probability is multiplied by the inverse temperature ``beta_temp``
    (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotZeroInflatedPoisson, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotZeroInflatedPoisson, self).logp(value)
        return self.beta_temp * cold_logp
class HotZeroInflatedBinomial(pm.ZeroInflatedBinomial):
    """Tempered ("hot") ZeroInflatedBinomial distribution.

    Behaves exactly like ``pm.ZeroInflatedBinomial`` except that its
    log-probability is multiplied by the inverse temperature ``beta_temp``
    (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotZeroInflatedBinomial, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotZeroInflatedBinomial, self).logp(value)
        return self.beta_temp * cold_logp
class HotZeroInflatedNegativeBinomial(pm.ZeroInflatedNegativeBinomial):
    """Tempered ("hot") ZeroInflatedNegativeBinomial distribution.

    Behaves exactly like ``pm.ZeroInflatedNegativeBinomial`` except that its
    log-probability is multiplied by the inverse temperature ``beta_temp``
    (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotZeroInflatedNegativeBinomial, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotZeroInflatedNegativeBinomial, self).logp(value)
        return self.beta_temp * cold_logp
class HotDiscreteUniform(pm.DiscreteUniform):
    """Tempered ("hot") DiscreteUniform distribution.

    Behaves exactly like ``pm.DiscreteUniform`` except that its
    log-probability is multiplied by the inverse temperature ``beta_temp``
    (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotDiscreteUniform, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotDiscreteUniform, self).logp(value)
        return self.beta_temp * cold_logp
class HotGeometric(pm.Geometric):
    """Tempered ("hot") Geometric distribution.

    Behaves exactly like ``pm.Geometric`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotGeometric, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotGeometric, self).logp(value)
        return self.beta_temp * cold_logp
class HotCategorical(pm.Categorical):
    """Tempered ("hot") Categorical distribution.

    Behaves exactly like ``pm.Categorical`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotCategorical, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotCategorical, self).logp(value)
        return self.beta_temp * cold_logp
class HotUniform(pm.Uniform):
    """Tempered ("hot") Uniform distribution.

    Behaves exactly like ``pm.Uniform`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotUniform, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotUniform, self).logp(value)
        return self.beta_temp * cold_logp
class HotFlat(pm.Flat):
    """Tempered ("hot") Flat distribution.

    Behaves exactly like ``pm.Flat`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotFlat, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotFlat, self).logp(value)
        return self.beta_temp * cold_logp
class HotHalfFlat(pm.HalfFlat):
    """Tempered ("hot") HalfFlat distribution.

    Behaves exactly like ``pm.HalfFlat`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotHalfFlat, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotHalfFlat, self).logp(value)
        return self.beta_temp * cold_logp
class HotNormal(pm.Normal):
    """Tempered ("hot") Normal distribution.

    Behaves exactly like ``pm.Normal`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotNormal, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotNormal, self).logp(value)
        return self.beta_temp * cold_logp
class HotBeta(pm.Beta):
    """Tempered ("hot") Beta distribution.

    Behaves exactly like ``pm.Beta`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotBeta, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotBeta, self).logp(value)
        return self.beta_temp * cold_logp
class HotExponential(pm.Exponential):
    """Tempered ("hot") Exponential distribution.

    Behaves exactly like ``pm.Exponential`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotExponential, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotExponential, self).logp(value)
        return self.beta_temp * cold_logp
class HotLaplace(pm.Laplace):
    """Tempered ("hot") Laplace distribution.

    Behaves exactly like ``pm.Laplace`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotLaplace, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotLaplace, self).logp(value)
        return self.beta_temp * cold_logp
class HotStudentT(pm.StudentT):
    """Tempered ("hot") StudentT distribution.

    Behaves exactly like ``pm.StudentT`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotStudentT, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotStudentT, self).logp(value)
        return self.beta_temp * cold_logp
class HotCauchy(pm.Cauchy):
    """Tempered ("hot") Cauchy distribution.

    Behaves exactly like ``pm.Cauchy`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotCauchy, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotCauchy, self).logp(value)
        return self.beta_temp * cold_logp
class HotHalfCauchy(pm.HalfCauchy):
    """Tempered ("hot") HalfCauchy distribution.

    Behaves exactly like ``pm.HalfCauchy`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotHalfCauchy, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotHalfCauchy, self).logp(value)
        return self.beta_temp * cold_logp
class HotGamma(pm.Gamma):
    """Tempered ("hot") Gamma distribution.

    Behaves exactly like ``pm.Gamma`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotGamma, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotGamma, self).logp(value)
        return self.beta_temp * cold_logp
class HotWeibull(pm.Weibull):
    """Tempered ("hot") Weibull distribution.

    Behaves exactly like ``pm.Weibull`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotWeibull, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotWeibull, self).logp(value)
        return self.beta_temp * cold_logp
# NOTE(review): unlike every Hot* class above, this is a plain StudentT
# bounded to x >= 0 with NO beta_temp tempering of logp — confirm whether
# a tempered half-StudentT was intended here.
HotHalfStudentT = pm.Bound(pm.StudentT, lower=0)
class HotLognormal(pm.Lognormal):
    """Tempered ("hot") Lognormal distribution.

    Behaves exactly like ``pm.Lognormal`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotLognormal, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotLognormal, self).logp(value)
        return self.beta_temp * cold_logp
class HotChiSquared(pm.ChiSquared):
    """Tempered ("hot") ChiSquared distribution.

    Behaves exactly like ``pm.ChiSquared`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotChiSquared, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotChiSquared, self).logp(value)
        return self.beta_temp * cold_logp
class HotHalfNormal(pm.HalfNormal):
    """Tempered ("hot") HalfNormal distribution.

    Behaves exactly like ``pm.HalfNormal`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotHalfNormal, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotHalfNormal, self).logp(value)
        return self.beta_temp * cold_logp
class HotWald(pm.Wald):
    """Tempered ("hot") Wald distribution.

    Behaves exactly like ``pm.Wald`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotWald, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotWald, self).logp(value)
        return self.beta_temp * cold_logp
class HotPareto(pm.Pareto):
    """Tempered ("hot") Pareto distribution.

    Behaves exactly like ``pm.Pareto`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotPareto, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotPareto, self).logp(value)
        return self.beta_temp * cold_logp
class HotInverseGamma(pm.InverseGamma):
    """Tempered ("hot") InverseGamma distribution.

    Behaves exactly like ``pm.InverseGamma`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotInverseGamma, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotInverseGamma, self).logp(value)
        return self.beta_temp * cold_logp
class HotExGaussian(pm.ExGaussian):
    """Tempered ("hot") ExGaussian distribution.

    Behaves exactly like ``pm.ExGaussian`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotExGaussian, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotExGaussian, self).logp(value)
        return self.beta_temp * cold_logp
class HotVonMises(pm.VonMises):
    """Tempered ("hot") VonMises distribution.

    Behaves exactly like ``pm.VonMises`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotVonMises, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotVonMises, self).logp(value)
        return self.beta_temp * cold_logp
class HotSkewNormal(pm.SkewNormal):
    """Tempered ("hot") SkewNormal distribution.

    Behaves exactly like ``pm.SkewNormal`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotSkewNormal, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotSkewNormal, self).logp(value)
        return self.beta_temp * cold_logp
class HotLogistic(pm.Logistic):
    """Tempered ("hot") Logistic distribution.

    Behaves exactly like ``pm.Logistic`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotLogistic, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotLogistic, self).logp(value)
        return self.beta_temp * cold_logp
class HotInterpolated(pm.Interpolated):
    """Tempered ("hot") Interpolated distribution.

    Behaves exactly like ``pm.Interpolated`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotInterpolated, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotInterpolated, self).logp(value)
        return self.beta_temp * cold_logp
class HotMvNormal(pm.MvNormal):
    """Tempered ("hot") MvNormal distribution.

    Behaves exactly like ``pm.MvNormal`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotMvNormal, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotMvNormal, self).logp(value)
        return self.beta_temp * cold_logp
class HotMvStudentT(pm.MvStudentT):
    """Tempered ("hot") MvStudentT distribution.

    Behaves exactly like ``pm.MvStudentT`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotMvStudentT, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotMvStudentT, self).logp(value)
        return self.beta_temp * cold_logp
class HotDirichlet(pm.Dirichlet):
    """Tempered ("hot") Dirichlet distribution.

    Behaves exactly like ``pm.Dirichlet`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotDirichlet, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotDirichlet, self).logp(value)
        return self.beta_temp * cold_logp
class HotMultinomial(pm.Multinomial):
    """Tempered ("hot") Multinomial distribution.

    Behaves exactly like ``pm.Multinomial`` except that its log-probability
    is multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotMultinomial, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotMultinomial, self).logp(value)
        return self.beta_temp * cold_logp
class HotWishart(pm.Wishart):
    """Tempered ("hot") Wishart distribution.

    Behaves exactly like ``pm.Wishart`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotWishart, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotWishart, self).logp(value)
        return self.beta_temp * cold_logp
class HotLKJCorr(pm.LKJCorr):
    """Tempered ("hot") LKJCorr distribution.

    Behaves exactly like ``pm.LKJCorr`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotLKJCorr, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotLKJCorr, self).logp(value)
        return self.beta_temp * cold_logp
class HotLKJCholeskyCov(pm.LKJCholeskyCov):
    """Tempered ("hot") LKJCholeskyCov distribution.

    Behaves exactly like ``pm.LKJCholeskyCov`` except that its
    log-probability is multiplied by the inverse temperature ``beta_temp``
    (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, *args, **kwargs):
        super(HotLKJCholeskyCov, self).__init__(*args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotLKJCholeskyCov, self).logp(value)
        return self.beta_temp * cold_logp
class HotMixture(pm.Mixture):
    """Tempered ("hot") Mixture distribution.

    Behaves exactly like ``pm.Mixture`` except that its log-probability is
    multiplied by the inverse temperature ``beta_temp`` (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, w, comp_dists, *args, **kwargs):
        super(HotMixture, self).__init__(w, comp_dists, *args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotMixture, self).logp(value)
        return self.beta_temp * cold_logp
class HotNormalMixture(pm.NormalMixture):
    """Tempered ("hot") NormalMixture distribution.

    Behaves exactly like ``pm.NormalMixture`` except that its
    log-probability is multiplied by the inverse temperature ``beta_temp``
    (0 < beta_temp <= 1).
    """

    def __init__(self, beta_temp, w, mu, *args, **kwargs):
        super(HotNormalMixture, self).__init__(w, mu, *args, **kwargs)
        if not 0 < beta_temp <= 1:
            raise RuntimeError('Must have 0 < beta_temp ≤ 1.')
        self.beta_temp = beta_temp

    def logp(self, value):
        # Scale the untempered log-probability by the inverse temperature.
        cold_logp = super(HotNormalMixture, self).logp(value)
        return self.beta_temp * cold_logp
| {
"content_hash": "2f8a7d82d8186f395e049bbbc39ed86e",
"timestamp": "",
"source": "github",
"line_count": 1111,
"max_line_length": 81,
"avg_line_length": 27.77047704770477,
"alnum_prop": 0.5752114867273846,
"repo_name": "justinbois/bebi103_utils",
"id": "6047b7ab6302db32fbff9953a93b760b93b745f3",
"size": "30945",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bebi103/deprecated/hotdists.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "63875"
}
],
"symlink_target": ""
} |
from pyspark.ml.param import *
from ai.h2o.sparkling.ml.params.H2ODimReductionExtraParamsOnMOJO import H2ODimReductionExtraParamsOnMOJO
class H2OGLRMExtraParamsOnMOJO(H2ODimReductionExtraParamsOnMOJO):
    """GLRM-specific parameter accessors delegated to the wrapped Java MOJO.

    Each getter/setter forwards to ``self._java_obj``; setters return
    ``self`` so calls can be chained.
    """

    def getReconstructedCol(self):
        """Return the name of the column that holds the reconstructed data."""
        return self._java_obj.getReconstructedCol()

    def getWithReconstructedCol(self):
        """Return whether the reconstructed column is added to the output."""
        return self._java_obj.getWithReconstructedCol()

    # Backward-compatible alias: this accessor was originally published under
    # a misspelled name (stray 'l' in "Reconstructedl"), inconsistent with its
    # setter `setWithReconstructedCol`. Keep the old name so callers don't break.
    getWithReconstructedlCol = getWithReconstructedCol

    def getMaxScoringIterations(self):
        """Return the maximum number of scoring iterations."""
        return self._java_obj.getMaxScoringIterations()

    def setReconstructedCol(self, value):
        """Set the reconstructed-data column name; return ``self``."""
        self._java_obj.setReconstructedCol(value)
        return self

    def setWithReconstructedCol(self, value):
        """Enable/disable adding the reconstructed column; return ``self``."""
        self._java_obj.setWithReconstructedCol(value)
        return self

    def setMaxScoringIterations(self, value):
        """Set the maximum number of scoring iterations; return ``self``."""
        self._java_obj.setMaxScoringIterations(value)
        return self
| {
"content_hash": "e5c5c6f7cbc21657741a1e408efd597d",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 104,
"avg_line_length": 33.56,
"alnum_prop": 0.7485101311084624,
"repo_name": "h2oai/sparkling-water",
"id": "cd7d578a055a144645096db0ab83992818e18bd4",
"size": "1624",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py-scoring/src/ai/h2o/sparkling/ml/params/H2OGLRMExtraParamsOnMOJO.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8719"
},
{
"name": "CSS",
"bytes": "4539"
},
{
"name": "Groovy",
"bytes": "122809"
},
{
"name": "HCL",
"bytes": "44156"
},
{
"name": "Java",
"bytes": "35009"
},
{
"name": "Python",
"bytes": "442716"
},
{
"name": "R",
"bytes": "63088"
},
{
"name": "Scala",
"bytes": "1720448"
},
{
"name": "Shell",
"bytes": "29726"
},
{
"name": "TeX",
"bytes": "132310"
}
],
"symlink_target": ""
} |
from .aniversariantes import *
from .cria_usuario import *
from .views import *
| {
"content_hash": "86fb670e36d0280e46f770699438ea97",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 30,
"avg_line_length": 26.666666666666668,
"alnum_prop": 0.7625,
"repo_name": "anselmobd/fo2",
"id": "74a7365dc6538d3ac414c7bb791c7d4d9e113daf",
"size": "80",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/persona/views/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "160899"
},
{
"name": "HTML",
"bytes": "855985"
},
{
"name": "JavaScript",
"bytes": "203109"
},
{
"name": "PLSQL",
"bytes": "2762"
},
{
"name": "Python",
"bytes": "3228268"
},
{
"name": "Shell",
"bytes": "2161"
}
],
"symlink_target": ""
} |
def jungle_animal(animal, my_speed):
    """Print an appropriate reaction to meeting *animal* in the jungle.

    Parameters
    ----------
    animal : str
        Name of the animal encountered (e.g. 'zebra', 'cheetah').
    my_speed : int or float
        Your speed; only relevant when facing a cheetah, whose top speed
        is taken to be 115 (same threshold as the original checks).
    """
    if animal == "zebra":
        text = "Try to ride a zebra!"
    elif animal == "cheetah":
        # A single elif branch replaces the two duplicated 'cheetah' checks.
        text = "Run!" if my_speed > 115 else "Stay calm and wait!"
    else:
        text = "Introduce yourself!"
    # print(x) with one argument behaves identically under Python 2
    # (parenthesised expression) and Python 3 (function call).
    print(text)
#jungle_animal('cheetah', 30)
#>>> "Stay calm and wait!"
#jungle_animal('gorilla', 21)
#>>> "Introduce yourself!"
| {
"content_hash": "cacf777743b645d9af3889c1b0f0aa4f",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 47,
"avg_line_length": 25.941176470588236,
"alnum_prop": 0.5850340136054422,
"repo_name": "ezralalonde/cloaked-octo-sansa",
"id": "fbc2adb212b40580c42c54130acdfdae24cf383e",
"size": "987",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "02/ex2/04.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "130753"
}
],
"symlink_target": ""
} |
# Sphinx build configuration for the phpMyAdmin documentation.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Make the project's local Sphinx extensions (in _ext/) importable.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "_ext")))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['configext']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'phpMyAdmin'
copyright = u'2012 - 2014, The phpMyAdmin devel team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '4.3.10'
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'html', 'doctrees']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# 'default' is the classic built-in Sphinx theme.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'phpMyAdmindoc'
# -- Options for LaTeX output ---------------------------------------------
# Keys left commented out fall back to the Sphinx LaTeX defaults.
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
  ('index', 'phpMyAdmin.tex', u'phpMyAdmin Documentation',
   u'The phpMyAdmin devel team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'phpmyadmin', u'phpMyAdmin Documentation',
     [u'The phpMyAdmin devel team'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
  ('index', 'phpMyAdmin', u'phpMyAdmin Documentation',
   u'The phpMyAdmin devel team', 'phpMyAdmin', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'phpMyAdmin'
epub_author = u'The phpMyAdmin devel team'
epub_publisher = u'The phpMyAdmin devel team'
# Reuses the copyright string defined in the general configuration above.
epub_copyright = copyright
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Highlight PHP without starting <?php tag
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
# startinline=True makes Pygments lex snippets as if already inside PHP code.
lexers['php'] = PhpLexer(startinline=True)
| {
"content_hash": "b5b30f2e78020c543d46d5b064d43ffb",
"timestamp": "",
"source": "github",
"line_count": 291,
"max_line_length": 82,
"avg_line_length": 31.786941580756015,
"alnum_prop": 0.707891891891892,
"repo_name": "jarlyyn/jarlyyn-docker-image",
"id": "fec0279f6b5fafc78e9c90865ce256d649609c3d",
"size": "9671",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "image/dockerfile/dockertools/app/phpmyadmin/doc/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "1557"
},
{
"name": "CSS",
"bytes": "40634"
},
{
"name": "HTML",
"bytes": "784476"
},
{
"name": "JavaScript",
"bytes": "4803860"
},
{
"name": "Makefile",
"bytes": "6774"
},
{
"name": "PHP",
"bytes": "7019962"
},
{
"name": "Perl",
"bytes": "39"
},
{
"name": "Python",
"bytes": "16293"
},
{
"name": "Shell",
"bytes": "10796"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import

from setuptools import setup, find_packages

# Read the long description from the README. Use a context manager so the
# file handle is closed promptly instead of leaking until garbage collection.
with open('README.md') as f:
    readme = f.read()

setup(
    name='fabcloudkit',
    version='0.025',
    url='http://github.com/waxkinetic/fabcloudkit',
    license='BSD',
    author='Rick Bohrer',
    author_email='waxkinetic@gmail.com',
    description='An AWS provisioning, build, and deployment library built on Fabric and Boto.',
    long_description=readme,
    zip_safe=False,
    include_package_data=True,
    packages=find_packages(),
    setup_requires=[
        'setuptools-git >= 1.0b1'
    ],
    install_requires=[
        'boto >= 2.7.0',
        'fabric >= 1.5.2',
        'pyaml >= 13.01.0'
    ]
)
| {
"content_hash": "3c8fbeb4f2cdf772cb8f6904fe49455f",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 95,
"avg_line_length": 21.03030303030303,
"alnum_prop": 0.6210374639769453,
"repo_name": "waxkinetic/fabcloudkit",
"id": "b462915472528f609cbfc71baf352688240b0bd9",
"size": "694",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "103982"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import djbetty.fields
class Migration(migrations.Migration):
    """Initial migration creating the ``VideohubVideo`` model/table.

    Auto-generated by Django. NOTE(review): ``id`` is a plain IntegerField
    primary key rather than an AutoField — presumably the id is assigned by
    an external videohub service; confirm with callers before changing.
    """
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='VideohubVideo',
            fields=[
                ('id', models.IntegerField(serialize=False, primary_key=True)),
                ('title', models.CharField(max_length=512)),
                ('description', models.TextField(blank=True, default='')),
                ('keywords', models.TextField(blank=True, default='')),
                # djbetty image field with its hidden alt/caption companion columns.
                ('image', djbetty.fields.ImageField(blank=True, null=True, alt_field='_image_alt', default=None, caption_field='_image_caption')),
                ('_image_alt', models.CharField(blank=True, null=True, max_length=255, editable=False)),
                ('_image_caption', models.CharField(blank=True, null=True, max_length=255, editable=False)),
                ('channel_id', models.IntegerField(blank=True, null=True, default=None)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| {
"content_hash": "4a296002b94715fe8e153168cc861012",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 146,
"avg_line_length": 38.724137931034484,
"alnum_prop": 0.5779162956366874,
"repo_name": "theonion/django-bulbs",
"id": "e5499b5f1934046d4baebfae4bb28fabdd67b86f",
"size": "1147",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bulbs/videos/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "36651"
},
{
"name": "HTML",
"bytes": "73968"
},
{
"name": "JavaScript",
"bytes": "57288"
},
{
"name": "Python",
"bytes": "1055540"
},
{
"name": "Ruby",
"bytes": "397"
},
{
"name": "Shell",
"bytes": "1629"
}
],
"symlink_target": ""
} |
from tests import unittest
from botocore import model
from botocore.paginate import Paginator
from botocore.paginate import PaginatorModel
from botocore.paginate import TokenDecoder
from botocore.paginate import TokenEncoder
from botocore.exceptions import PaginationError
from botocore.compat import six
import mock
def encode_token(token):
    """Serialise *token* into its opaque string form using TokenEncoder."""
    encoder = TokenEncoder()
    return encoder.encode(token)
class TestTokenDecoder(unittest.TestCase):
    """TokenDecoder turns opaque pagination tokens back into dicts.

    Each token literal below is a base64 blob paired with the dict it is
    expected to decode to; several cases exercise bytes values that must
    be restored at top level, nested in dicts, or inside lists.
    """
    def setUp(self):
        self.decoder = TokenDecoder()
    def test_decode(self):
        """A token with only JSON-native values decodes directly."""
        token = 'eyJmb28iOiAiYmFyIn0='
        expected = {'foo': 'bar'}
        self.assertEqual(self.decoder.decode(token), expected)
    def test_decode_with_bytes(self):
        """A top-level bytes value is restored."""
        token = (
            'eyJib3RvX2VuY29kZWRfa2V5cyI6IFtbImZvbyJdXSwgImZvbyI6ICJZbUZ5In0='
        )
        expected = {'foo': b'bar'}
        self.assertEqual(self.decoder.decode(token), expected)
    def test_decode_with_nested_bytes(self):
        """A bytes value nested inside a dict is restored."""
        token = (
            'eyJmb28iOiB7ImJhciI6ICJZbUY2In0sICJib3RvX2VuY29kZWRfa2V5cyI6'
            'IFtbImZvbyIsICJiYXIiXV19'
        )
        expected = {'foo': {'bar': b'baz'}}
        self.assertEqual(self.decoder.decode(token), expected)
    def test_decode_with_listed_bytes(self):
        """A bytes value inside a list is restored."""
        token = (
            'eyJib3RvX2VuY29kZWRfa2V5cyI6IFtbImZvbyIsICJiYXIiLCAxXV0sICJmb28i'
            'OiB7ImJhciI6IFsiYmF6IiwgIlltbHUiXX19'
        )
        expected = {'foo': {'bar': ['baz', b'bin']}}
        self.assertEqual(self.decoder.decode(token), expected)
    def test_decode_with_multiple_bytes_values(self):
        """Several bytes values in one payload are all restored."""
        token = (
            'eyJib3RvX2VuY29kZWRfa2V5cyI6IFtbImZvbyIsICJiaW4iXSwgWyJmb28iLCAi'
            'YmFyIl1dLCAiZm9vIjogeyJiaW4iOiAiWW1GdCIsICJiYXIiOiAiWW1GNiJ9fQ=='
        )
        expected = {'foo': {'bar': b'baz', 'bin': b'bam'}}
        self.assertEqual(self.decoder.decode(token), expected)
class TestPaginatorModel(unittest.TestCase):
    """Looking up per-operation pagination configs on PaginatorModel."""

    def setUp(self):
        list_foos_config = {
            'output_token': 'NextToken',
            'input_token': 'NextToken',
            'result_key': 'Foo',
        }
        self.paginator_config = {'pagination': {'ListFoos': list_foos_config}}
        self.paginator_model = PaginatorModel(self.paginator_config)

    def test_get_paginator(self):
        """A known operation name returns its pagination config dict."""
        expected = {
            'output_token': 'NextToken',
            'input_token': 'NextToken',
            'result_key': 'Foo',
        }
        self.assertEqual(
            self.paginator_model.get_paginator('ListFoos'), expected)

    def test_get_paginator_no_exists(self):
        """An unknown operation name is rejected with ValueError."""
        with self.assertRaises(ValueError):
            self.paginator_model.get_paginator('ListBars')
class TestPagination(unittest.TestCase):
    """Core Paginator behaviour: token extraction, stopping, result building.

    ``self.method`` stands in for the client operation; its queued
    ``side_effect`` responses simulate successive service pages.
    """
    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        self.paginate_config = {
            'output_token': 'NextToken',
            'input_token': 'NextToken',
            'result_key': 'Foo',
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)
    def test_result_key_available(self):
        """The configured result key is exposed as a parsed expression."""
        self.assertEqual(
            [rk.expression for rk in self.paginator.result_keys],
            ['Foo']
        )
    def test_no_next_token(self):
        """A response without the output token ends pagination after one page."""
        response = {'not_the_next_token': 'foobar'}
        self.method.return_value = response
        actual = list(self.paginator.paginate())
        self.assertEqual(actual, [{'not_the_next_token': 'foobar'}])
    def test_next_token_in_response(self):
        """Each page's NextToken is fed into the following request."""
        responses = [{'NextToken': 'token1'},
                     {'NextToken': 'token2'},
                     {'not_next_token': 'foo'}]
        self.method.side_effect = responses
        actual = list(self.paginator.paginate())
        self.assertEqual(actual, responses)
        # The first call has no next token, the second and third call should
        # have 'token1' and 'token2' respectively.
        self.assertEqual(self.method.call_args_list,
                         [mock.call(), mock.call(NextToken='token1'),
                          mock.call(NextToken='token2')])
    def test_next_token_is_string(self):
        """build_full_result exposes a resumable NextToken as a string."""
        self.paginate_config = {
            "output_token": "Marker",
            "input_token": "Marker",
            "result_key": "Users",
            "limit_key": "MaxKeys",
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)
        responses = [
            {"Users": ["User1"], "Marker": "m1"},
            {"Users": ["User2"], "Marker": "m2"},
            {"Users": ["User3"]}
        ]
        self.method.side_effect = responses
        result = self.paginator.paginate(PaginationConfig={'MaxItems': 1})
        result = result.build_full_result()
        token = result.get('NextToken')
        self.assertIsInstance(token, six.string_types)
    def test_any_passed_in_args_are_unmodified(self):
        """User-supplied kwargs are forwarded untouched on every request."""
        responses = [{'NextToken': 'token1'},
                     {'NextToken': 'token2'},
                     {'not_next_token': 'foo'}]
        self.method.side_effect = responses
        actual = list(self.paginator.paginate(Foo='foo', Bar='bar'))
        self.assertEqual(actual, responses)
        self.assertEqual(
            self.method.call_args_list,
            [mock.call(Foo='foo', Bar='bar'),
             mock.call(Foo='foo', Bar='bar', NextToken='token1'),
             mock.call(Foo='foo', Bar='bar', NextToken='token2')])
    def test_exception_raised_if_same_next_token(self):
        """A repeated token means no progress, so pagination must error out."""
        responses = [{'NextToken': 'token1'},
                     {'NextToken': 'token2'},
                     {'NextToken': 'token2'}]
        self.method.side_effect = responses
        with self.assertRaises(PaginationError):
            list(self.paginator.paginate())
    def test_next_token_with_or_expression(self):
        """An 'A || B' output token uses the first expression that matches."""
        self.pagination_config = {
            'output_token': 'NextToken || NextToken2',
            'input_token': 'NextToken',
            'result_key': 'Foo',
        }
        self.paginator = Paginator(self.method, self.pagination_config, self.model)
        # Verify that despite varying between NextToken and NextToken2
        # we still can extract the right next tokens.
        responses = [
            {'NextToken': 'token1'},
            {'NextToken2': 'token2'},
            # The first match found wins, so because NextToken is
            # listed before NextToken2 in the 'output_tokens' config,
            # 'token3' is chosen over 'token4'.
            {'NextToken': 'token3', 'NextToken2': 'token4'},
            {'not_next_token': 'foo'},
        ]
        self.method.side_effect = responses
        list(self.paginator.paginate())
        self.assertEqual(
            self.method.call_args_list,
            [mock.call(),
             mock.call(NextToken='token1'),
             mock.call(NextToken='token2'),
             mock.call(NextToken='token3')])
    def test_more_tokens(self):
        # Some pagination configs have a 'more_token' key that
        # indicate whether or not the results are being paginated.
        self.paginate_config = {
            'more_results': 'IsTruncated',
            'output_token': 'NextToken',
            'input_token': 'NextToken',
            'result_key': 'Foo',
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)
        responses = [
            {'Foo': [1], 'IsTruncated': True, 'NextToken': 'token1'},
            {'Foo': [2], 'IsTruncated': True, 'NextToken': 'token2'},
            {'Foo': [3], 'IsTruncated': False, 'NextToken': 'token3'},
            {'Foo': [4], 'not_next_token': 'foo'},
        ]
        self.method.side_effect = responses
        list(self.paginator.paginate())
        # Pagination stops at the first page with IsTruncated False, so the
        # fourth queued response is never requested.
        self.assertEqual(
            self.method.call_args_list,
            [mock.call(),
             mock.call(NextToken='token1'),
             mock.call(NextToken='token2')])
    def test_more_tokens_is_path_expression(self):
        """more_results may be a dotted path expression into the response."""
        self.paginate_config = {
            'more_results': 'Foo.IsTruncated',
            'output_token': 'NextToken',
            'input_token': 'NextToken',
            'result_key': 'Bar',
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)
        responses = [
            {'Foo': {'IsTruncated': True}, 'NextToken': 'token1'},
            {'Foo': {'IsTruncated': False}, 'NextToken': 'token2'},
        ]
        self.method.side_effect = responses
        list(self.paginator.paginate())
        self.assertEqual(
            self.method.call_args_list,
            [mock.call(),
             mock.call(NextToken='token1')])
    def test_page_size(self):
        """PageSize is sent to the service via the configured limit_key."""
        self.paginate_config = {
            "output_token": "Marker",
            "input_token": "Marker",
            "result_key": "Users",
            "limit_key": "MaxKeys",
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)
        responses = [
            {"Users": ["User1"], "Marker": "m1"},
            {"Users": ["User2"], "Marker": "m2"},
            {"Users": ["User3"]},
        ]
        self.method.side_effect = responses
        users = []
        for page in self.paginator.paginate(PaginationConfig={'PageSize': 1}):
            users += page['Users']
        self.assertEqual(
            self.method.call_args_list,
            [mock.call(MaxKeys=1),
             mock.call(Marker='m1', MaxKeys=1),
             mock.call(Marker='m2', MaxKeys=1)]
        )
    def test_with_empty_markers(self):
        """An empty-string marker terminates pagination immediately."""
        responses = [
            {"Users": ["User1"], "Marker": ""},
            {"Users": ["User1"], "Marker": ""},
            {"Users": ["User1"], "Marker": ""}
        ]
        self.method.side_effect = responses
        users = []
        for page in self.paginator.paginate():
            users += page['Users']
        # We want to stop paginating if the next token is empty.
        self.assertEqual(
            self.method.call_args_list,
            [mock.call()]
        )
        self.assertEqual(users, ['User1'])
    def test_build_full_result_with_single_key(self):
        """All pages' result_key values are concatenated into one list."""
        self.paginate_config = {
            "output_token": "Marker",
            "input_token": "Marker",
            "result_key": "Users",
            "limit_key": "MaxKeys",
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)
        responses = [
            {"Users": ["User1"], "Marker": "m1"},
            {"Users": ["User2"], "Marker": "m2"},
            {"Users": ["User3"]}
        ]
        self.method.side_effect = responses
        pages = self.paginator.paginate()
        complete = pages.build_full_result()
        self.assertEqual(complete, {'Users': ['User1', 'User2', 'User3']})
    def test_build_multiple_results(self):
        """A truncation-aware NextToken lets a second run resume mid-page."""
        self.paginate_config = {
            "output_token": "Marker",
            "input_token": "Marker",
            "result_key": "Users",
            "limit_key": "MaxKeys",
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)
        max_items = 3
        page_size = 2
        responses = [
            {"Users": ["User1", "User2"], "Marker": "m1"},
            {"Users": ["User3", "User4"], "Marker": "m2"},
            {"Users": ["User3", "User4"], "Marker": "m2"},
            {"Users": ["User5", "User6", "User7"], "Marker": "m3"},
        ]
        self.method.side_effect = responses
        pages = self.paginator.paginate(
            PaginationConfig={
                'PageSize': page_size,
                'MaxItems': max_items
            }
        )
        result = pages.build_full_result()
        pages = self.paginator.paginate(
            PaginationConfig={
                'MaxItems': max_items,
                'PageSize': page_size,
                'StartingToken': result['NextToken']
            }
        )
        result = pages.build_full_result()
        # MaxItems truncated mid-page, so the resumed run's token points at
        # marker 'm2' with a truncation offset of 2 items already consumed.
        expected_token = encode_token({
            'Marker': 'm2',
            'boto_truncate_amount': 2,
        })
        self.assertEqual(expected_token, result['NextToken'])
class TestPaginatorPageSize(unittest.TestCase):
    """How the PageSize pagination option maps onto the service limit_key."""
    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        self.paginate_config = {
            "output_token": "Marker",
            "input_token": "Marker",
            "result_key": ["Users", "Groups"],
            'limit_key': 'MaxKeys',
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)
        # NOTE(review): self.endpoint is never used by these tests.
        self.endpoint = mock.Mock()
    def test_no_page_size(self):
        """Without PageSize, caller kwargs pass through unchanged."""
        kwargs = {'arg1': 'foo', 'arg2': 'bar'}
        ref_kwargs = {'arg1': 'foo', 'arg2': 'bar'}
        pages = self.paginator.paginate(**kwargs)
        pages._inject_starting_params(kwargs)
        self.assertEqual(kwargs, ref_kwargs)
    def test_page_size(self):
        """PageSize is translated into the configured limit_key parameter."""
        kwargs = {'arg1': 'foo', 'arg2': 'bar',
                  'PaginationConfig': {'PageSize': 5}}
        extracted_kwargs = {'arg1': 'foo', 'arg2': 'bar'}
        # Note that ``MaxKeys`` in ``setUp()`` is the parameter used for
        # the page size for pagination.
        ref_kwargs = {'arg1': 'foo', 'arg2': 'bar', 'MaxKeys': 5}
        pages = self.paginator.paginate(**kwargs)
        pages._inject_starting_params(extracted_kwargs)
        self.assertEqual(extracted_kwargs, ref_kwargs)
    def test_page_size_incorrectly_provided(self):
        """Supplying PageSize without a limit_key in the config is an error."""
        kwargs = {'arg1': 'foo', 'arg2': 'bar',
                  'PaginationConfig': {'PageSize': 5}}
        del self.paginate_config['limit_key']
        paginator = Paginator(self.method, self.paginate_config, self.model)
        with self.assertRaises(PaginationError):
            paginator.paginate(**kwargs)
class TestPaginatorWithPathExpressions(unittest.TestCase):
    """Output tokens given as path expressions (S3 ListObjects style)."""
    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        # This is something we'd see in s3 pagination.
        self.paginate_config = {
            'output_token': [
                'NextMarker || ListBucketResult.Contents[-1].Key'],
            'input_token': 'next_marker',
            'result_key': 'Contents',
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)
    def test_s3_list_objects(self):
        """When NextMarker is present it is used directly."""
        responses = [
            {'NextMarker': 'token1'},
            {'NextMarker': 'token2'},
            {'not_next_token': 'foo'}]
        self.method.side_effect = responses
        list(self.paginator.paginate())
        self.assertEqual(
            self.method.call_args_list,
            [mock.call(),
             mock.call(next_marker='token1'),
             mock.call(next_marker='token2')])
    def test_s3_list_object_complex(self):
        """Without NextMarker, the last Contents entry's Key becomes the marker."""
        responses = [
            {'NextMarker': 'token1'},
            {'ListBucketResult': {
                'Contents': [{"Key": "first"}, {"Key": "Last"}]}},
            {'not_next_token': 'foo'}]
        self.method.side_effect = responses
        list(self.paginator.paginate())
        self.assertEqual(
            self.method.call_args_list,
            [mock.call(),
             mock.call(next_marker='token1'),
             mock.call(next_marker='Last')])
class TestBinaryTokens(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
self.paginate_config = {
"output_token": "Marker",
"input_token": "Marker",
"result_key": "Users"
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_build_full_result_with_bytes(self):
responses = [
{"Users": ["User1", "User2"], "Marker": b'\xff'},
{"Users": ["User3", "User4"], "Marker": b'\xfe'},
{"Users": ["User5"]}
]
self.method.side_effect = responses
pages = self.paginator.paginate(PaginationConfig={'MaxItems': 3})
complete = pages.build_full_result()
expected_token = encode_token({
"Marker": b'\xff', "boto_truncate_amount": 1,
})
expected_response = {
"Users": ["User1", "User2", "User3"],
"NextToken": expected_token
}
self.assertEqual(complete, expected_response)
def test_build_full_result_with_nested_bytes(self):
responses = [
{"Users": ["User1", "User2"], "Marker": {'key': b'\xff'}},
{"Users": ["User3", "User4"], "Marker": {'key': b'\xfe'}},
{"Users": ["User5"]}
]
self.method.side_effect = responses
pages = self.paginator.paginate(PaginationConfig={'MaxItems': 3})
complete = pages.build_full_result()
expected_token = encode_token({
"Marker": {'key': b'\xff'}, "boto_truncate_amount": 1,
})
expected_response = {
"Users": ["User1", "User2", "User3"],
"NextToken": expected_token
}
self.assertEqual(complete, expected_response)
def test_build_full_result_with_listed_bytes(self):
responses = [
{"Users": ["User1", "User2"], "Marker": {'key': ['foo', b'\xff']}},
{"Users": ["User3", "User4"], "Marker": {'key': ['foo', b'\xfe']}},
{"Users": ["User5"]}
]
self.method.side_effect = responses
pages = self.paginator.paginate(PaginationConfig={'MaxItems': 3})
complete = pages.build_full_result()
expected_token = encode_token({
"Marker": {'key': ['foo', b'\xff']}, "boto_truncate_amount": 1,
})
expected_response = {
"Users": ["User1", "User2", "User3"],
"NextToken": expected_token
}
self.assertEqual(complete, expected_response)
def test_build_full_result_with_multiple_bytes_values(self):
responses = [
{
"Users": ["User1", "User2"],
"Marker": {'key': b'\xff', 'key2': b'\xef'}
},
{
"Users": ["User3", "User4"],
"Marker": {'key': b'\xfe', 'key2': b'\xee'}
},
{
"Users": ["User5"]
}
]
self.method.side_effect = responses
pages = self.paginator.paginate(PaginationConfig={'MaxItems': 3})
complete = pages.build_full_result()
expected_token = encode_token({
"Marker": {'key': b'\xff', 'key2': b'\xef'},
"boto_truncate_amount": 1,
})
expected_response = {
"Users": ["User1", "User2", "User3"],
"NextToken": expected_token
}
self.assertEqual(complete, expected_response)
def test_resume_with_bytes(self):
responses = [
{"Users": ["User3", "User4"], "Marker": b'\xfe'},
{"Users": ["User5"]}
]
self.method.side_effect = responses
starting_token = encode_token({
"Marker": b'\xff', "boto_truncate_amount": 1,
})
pages = self.paginator.paginate(
PaginationConfig={'StartingToken': starting_token})
complete = pages.build_full_result()
expected_response = {
"Users": ["User4", "User5"]
}
self.assertEqual(complete, expected_response)
self.method.assert_any_call(Marker=b'\xff')
def test_resume_with_nested_bytes(self):
responses = [
{"Users": ["User3", "User4"], "Marker": {'key': b'\xfe'}},
{"Users": ["User5"]}
]
self.method.side_effect = responses
starting_token = encode_token({
"Marker": {'key': b'\xff'}, "boto_truncate_amount": 1,
})
pages = self.paginator.paginate(
PaginationConfig={'StartingToken': starting_token})
complete = pages.build_full_result()
expected_response = {
"Users": ["User4", "User5"]
}
self.assertEqual(complete, expected_response)
self.method.assert_any_call(Marker={'key': b'\xff'})
def test_resume_with_listed_bytes(self):
responses = [
{"Users": ["User3", "User4"], "Marker": {'key': ['bar', b'\xfe']}},
{"Users": ["User5"]}
]
self.method.side_effect = responses
starting_token = encode_token({
"Marker": {'key': ['foo', b'\xff']}, "boto_truncate_amount": 1,
})
pages = self.paginator.paginate(
PaginationConfig={'StartingToken': starting_token})
complete = pages.build_full_result()
expected_response = {
"Users": ["User4", "User5"]
}
self.assertEqual(complete, expected_response)
self.method.assert_any_call(Marker={'key': ['foo', b'\xff']})
def test_resume_with_multiple_bytes_values(self):
    """Several bytes values in one marker dict all survive encoding."""
    self.method.side_effect = [
        {
            "Users": ["User3", "User4"],
            "Marker": {'key': b'\xfe', 'key2': b'\xee'},
        },
        {
            "Users": ["User5"],
        },
    ]
    token = encode_token({
        "Marker": {'key': b'\xff', 'key2': b'\xef'},
        "boto_truncate_amount": 1,
    })
    result = self.paginator.paginate(
        PaginationConfig={'StartingToken': token}).build_full_result()
    self.assertEqual(result, {"Users": ["User4", "User5"]})
    # Verify the follow-up request used the marker from the first page.
    self.method.assert_any_call(Marker={'key': b'\xfe', 'key2': b'\xee'})
class TestMultipleTokens(unittest.TestCase):
    """Pagination where several in/out tokens advance together (s3-style)."""

    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        # Modeled on S3's ListMultipartUploads pagination.
        self.paginate_config = {
            "output_token": ["ListBucketResults.NextKeyMarker",
                             "ListBucketResults.NextUploadIdMarker"],
            "input_token": ["key_marker", "upload_id_marker"],
            "result_key": 'Foo',
        }
        self.paginator = Paginator(self.method, self.paginate_config,
                                   self.model)

    def test_s3_list_multipart_uploads(self):
        """Both output tokens must be forwarded as input on every call."""
        self.method.side_effect = [
            {"Foo": [1], "ListBucketResults": {"NextKeyMarker": "key1",
                                               "NextUploadIdMarker": "up1"}},
            {"Foo": [2], "ListBucketResults": {"NextKeyMarker": "key2",
                                               "NextUploadIdMarker": "up2"}},
            {"Foo": [3], "ListBucketResults": {"NextKeyMarker": "key3",
                                               "NextUploadIdMarker": "up3"}},
            {},
        ]
        # Consume every page, then inspect the recorded calls.
        list(self.paginator.paginate())
        expected_calls = [
            mock.call(),
            mock.call(key_marker='key1', upload_id_marker='up1'),
            mock.call(key_marker='key2', upload_id_marker='up2'),
            mock.call(key_marker='key3', upload_id_marker='up3'),
        ]
        self.assertEqual(self.method.call_args_list, expected_calls)
class TestOptionalTokens(unittest.TestCase):
    """Tests a paginator whose output tokens are not always present.

    Route53's ListResourceRecordSets paginator declares three output
    tokens, one of which only appears in certain records.  A stale value
    left in the request params from a previous page would make the API
    skip a record, so absent tokens must be cleaned from the request.
    """

    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        # Modeled on Route53 pagination.
        self.paginate_config = {
            "output_token": ["NextRecordName",
                            "NextRecordType",
                            "NextRecordIdentifier"],
            "input_token": ["StartRecordName",
                           "StartRecordType",
                           "StartRecordIdentifier"],
            "result_key": 'Foo',
        }
        self.paginator = Paginator(self.method, self.paginate_config,
                                   self.model)

    def test_clean_token(self):
        """A token missing on a later page must not leak into requests."""
        self.method.side_effect = [
            {"Foo": [1],
             "IsTruncated": True,
             "NextRecordName": "aaa.example.com",
             "NextRecordType": "A",
             "NextRecordIdentifier": "id"},
            # This page omits NextRecordIdentifier entirely.
            {"Foo": [2],
             "IsTruncated": True,
             "NextRecordName": "bbb.example.com",
             "NextRecordType": "A"},
            {"Foo": [3],
             "IsTruncated": False},
        ]
        list(self.paginator.paginate())
        expected_calls = [
            mock.call(),
            mock.call(StartRecordName='aaa.example.com',
                      StartRecordType='A',
                      StartRecordIdentifier='id'),
            mock.call(StartRecordName='bbb.example.com',
                      StartRecordType='A'),
        ]
        self.assertEqual(self.method.call_args_list, expected_calls)
class TestKeyIterators(unittest.TestCase):
    """Single-result-key pagination: iteration, MaxItems truncation, and
    resuming from encoded tokens."""

    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        # This is something we'd see in s3 pagination.
        self.paginate_config = {
            "output_token": "Marker",
            "input_token": "Marker",
            "result_key": "Users"
        }
        self.paginator = Paginator(self.method, self.paginate_config,
                                   self.model)

    def test_result_key_iters(self):
        """A single result key yields exactly one result iterator."""
        responses = [
            {"Users": ["User1"], "Marker": "m1"},
            {"Users": ["User2"], "Marker": "m2"},
            {"Users": ["User3"]},
        ]
        self.method.side_effect = responses
        pages = self.paginator.paginate()
        iterators = pages.result_key_iters()
        self.assertEqual(len(iterators), 1)
        self.assertEqual(list(iterators[0]),
                         ["User1", "User2", "User3"])

    def test_build_full_result_with_single_key(self):
        """All pages aggregate into one list under the result key."""
        responses = [
            {"Users": ["User1"], "Marker": "m1"},
            {"Users": ["User2"], "Marker": "m2"},
            {"Users": ["User3"]},
        ]
        self.method.side_effect = responses
        pages = self.paginator.paginate()
        complete = pages.build_full_result()
        self.assertEqual(complete, {'Users': ['User1', 'User2', 'User3']})

    def test_max_items_can_be_specified(self):
        """Stopping after 1 item produces a resumable NextToken."""
        paginator = Paginator(self.method, self.paginate_config, self.model)
        responses = [
            {"Users": ["User1"], "Marker": "m1"},
            {"Users": ["User2"], "Marker": "m2"},
            {"Users": ["User3"]},
        ]
        self.method.side_effect = responses
        expected_token = encode_token({"Marker": "m1"})
        self.assertEqual(
            paginator.paginate(
                PaginationConfig={'MaxItems': 1}).build_full_result(),
            {'Users': ['User1'], 'NextToken': expected_token})

    def test_max_items_as_strings(self):
        # Some services (route53) model MaxItems as a string type.
        # We need to be able to handle this case.
        paginator = Paginator(self.method, self.paginate_config, self.model)
        responses = [
            {"Users": ["User1"], "Marker": "m1"},
            {"Users": ["User2"], "Marker": "m2"},
            {"Users": ["User3"]},
        ]
        self.method.side_effect = responses
        expected_token = encode_token({"Marker": "m1"})
        self.assertEqual(
            # Note MaxItems is a string here.
            paginator.paginate(
                PaginationConfig={'MaxItems': '1'}).build_full_result(),
            {'Users': ['User1'], 'NextToken': expected_token})

    def test_next_token_on_page_boundary(self):
        """MaxItems landing exactly on a page boundary encodes only the
        page marker, with no truncation amount."""
        paginator = Paginator(self.method, self.paginate_config, self.model)
        responses = [
            {"Users": ["User1"], "Marker": "m1"},
            {"Users": ["User2"], "Marker": "m2"},
            {"Users": ["User3"]},
        ]
        self.method.side_effect = responses
        expected_token = encode_token({"Marker": "m2"})
        self.assertEqual(
            paginator.paginate(
                PaginationConfig={'MaxItems': 2}).build_full_result(),
            {'Users': ['User1', 'User2'], 'NextToken': expected_token})

    def test_max_items_can_be_specified_truncates_response(self):
        # We're saying we only want 4 items, but notice that the second
        # page of results returns users 4-6 so we have to truncate
        # part of that second page.
        paginator = Paginator(self.method, self.paginate_config, self.model)
        responses = [
            {"Users": ["User1", "User2", "User3"], "Marker": "m1"},
            {"Users": ["User4", "User5", "User6"], "Marker": "m2"},
            {"Users": ["User7"]},
        ]
        self.method.side_effect = responses
        expected_token = encode_token(
            {"Marker": "m1", "boto_truncate_amount": 1})
        self.assertEqual(
            paginator.paginate(
                PaginationConfig={'MaxItems': 4}).build_full_result(),
            {'Users': ['User1', 'User2', 'User3', 'User4'],
             'NextToken': expected_token})

    def test_resume_next_marker_mid_page(self):
        # This is a simulation of picking up from the response
        # from test_max_items_can_be_specified_truncates_response.
        # We got the first 4 users, when we pick up we should get
        # User5 - User7.
        paginator = Paginator(self.method, self.paginate_config, self.model)
        responses = [
            {"Users": ["User4", "User5", "User6"], "Marker": "m2"},
            {"Users": ["User7"]},
        ]
        self.method.side_effect = responses
        starting_token = encode_token(
            {"Marker": "m1", "boto_truncate_amount": 1})
        pagination_config = {'StartingToken': starting_token}
        self.assertEqual(
            paginator.paginate(
                PaginationConfig=pagination_config).build_full_result(),
            {'Users': ['User5', 'User6', 'User7']})
        self.assertEqual(
            self.method.call_args_list,
            [mock.call(Marker='m1'),
             mock.call(Marker='m2')])

    def test_max_items_exceeds_actual_amount(self):
        # Because MaxItems=10 > number of users (3), we should just return
        # all of the users.
        paginator = Paginator(self.method, self.paginate_config, self.model)
        responses = [
            {"Users": ["User1"], "Marker": "m1"},
            {"Users": ["User2"], "Marker": "m2"},
            {"Users": ["User3"]},
        ]
        self.method.side_effect = responses
        self.assertEqual(
            paginator.paginate(
                PaginationConfig={'MaxItems': 10}).build_full_result(),
            {'Users': ['User1', 'User2', 'User3']})

    def test_bad_input_tokens(self):
        """A malformed StartingToken raises ValueError."""
        responses = [
            {"Users": ["User1"], "Marker": "m1"},
            {"Users": ["User2"], "Marker": "m2"},
            {"Users": ["User3"]},
        ]
        self.method.side_effect = responses
        pagination_config = {'StartingToken': 'does___not___work'}
        # assertRaisesRegexp was removed in Python 3.12; capture the
        # exception and check its message instead.
        with self.assertRaises(ValueError) as cm:
            self.paginator.paginate(
                PaginationConfig=pagination_config).build_full_result()
        self.assertIn('Bad starting token', str(cm.exception))
class TestMultipleResultKeys(unittest.TestCase):
    """Aggregation when the paginator declares more than one result key.

    The first result key ("Users") drives MaxItems counting; the others
    ("Groups") are secondary keys that are truncated relative to it.
    """

    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        # This is something we'd see in s3 pagination.
        self.paginate_config = {
            "output_token": "Marker",
            "input_token": "Marker",
            "result_key": ["Users", "Groups"],
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)

    def test_build_full_result_with_multiple_result_keys(self):
        """Each declared result key aggregates its own list across pages."""
        responses = [
            {"Users": ["User1"], "Groups": ["Group1"], "Marker": "m1"},
            {"Users": ["User2"], "Groups": ["Group2"], "Marker": "m2"},
            {"Users": ["User3"], "Groups": ["Group3"]},
        ]
        self.method.side_effect = responses
        pages = self.paginator.paginate()
        complete = pages.build_full_result()
        self.assertEqual(complete,
                         {"Users": ['User1', 'User2', 'User3'],
                          "Groups": ['Group1', 'Group2', 'Group3']})

    def test_build_full_result_with_different_length_result_keys(self):
        """Result keys may exhaust at different points across pages."""
        responses = [
            {"Users": ["User1"], "Groups": ["Group1"], "Marker": "m1"},
            # Then we stop getting "Users" output, but we get more "Groups"
            {"Users": [], "Groups": ["Group2"], "Marker": "m2"},
            {"Users": [], "Groups": ["Group3"]},
        ]
        self.method.side_effect = responses
        pages = self.paginator.paginate()
        complete = pages.build_full_result()
        self.assertEqual(complete,
                         {"Users": ['User1'],
                          "Groups": ['Group1', 'Group2', 'Group3']})

    def test_build_full_result_with_zero_length_result_key(self):
        responses = [
            # In this case the 'Users' key is always empty but we should
            # have a 'Users' key in the output, it should just have an
            # empty list for a value.
            {"Users": [], "Groups": ["Group1"], "Marker": "m1"},
            {"Users": [], "Groups": ["Group2"], "Marker": "m2"},
            {"Users": [], "Groups": ["Group3"]},
        ]
        self.method.side_effect = responses
        pages = self.paginator.paginate()
        complete = pages.build_full_result()
        self.assertEqual(complete,
                         {"Users": [],
                          "Groups": ['Group1', 'Group2', 'Group3']})

    def test_build_result_with_secondary_keys(self):
        """MaxItems counts only the primary key; secondary keys keep the
        whole first page and the token records the truncation offset."""
        responses = [
            {"Users": ["User1", "User2"],
             "Groups": ["Group1", "Group2"],
             "Marker": "m1"},
            {"Users": ["User3"], "Groups": ["Group3"], "Marker": "m2"},
            {"Users": ["User4"], "Groups": ["Group4"]},
        ]
        self.method.side_effect = responses
        pages = self.paginator.paginate(
            PaginationConfig={'MaxItems': 1})
        complete = pages.build_full_result()
        # Marker is None because we stopped inside the very first page.
        expected_token = encode_token(
            {"Marker": None, "boto_truncate_amount": 1})
        self.assertEqual(complete,
                         {"Users": ["User1"], "Groups": ["Group1", "Group2"],
                          "NextToken": expected_token})

    def test_resume_with_secondary_keys(self):
        # This is simulating a continutation of the previous test,
        # test_build_result_with_secondary_keys. We use the
        # token specified in the response "None___1" to continue where we
        # left off.
        responses = [
            {"Users": ["User1", "User2"],
             "Groups": ["Group1", "Group2"],
             "Marker": "m1"},
            {"Users": ["User3"], "Groups": ["Group3"], "Marker": "m2"},
            {"Users": ["User4"], "Groups": ["Group4"]},
        ]
        self.method.side_effect = responses
        starting_token = encode_token(
            {"Marker": None, "boto_truncate_amount": 1})
        pages = self.paginator.paginate(
            PaginationConfig={'MaxItems': 1,
                              'StartingToken': starting_token})
        complete = pages.build_full_result()
        # Note that the secondary keys ("Groups") are all truncated because
        # they were in the original (first) response.
        expected_token = encode_token({"Marker": "m1"})
        self.assertEqual(complete,
                         {"Users": ["User2"], "Groups": [],
                          "NextToken": expected_token})

    def test_resume_with_secondary_result_as_string(self):
        """A string-typed secondary key truncates to the empty string."""
        self.method.return_value = {"Users": ["User1", "User2"], "Groups": "a"}
        starting_token = encode_token(
            {"Marker": None, "boto_truncate_amount": 1})
        pages = self.paginator.paginate(
            PaginationConfig={'MaxItems': 1, 'StartingToken': starting_token})
        complete = pages.build_full_result()
        # Note that the secondary keys ("Groups") becomes empty string because
        # they were in the original (first) response.
        self.assertEqual(complete, {"Users": ["User2"], "Groups": ""})

    def test_resume_with_secondary_result_as_integer(self):
        """An integer-typed secondary key truncates to zero."""
        self.method.return_value = {"Users": ["User1", "User2"], "Groups": 123}
        starting_token = encode_token(
            {"Marker": None, "boto_truncate_amount": 1})
        pages = self.paginator.paginate(
            PaginationConfig={'MaxItems': 1, 'StartingToken': starting_token})
        complete = pages.build_full_result()
        # Note that the secondary keys ("Groups") becomes zero because
        # they were in the original (first) response.
        self.assertEqual(complete, {"Users": ["User2"], "Groups": 0})
class TestMultipleInputKeys(unittest.TestCase):
    """The most complicated configuration: multiple input, output, AND
    result keys, all advancing together."""

    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        # Probably the most complicated example we'll see:
        # multiple input/output/result keys.
        self.paginate_config = {
            "output_token": ["Marker1", "Marker2"],
            "input_token": ["InMarker1", "InMarker2"],
            "result_key": ["Users", "Groups"],
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)

    def test_build_full_result_with_multiple_input_keys(self):
        """All input-token values are encoded into the NextToken along
        with the mid-page truncation amount."""
        responses = [
            {"Users": ["User1", "User2"], "Groups": ["Group1"],
             "Marker1": "m1", "Marker2": "m2"},
            {"Users": ["User3", "User4"], "Groups": ["Group2"],
             "Marker1": "m3", "Marker2": "m4"},
            {"Users": ["User5"], "Groups": ["Group3"]}
        ]
        self.method.side_effect = responses
        pages = self.paginator.paginate(
            PaginationConfig={'MaxItems': 3})
        complete = pages.build_full_result()
        expected_token = encode_token(
            {"InMarker1": "m1", "InMarker2": "m2", "boto_truncate_amount": 1})
        self.assertEqual(complete,
                         {"Users": ['User1', 'User2', 'User3'],
                          "Groups": ['Group1', 'Group2'],
                          "NextToken": expected_token})

    def test_resume_with_multiple_input_keys(self):
        """Resuming decodes every input token and skips already-seen items."""
        responses = [
            {"Users": ["User3", "User4"], "Groups": ["Group2"],
             "Marker1": "m3", "Marker2": "m4"},
            {"Users": ["User5"], "Groups": ["Group3"]},
        ]
        self.method.side_effect = responses
        starting_token = encode_token(
            {"InMarker1": "m1", "InMarker2": "m2", "boto_truncate_amount": 1})
        pages = self.paginator.paginate(
            PaginationConfig={'MaxItems': 1,
                              'StartingToken': starting_token})
        complete = pages.build_full_result()
        expected_token = encode_token(
            {"InMarker1": "m3", "InMarker2": "m4"})
        self.assertEqual(complete,
                         {"Users": ['User4'],
                          "Groups": [],
                          "NextToken": expected_token})
        # The resumed request carries both decoded input markers.
        self.assertEqual(
            self.method.call_args_list,
            [mock.call(InMarker1='m1', InMarker2='m2')])

    def test_resume_encounters_an_empty_payload(self):
        """A response with no result keys at all aggregates to {}."""
        response = {"not_a_result_key": "it happens in some service"}
        self.method.return_value = response
        starting_token = encode_token(
            {"Marker": None, "boto_truncate_amount": 1})
        complete = self.paginator \
            .paginate(PaginationConfig={'StartingToken': starting_token}) \
            .build_full_result()
        self.assertEqual(complete, {})

    def test_result_key_exposed_on_paginator(self):
        """The parsed result-key expressions are public on the paginator."""
        self.assertEqual(
            [rk.expression for rk in self.paginator.result_keys],
            ['Users', 'Groups']
        )

    def test_result_key_exposed_on_page_iterator(self):
        """The page iterator exposes the same result-key expressions."""
        pages = self.paginator.paginate(MaxItems=3)
        self.assertEqual(
            [rk.expression for rk in pages.result_keys],
            ['Users', 'Groups']
        )
class TestExpressionKeyIterators(unittest.TestCase):
    """Pagination whose result_key is a dotted JMESPath expression."""

    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        # Similar to RDS-style pagination with a nested result key.
        self.paginate_config = {
            "input_token": "Marker",
            "output_token": "Marker",
            "limit_key": "MaxRecords",
            "result_key": "EngineDefaults.Parameters"
        }
        self.paginator = Paginator(self.method, self.paginate_config,
                                   self.model)
        self.responses = [
            {"EngineDefaults": {"Parameters": ["One", "Two"]},
             "Marker": "m1"},
            {"EngineDefaults": {"Parameters": ["Three", "Four"]},
             "Marker": "m2"},
            {"EngineDefaults": {"Parameters": ["Five"]}},
        ]

    def test_result_key_iters(self):
        """The nested key still yields a single flattened iterator."""
        self.method.side_effect = self.responses
        iterators = self.paginator.paginate().result_key_iters()
        self.assertEqual(len(iterators), 1)
        self.assertEqual(
            list(iterators[0]), ['One', 'Two', 'Three', 'Four', 'Five'])

    def test_build_full_result_with_single_key(self):
        """Aggregation rebuilds the nested structure in the full result."""
        self.method.side_effect = self.responses
        full_result = self.paginator.paginate().build_full_result()
        self.assertEqual(full_result, {
            'EngineDefaults': {
                'Parameters': ['One', 'Two', 'Three', 'Four', 'Five']
            },
        })
class TestIncludeResultKeys(unittest.TestCase):
    """Aggregation across list, integer, and string result keys."""

    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        self.paginate_config = {
            'output_token': 'Marker',
            'input_token': 'Marker',
            'result_key': ['ResultKey', 'Count', 'Log'],
        }
        self.paginator = Paginator(self.method, self.paginate_config,
                                   self.model)

    def test_different_kinds_of_result_key(self):
        """Lists concatenate, integers sum, and strings concatenate."""
        self.method.side_effect = [
            {'ResultKey': ['a'], 'Count': 1, 'Log': 'x', 'Marker': 'a'},
            {'not_a_result_key': 'this page will be ignored', 'Marker': '_'},
            {'ResultKey': ['b', 'c'], 'Count': 2, 'Log': 'y', 'Marker': 'b'},
            {'ResultKey': ['d', 'e', 'f'], 'Count': 3, 'Log': 'z'},
        ]
        self.assertEqual(
            self.paginator.paginate().build_full_result(),
            {
                'ResultKey': ['a', 'b', 'c', 'd', 'e', 'f'],
                'Count': 6,
                'Log': 'xyz',
            })

    def test_result_key_is_missing(self):
        """Pages with none of the result keys contribute nothing."""
        self.method.side_effect = [
            {'not_a_result_key': 'this page will be ignored', 'Marker': '_'},
            {'neither_this_one': 'this page will be ignored, too'},
        ]
        self.assertEqual(self.paginator.paginate().build_full_result(), {})
class TestIncludeNonResultKeys(unittest.TestCase):
    """Keys listed in 'non_aggregate_keys' are carried into the full
    result verbatim (taken from the first page) instead of aggregated."""

    maxDiff = None

    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        self.paginate_config = {
            'output_token': 'NextToken',
            'input_token': 'NextToken',
            'result_key': 'ResultKey',
            'non_aggregate_keys': ['NotResultKey'],
        }
        self.paginator = Paginator(self.method, self.paginate_config, self.model)

    def test_include_non_aggregate_keys(self):
        """Non-aggregate keys appear once in the result, not per page."""
        self.method.side_effect = [
            {'ResultKey': ['foo'], 'NotResultKey': 'a', 'NextToken': 't1'},
            {'ResultKey': ['bar'], 'NotResultKey': 'a', 'NextToken': 't2'},
            {'ResultKey': ['baz'], 'NotResultKey': 'a'},
        ]
        pages = self.paginator.paginate()
        actual = pages.build_full_result()
        # The page iterator also exposes the non-aggregate portion.
        self.assertEqual(pages.non_aggregate_part, {'NotResultKey': 'a'})
        expected = {
            'ResultKey': ['foo', 'bar', 'baz'],
            'NotResultKey': 'a',
        }
        self.assertEqual(actual, expected)

    def test_include_with_multiple_result_keys(self):
        """Non-aggregate keys coexist with several aggregated result keys."""
        self.paginate_config['result_key'] = ['ResultKey1', 'ResultKey2']
        self.paginator = Paginator(self.method, self.paginate_config, self.model)
        self.method.side_effect = [
            {'ResultKey1': ['a', 'b'], 'ResultKey2': ['u', 'v'],
             'NotResultKey': 'a', 'NextToken': 'token1'},
            {'ResultKey1': ['c', 'd'], 'ResultKey2': ['w', 'x'],
             'NotResultKey': 'a', 'NextToken': 'token2'},
            {'ResultKey1': ['e', 'f'], 'ResultKey2': ['y', 'z'],
             'NotResultKey': 'a'}
        ]
        pages = self.paginator.paginate()
        actual = pages.build_full_result()
        expected = {
            'ResultKey1': ['a', 'b', 'c', 'd', 'e', 'f'],
            'ResultKey2': ['u', 'v', 'w', 'x', 'y', 'z'],
            'NotResultKey': 'a',
        }
        self.assertEqual(actual, expected)

    def test_include_with_nested_result_keys(self):
        """Nested non-aggregate keys keep first-page values even when the
        aggregated result key shares the same parent structure."""
        self.paginate_config['result_key'] = 'Result.Key'
        self.paginate_config['non_aggregate_keys'] = [
            'Outer', 'Result.Inner',
        ]
        self.paginator = Paginator(self.method, self.paginate_config, self.model)
        self.method.side_effect = [
            # The non result keys shows hypothetical
            # example. This doesn't actually happen,
            # but in the case where the non result keys
            # are different across pages, we use the values
            # from the first page.
            {'Result': {'Key': ['foo'], 'Inner': 'v1'},
             'Outer': 'v2', 'NextToken': 't1'},
            {'Result': {'Key': ['bar', 'baz'], 'Inner': 'v3'},
             'Outer': 'v4', 'NextToken': 't2'},
            {'Result': {'Key': ['qux'], 'Inner': 'v5'},
             'Outer': 'v6'},
        ]
        pages = self.paginator.paginate()
        actual = pages.build_full_result()
        self.assertEqual(pages.non_aggregate_part,
                         {'Outer': 'v2', 'Result': {'Inner': 'v1'}})
        expected = {
            'Result': {'Key': ['foo', 'bar', 'baz', 'qux'], 'Inner': 'v1'},
            'Outer': 'v2',
        }
        self.assertEqual(actual, expected)
class TestSearchOverResults(unittest.TestCase):
    """JMESPath search() applied across every page of a paginator."""

    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()
        self.paginate_config = {
            'more_results': 'IsTruncated',
            'output_token': 'NextToken',
            'input_token': 'NextToken',
            'result_key': 'Foo',
        }
        self.paginator = Paginator(self.method, self.paginate_config,
                                   self.model)
        self.method.side_effect = [
            {'Foo': [{'a': 1}, {'b': 2}],
             'IsTruncated': True, 'NextToken': '1'},
            {'Foo': [{'a': 3}, {'b': 4}],
             'IsTruncated': True, 'NextToken': '2'},
            {'Foo': [{'a': 5}], 'IsTruncated': False, 'NextToken': '3'},
        ]

    def _search(self, expression):
        # Run a JMESPath search over all pages and collect every hit.
        return list(self.paginator.paginate().search(expression))

    def test_yields_non_list_values(self):
        self.assertEqual(self._search('Foo[0].a'), [1, 3, 5])

    def test_yields_individual_list_values(self):
        self.assertEqual(self._search('Foo[].*[]'), [1, 2, 3, 4, 5])

    def test_empty_when_no_match(self):
        self.assertEqual(self._search('Foo[].qux'), [])

    def test_no_yield_when_no_match_on_page(self):
        self.assertEqual(self._search('Foo[].b'), [2, 4])
class TestDeprecatedStartingToken(unittest.TestCase):
    """Tests the old-style (non-encoded) StartingToken format such as
    'm1___0', which predates the JSON/base64-encoded tokens."""

    def setUp(self):
        self.method = mock.Mock()
        self.model = mock.Mock()

    def create_paginator(self, multiple_tokens=False):
        """Build a paginator with either one or two input/output tokens."""
        if multiple_tokens:
            paginator_config = {
                "output_token": ["Marker1", "Marker2"],
                "input_token": ["InMarker1", "InMarker2"],
                "result_key": ["Users", "Groups"],
            }
        else:
            paginator_config = {
                'output_token': 'Marker',
                'input_token': 'Marker',
                'result_key': 'Users',
            }
        return Paginator(self.method, paginator_config, self.model)

    def assert_pagination_result(self, expected, pagination_config,
                                 multiple_tokens=False):
        """Paginate with pagination_config and assert the full result.

        A ValueError here means the deprecated token format was
        (incorrectly) rejected, so it is converted into a test failure.
        """
        paginator = self.create_paginator(multiple_tokens)
        try:
            actual = paginator.paginate(
                PaginationConfig=pagination_config).build_full_result()
            self.assertEqual(actual, expected)
        except ValueError:
            self.fail("Deprecated paginator failed.")

    def test_deprecated_starting_token(self):
        """'marker___truncate_amount' tokens are still accepted."""
        responses = [
            {"Users": ["User1"], "Marker": "m2"},
            {"Users": ["User2"], "Marker": "m3"},
            {"Users": ["User3"]},
        ]
        self.method.side_effect = responses
        pagination_config = {'StartingToken': 'm1___0'}
        expected = {'Users': ['User1', 'User2', 'User3']}
        self.assert_pagination_result(expected, pagination_config)

    def test_deprecated_multiple_starting_token(self):
        """Multi-token paginators accept 'm0___m0___1' style tokens and
        honor the trailing truncate amount."""
        responses = [
            {
                "Users": ["User1", "User2"],
                "Groups": ["Group1"],
                "Marker1": "m1",
                "Marker2": "m2"
            },
            {
                "Users": ["User3", "User4"],
                "Groups": ["Group2"],
                "Marker1": "m3",
                "Marker2": "m4"
            },
            {
                "Users": ["User5"],
                "Groups": ["Group3"]
            }
        ]
        self.method.side_effect = responses
        pagination_config = {'StartingToken': 'm0___m0___1'}
        expected = {
            'Groups': ['Group2', 'Group3'],
            'Users': ['User2', 'User3', 'User4', 'User5']
        }
        self.assert_pagination_result(
            expected, pagination_config, multiple_tokens=True)

    def test_deprecated_starting_token_returns_new_style_next_token(self):
        """Even when resuming from an old token, the NextToken emitted is
        the new encoded format."""
        responses = [
            {"Users": ["User1"], "Marker": "m2"},
            {"Users": ["User2"], "Marker": "m3"},
            {"Users": ["User3"], "Marker": "m4"},
        ]
        self.method.side_effect = responses
        pagination_config = {'StartingToken': 'm1___0', 'MaxItems': 3}
        expected = {
            'Users': ['User1', 'User2', 'User3'],
            'NextToken': encode_token({'Marker': 'm4'})
        }
        self.assert_pagination_result(expected, pagination_config)

    def test_deprecated_starting_token_without_all_input_set_to_none(self):
        """A single-part token on a multi-token paginator fills the
        missing markers with None."""
        responses = [
            {
                "Users": ["User1", "User2"],
                "Groups": ["Group1"],
                "Marker1": "m1",
                "Marker2": "m2"
            },
            {
                "Users": ["User3", "User4"],
                "Groups": ["Group2"],
                "Marker1": "m3",
                "Marker2": "m4"
            },
            {
                "Users": ["User5"],
                "Groups": ["Group3"]
            }
        ]
        self.method.side_effect = responses
        pagination_config = {'StartingToken': 'm0'}
        expected = {
            'Groups': ['Group2', 'Group3'],
            'Users': ['User1', 'User2', 'User3', 'User4', 'User5']
        }
        self.assert_pagination_result(
            expected, pagination_config, multiple_tokens=True)

    def test_deprecated_starting_token_rejects_too_many_input_tokens(self):
        """A token with more parts than the paginator has input tokens
        must be rejected with ValueError."""
        responses = [
            {"Users": ["User1"], "Marker": "m2"},
            {"Users": ["User2"], "Marker": "m3"},
            {"Users": ["User3"]},
        ]
        self.method.side_effect = responses
        # 'm1___m4___0' carries two markers, but this paginator accepts
        # only one input token.
        pagination_config = {'StartingToken': 'm1___m4___0'}
        paginator = self.create_paginator()
        # Bug fix: the original test asserted the aggregated result AFTER
        # the raising call, inside the assertRaises block, so that
        # assertion could never execute; only the raise is checked now.
        with self.assertRaises(ValueError):
            paginator.paginate(
                PaginationConfig=pagination_config).build_full_result()
class TestStringPageSize(unittest.TestCase):
    """PageSize must be serialized per the modeled type: this service
    models its limit key (MaxItems) as a string."""

    def setUp(self):
        # Minimal hand-built service model with a string-typed MaxItems.
        self.service_model = {
            'metadata': {
                'protocol': 'query',
                'endpointPrefix': 'prefix'
            },
            'documentation': 'best service ever',
            'operations': {
                'ListStuff': {
                    'name': 'ListStuff',
                    'http': {
                        'method': 'GET',
                        'requestUri': '/things'
                    },
                    'input': {'shape': 'ListStuffInputShape'},
                    'output': {'shape': 'ListStuffOutputShape'},
                    'errors': [],
                    'documentation': 'Lists stuff'
                }
            },
            'shapes': {
                'String': {'type': 'string'},
                'ListOfStuff': {
                    'type': 'list',
                    'member': {'type': 'string'}
                },
                'ListStuffInputShape': {
                    'type': 'structure',
                    'required': [],
                    'members': {
                        'NextToken': {'shape': 'String'},
                        'MaxItems': {'shape': 'String'}
                    }
                },
                'ListStuffOutputShape': {
                    'type': 'structure',
                    'required': [],
                    'members': {
                        'NextToken': {'shape': 'String'},
                        'Stuff': {'shape': 'ListOfStuff'},
                        'IsTruncated': {'type': 'boolean'}
                    },
                }
            }
        }
        self.paginate_config = {
            'input_token': 'NextToken',
            'output_token': 'NextToken',
            'limit_key': 'MaxItems',
            'result_key': 'Stuff',
        }
        self.service = model.ServiceModel(self.service_model)
        self.model = self.service.operation_model('ListStuff')
        self.method = mock.Mock()
        self.method.side_effect = [{}]
        self.paginator = Paginator(self.method, self.paginate_config,
                                   self.model)

    def test_int_page_size(self):
        """An int PageSize is converted to the modeled string type."""
        # list() drives the lazy page iterator; the result itself is
        # irrelevant (the original bound it to an unused local).
        list(self.paginator.paginate(PaginationConfig={'PageSize': 1}))
        self.method.assert_called_with(MaxItems='1')

    def test_str_page_size(self):
        """A string PageSize passes through unchanged."""
        list(self.paginator.paginate(PaginationConfig={'PageSize': '1'}))
        self.method.assert_called_with(MaxItems='1')
# Allow running this test module directly: python test_paginate.py
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "a5835cf919f381acb8983b0d61433cb5",
"timestamp": "",
"source": "github",
"line_count": 1407,
"max_line_length": 83,
"avg_line_length": 38.837242359630416,
"alnum_prop": 0.5247053656394115,
"repo_name": "pplu/botocore",
"id": "bf8666b4ae4dd4869631672f27c6c19bda219731",
"size": "55211",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/unit/test_paginate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "23824"
},
{
"name": "Python",
"bytes": "2691062"
}
],
"symlink_target": ""
} |
"""Stage the Chromium checkout to update CTS test version."""
import contextlib
import json
import operator
import os
import re
import sys
import tempfile
import threading
try:
# Workaround for py2/3 compatibility.
# TODO(pbirk): remove once py2 support is no longer needed.
import urllib.request as urllib_request
except ImportError:
import urllib as urllib_request
import zipfile
sys.path.append(
os.path.join(
os.path.dirname(__file__), os.pardir, os.pardir, 'third_party',
'catapult', 'devil'))
# pylint: disable=wrong-import-position,import-error
from devil.utils import cmd_helper
sys.path.append(
os.path.join(
os.path.dirname(__file__), os.pardir, os.pardir, 'third_party',
'catapult', 'common', 'py_utils'))
# pylint: disable=wrong-import-position,import-error
from py_utils import tempfile_ext
# Absolute path of the Chromium src/ root (two directories above this
# script's own directory).
SRC_DIR = os.path.abspath(
    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
# Checkout-relative locations of the CTS config JSON and CIPD yaml.
TOOLS_DIR = os.path.join('android_webview', 'tools')
CONFIG_FILE = os.path.join('cts_config', 'webview_cts_gcs_path.json')
CONFIG_PATH = os.path.join(SRC_DIR, TOOLS_DIR, CONFIG_FILE)
CIPD_FILE = os.path.join('cts_archive', 'cipd.yaml')
CIPD_PATH = os.path.join(SRC_DIR, TOOLS_DIR, CIPD_FILE)
DEPS_FILE = 'DEPS'
TEST_SUITES_FILE = os.path.join('testing', 'buildbot', 'test_suites.pyl')
# Android desserts that are no longer receiving CTS updates at
# https://source.android.com/compatibility/cts/downloads
# Please update this list as more versions reach end-of-service.
END_OF_SERVICE_DESSERTS = ['M']
# DEPS name and CIPD package of the CTS archive.
CTS_DEP_NAME = 'src/android_webview/tools/cts_archive'
CTS_DEP_PACKAGE = 'chromium/android_webview/tools/cts_archive'
# Files that reference the CTS archive CIPD package version.
CIPD_REFERRERS = [DEPS_FILE, TEST_SUITES_FILE]
_GENERATE_BUILDBOT_JSON = os.path.join('testing', 'buildbot',
                                       'generate_buildbot_json.py')
# Template for a CIPD ensure file: package name and version, installed
# under the 'cipd' subdirectory with integrity checking enabled.
_ENSURE_FORMAT = """$ParanoidMode CheckIntegrity
@Subdir cipd
{} {}"""
_ENSURE_SUBDIR = 'cipd'
# Matches blank lines and comment-only lines (used to read file headers).
_RE_COMMENT_OR_BLANK = re.compile(r'^ *(#.*)?$')
class CTSConfig:
    """Read-only view of the CTS configuration JSON file."""

    def __init__(self, file_path=CONFIG_PATH):
        """Load and parse the CTS config.

        Only read operations are provided by this object. Users should
        edit the file manually for any modifications.

        Args:
          file_path: Path to file.
        """
        self._path = os.path.abspath(file_path)
        with open(self._path) as config_file:
            self._config = json.load(config_file)

    def get_platforms(self):
        """Return all configured platform names, sorted."""
        return sorted(self._config)

    def get_archs(self, platform):
        """Return the architectures configured for platform, sorted."""
        return sorted(self._config[platform]['arch'])

    def iter_platform_archs(self):
        """Yield every (platform, arch) pair in sorted order."""
        for platform in self.get_platforms():
            for arch in self.get_archs(platform):
                yield platform, arch

    def get_cipd_zip(self, platform, arch):
        """Return the CIPD zip filename for the platform/arch pair."""
        return self._config[platform]['arch'][arch]['filename']

    def get_origin(self, platform, arch):
        """Return the origin location for the platform/arch pair."""
        return self._config[platform]['arch'][arch]['_origin']

    def get_origin_zip(self, platform, arch):
        """Return just the basename of the origin for platform/arch."""
        return os.path.basename(self.get_origin(platform, arch))

    def get_apks(self, platform):
        """Return the apk names of the platform's test runs, sorted."""
        return sorted(run['apk']
                      for run in self._config[platform]['test_runs'])
class CTSCIPDYaml:
    """Represents a CTS CIPD yaml file."""

    # Line-matching regexes for the tiny yaml subset used by cipd.yaml.
    RE_PACKAGE = r'^package:\s*(\S+)\s*$'
    RE_DESC = r'^description:\s*(.+)$'
    RE_DATA = r'^data:\s*$'
    RE_FILE = r'^\s+-\s+file:\s*(.+)$'

    # TODO(crbug.com/1049432): Replace with yaml parser
    @classmethod
    def parse(cls, lines):
        """Parse yaml lines into {'package', 'description', 'data'}.

        'data' is a list of {'file': name} dicts.  The parse is
        order-dependent: file entries are only collected after the
        'data:' line has been seen.
        """
        result = {}
        for line in lines:
            # Skip empty lines and full-line comments.
            if len(line) == 0 or line[0] == '#':
                continue
            package_match = re.match(cls.RE_PACKAGE, line)
            if package_match:
                result['package'] = package_match.group(1)
                continue
            desc_match = re.match(cls.RE_DESC, line)
            if desc_match:
                result['description'] = desc_match.group(1)
                continue
            if re.match(cls.RE_DATA, line):
                result['data'] = []
            # Only collect file entries once the 'data:' section started.
            if 'data' in result:
                file_match = re.match(cls.RE_FILE, line)
                if file_match:
                    result['data'].append({'file': file_match.group(1)})
        return result

    def __init__(self, file_path=CIPD_PATH):
        """Constructs a representation of CTS CIPD yaml file.

        Note the file won't be modified unless write is called
        with its path.

        Args:
          file_path: Path to file.
        """
        self._path = os.path.abspath(file_path)
        self._header = []
        # Read header comments
        with open(self._path) as f:
            for l in f.readlines():
                if re.match(_RE_COMMENT_OR_BLANK, l):
                    self._header.append(l)
                else:
                    break
        # Read yaml data
        with open(self._path) as f:
            self._yaml = CTSCIPDYaml.parse(f.readlines())

    def get_file_path(self):
        """Get full file path of yaml file that this was constructed from."""
        return self._path

    def get_file_basename(self):
        """Get base file name that this was constructed from."""
        return os.path.basename(self._path)

    def get_package(self):
        """Get package name."""
        return self._yaml['package']

    def clear_files(self):
        """Clears all files in file (only in local memory, does not modify file)."""
        self._yaml['data'] = []

    def append_file(self, file_name):
        """Add file_name to list of files."""
        self._yaml['data'].append({'file': str(file_name)})

    def remove_file(self, file_name):
        """Remove file_name from list of files."""
        old_file_names = self.get_files()
        new_file_names = [name for name in old_file_names if name != file_name]
        self._yaml['data'] = [{'file': name} for name in new_file_names]

    def get_files(self):
        """Get list of files in yaml file."""
        return [e['file'] for e in self._yaml['data']]

    def write(self, file_path):
        """(Over)write file_path with the cipd.yaml representation."""
        dir_name = os.path.dirname(file_path)
        if not os.path.isdir(dir_name):
            os.makedirs(dir_name)
        with open(file_path, 'w') as f:
            f.writelines(self._get_yamls())

    def _get_yamls(self):
        """Return the cipd.yaml file contents of this object."""
        output = []
        # Preserve the original header comments, then emit the fields in
        # canonical order with file entries sorted by name.
        output += self._header
        output.append('package: {}\n'.format(self._yaml['package']))
        output.append('description: {}\n'.format(self._yaml['description']))
        output.append('data:\n')
        for d in sorted(self._yaml['data'], key=operator.itemgetter('file')):
            output.append(' - file: {}\n'.format(d.get('file')))
        return output
def cipd_ensure(package, version, root_dir):
    """Ensures CIPD package is installed at root_dir.

    Args:
      package: CIPD name of package
      version: Package version
      root_dir: Directory to install package into
    Raises:
      IOError: If the cipd command exits with a nonzero status.
    """

    def _createEnsureFile(package, version, file_path):
        # Write a minimal ensure-file describing the single package.
        with open(file_path, 'w') as f:
            f.write(_ENSURE_FORMAT.format(package, version))

    def _ensure(root, ensure_file):
        ret = cmd_helper.RunCmd(
            ['cipd', 'ensure', '-root', root, '-ensure-file', ensure_file])
        if ret:
            # Bug fix: RunCmd returns an int exit code; concatenating it
            # to a str raised TypeError instead of the intended IOError.
            raise IOError('Error while running cipd ensure: ' + str(ret))

    with tempfile.NamedTemporaryFile() as f:
        _createEnsureFile(package, version, f.name)
        _ensure(root_dir, f.name)
def cipd_download(cipd, version, download_dir):
    """Downloads CIPD package files.

    Unlike `cipd ensure`, actual files (not symlinks) end up under
    download_dir.

    Args:
      cipd: CTSCIPDYaml object
      version: Version of package
      download_dir: Destination directory
    """
    package = cipd.get_package()
    dest_root = os.path.abspath(download_dir)
    if not os.path.isdir(dest_root):
        os.makedirs(dest_root)
    with tempfile_ext.NamedTemporaryDirectory() as work_dir, chdir(work_dir):
        cipd_ensure(package, version, '.')
        for name in cipd.get_files():
            source = os.path.join(_ENSURE_SUBDIR, name)
            target = os.path.join(dest_root, name)
            target_dir = os.path.dirname(target)
            if not os.path.isdir(target_dir):
                os.makedirs(target_dir)
            # --reflink=never forces a real copy rather than a CoW clone.
            if cmd_helper.RunCmd(['cp', '--reflink=never', source, target]):
                raise IOError('Error file copy from ' + name + ' to ' + target)
def filter_cts_file(cts_config, cts_zip_file, dest_dir):
    """Filters out non-webview test apks from downloaded CTS zip file.

    Args:
      cts_config: CTSConfig object
      cts_zip_file: Path to downloaded CTS zip, retaining the original filename
      dest_dir: Destination directory to filter to, filename will be unchanged

    Raises:
      ValueError: If no configured platform/arch origin matches the zip name.
    """
    zip_base = os.path.basename(cts_zip_file)
    for platform in cts_config.get_platforms():
        for arch in cts_config.get_archs(platform):
            origin = cts_config.get_origin(platform, arch)
            if os.path.basename(origin) == zip_base:
                filterzip(cts_zip_file, cts_config.get_apks(platform),
                          os.path.join(dest_dir, zip_base))
                return
    raise ValueError('Could not find platform and arch for: ' + cts_zip_file)
class ChromiumRepoHelper:
    """Performs operations on Chromium checkout."""

    def __init__(self, root_dir=SRC_DIR):
        # Absolute checkout root; all relative paths are resolved against it.
        self._root_dir = os.path.abspath(root_dir)
        # Files that embed the CTS CIPD revision and must be updated together.
        self._cipd_referrers = [
            os.path.join(self._root_dir, p) for p in CIPD_REFERRERS
        ]

    @property
    def cipd_referrers(self):
        """Absolute paths of files that reference the CTS CIPD revision."""
        return self._cipd_referrers

    @property
    def cts_cipd_package(self):
        """CIPD package name of the CTS dependency."""
        return CTS_DEP_PACKAGE

    def get_cipd_dependency_rev(self):
        """Return CTS CIPD revision in the checkout's DEPS file."""
        deps_file = os.path.join(self._root_dir, DEPS_FILE)
        # Use the gclient command instead of gclient_eval since the latter is
        # not intended for direct use outside of depot_tools.
        cmd = [
            'gclient', 'getdep', '--revision',
            '%s:%s' % (CTS_DEP_NAME, CTS_DEP_PACKAGE), '--deps-file', deps_file
        ]
        # BUG FIX: the original did `env = os.environ`, which aliases (and
        # then mutates) the process-wide environment; copy() keeps the
        # DEPOT_TOOLS_UPDATE override local to this command.
        env = os.environ.copy()
        # Disable auto-update of depot tools since update_depot_tools may not
        # be available (for example, on the presubmit bot), and it's probably
        # best not to perform surprise updates anyways.
        env.update({'DEPOT_TOOLS_UPDATE': '0'})
        status, output, err = cmd_helper.GetCmdStatusOutputAndError(cmd, env=env)
        if status != 0:
            raise Exception('Command "%s" failed: %s' % (' '.join(cmd), err))
        return output.strip()

    def update_cts_cipd_rev(self, new_version):
        """Update references to CTS CIPD revision in checkout.

        Args:
          new_version: New version to use
        """
        old_version = self.get_cipd_dependency_rev()
        for path in self.cipd_referrers:
            replace_cipd_revision(path, old_version, new_version)

    def git_status(self, path):
        """Returns canonical git status of file.

        Args:
          path: Path to file.

        Returns:
          Output of git status --porcelain.
        """
        with chdir(self._root_dir):
            output = cmd_helper.GetCmdOutput(['git', 'status', '--porcelain', path])
        return output

    def update_testing_json(self):
        """Performs generate_buildbot_json.py.

        Raises:
          IOError: If generation failed.
        """
        with chdir(self._root_dir):
            ret = cmd_helper.RunCmd(['python', _GENERATE_BUILDBOT_JSON])
            if ret:
                raise IOError('Error while generating_buildbot_json.py')

    def rebase(self, *rel_path_parts):
        """Construct absolute path from parts relative to root_dir.

        Args:
          rel_path_parts: Parts of the root relative path.

        Returns:
          The absolute path.
        """
        return os.path.join(self._root_dir, *rel_path_parts)
def replace_cipd_revision(file_path, old_revision, new_revision):
    """Substitute every occurrence of a cipd revision string in a file.

    Args:
      file_path: Path to file.
      old_revision: Old cipd revision to be replaced.
      new_revision: New cipd revision to use as replacement.

    Returns:
      Number of replaced occurrences.

    Raises:
      IOError: If no occurrences were found.
    """
    with open(file_path) as handle:
        contents = handle.read()
    occurrences = contents.count(old_revision)
    if occurrences == 0:
        raise IOError('Did not find old CIPD revision {} in {}'.format(
            old_revision, file_path))
    with open(file_path, 'w') as handle:
        handle.write(contents.replace(old_revision, new_revision))
    return occurrences
@contextlib.contextmanager
def chdir(dirPath):
    """Context manager that runs its body with dirPath as the working
    directory and restores the previous one afterwards, even on error."""
    previous = os.getcwd()
    os.chdir(dirPath)
    try:
        yield
    finally:
        os.chdir(previous)
def filterzip(inputPath, pathList, outputPath):
    """Copy a subset of files from input archive into output archive.

    Args:
      inputPath: Input archive path
      pathList: List of file names from input archive to copy
      outputPath: Output archive path
    """
    src = os.path.abspath(inputPath)
    dst = os.path.abspath(outputPath)
    # Extract into a scratch directory so archive members land on disk with
    # their relative names, then re-add them to the output archive.
    with zipfile.ZipFile(src, 'r') as input_zip, \
            zipfile.ZipFile(dst, 'w') as output_zip, \
            tempfile_ext.NamedTemporaryDirectory() as work_dir, \
            chdir(work_dir):
        for name in pathList:
            input_zip.extract(name)
            output_zip.write(name)
def download(url, destination):
    """Asynchronously download url to path specified by destination.

    Args:
      url: Url location of file.
      destination: Path where file should be saved to.

    If destination parent directories do not exist, they will be created.

    Returns the download thread which can then be joined by the caller to
    wait for download completion.
    """
    dest_dir = os.path.dirname(destination)
    # dirname() is '' when destination is a bare filename; the original
    # passed '' to os.makedirs, which raises. Only create directories when a
    # directory component exists.
    if dest_dir and not os.path.isdir(dest_dir):
        os.makedirs(dest_dir)
    t = threading.Thread(target=urllib_request.urlretrieve,
                         args=(url, destination))
    t.start()
    return t
def update_cipd_package(cipd_yaml_path):
    """Upload the CIPD package described by cipd_yaml_path.

    Args:
      cipd_yaml_path: Path of cipd yaml specification file

    Returns the instance id reported by `cipd create`.
    """
    yaml_abs = os.path.abspath(cipd_yaml_path)
    with chdir(os.path.dirname(yaml_abs)), \
            tempfile.NamedTemporaryFile() as json_out:
        status = cmd_helper.RunCmd([
            'cipd', 'create', '-pkg-def', yaml_abs, '-json-output',
            json_out.name
        ])
        if status:
            raise IOError('Error during cipd create.')
        # cipd wrote to json_out.name; the still-open handle reads it back.
        return json.load(json_out)['result']['instance_id']
| {
"content_hash": "14129cc290df476792d78a8172597bb0",
"timestamp": "",
"source": "github",
"line_count": 459,
"max_line_length": 80,
"avg_line_length": 30.501089324618736,
"alnum_prop": 0.6565,
"repo_name": "scheib/chromium",
"id": "bc1610c905853d79f562861e44c8be5927c7b8ab",
"size": "14162",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "android_webview/tools/cts_utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import functools
import itertools
import logging
import pickle
import threading
import time
import uuid
from collections import namedtuple
from datetime import timedelta
import click
import psycopg2
from concurrent.futures import (
Future,
TimeoutError,
)
from kazoo.client import KazooState
from kazoo.recipe.watchers import DataWatch
from pgshovel import __version__
from pgshovel.database import ManagedDatabase
from pgshovel.interfaces.common_pb2 import (
BatchIdentifier,
Snapshot,
Tick,
Timestamp,
)
from pgshovel.interfaces.configurations_pb2 import (
ClusterConfiguration,
ReplicationSetConfiguration,
)
from pgshovel.interfaces.streams_pb2 import (
BeginOperation,
MutationOperation,
)
from pgshovel.streams.publisher import Publisher
from pgshovel.utilities.conversions import (
row_converter,
to_snapshot,
to_timestamp,
)
from pgshovel.utilities.protobuf import BinaryCodec
logger = logging.getLogger(__name__)
BATCH_INFO_STATEMENT = """
SELECT
start_tick.tick_id,
start_tick.tick_snapshot,
extract(epoch from start_tick.tick_time),
end_tick.tick_id,
end_tick.tick_snapshot,
extract(epoch from end_tick.tick_time)
FROM
pgq.get_batch_info(%s) batch,
pgq.tick start_tick,
pgq.tick end_tick
WHERE
start_tick.tick_id = batch.prev_tick_id AND end_tick.tick_id = batch.tick_id
"""
def to_mutation(row):
    """Convert a raw pgq event row into a MutationOperation message.

    The row is (ev_id, ev_data, epoch_time, ev_txid); ev_data is a
    'version:payload' string where only version '0' is understood.

    Raises:
        RuntimeError: if the payload version is not '0'.
    """
    event_id, payload, timestamp, transaction = row
    version, payload = payload.split(':', 1)
    if version != '0':
        # BUG FIX: the original passed (msg, version) as two exception args,
        # so the message rendered as a tuple instead of being formatted.
        raise RuntimeError('Cannot parse payload version: %s' % version)
    # NOTE(review): pickle.loads executes arbitrary code if the queue payload
    # can be written by an untrusted party -- assumed trusted here; confirm.
    (schema, table), operation, primary_key_columns, (old, new), configuration_version = pickle.loads(payload)
    states = {}
    if old:
        states['old'] = row_converter.to_protobuf(old)
    if new:
        states['new'] = row_converter.to_protobuf(new)
    assert states, 'at least one state must be set'
    return MutationOperation(
        id=event_id,
        schema=schema,
        table=table,
        operation=getattr(MutationOperation, operation),
        identity_columns=primary_key_columns,
        timestamp=to_timestamp(timestamp),
        transaction=transaction,
        **states
    )
class Worker(threading.Thread):
    """Daemon thread that consumes one PgQ queue on one database and
    republishes each batch of mutations through the configured handler.

    Completion (normal or exceptional) is exposed through a Future; the
    thread cannot be cancelled, only asked to exit via stop_async().
    """

    def __init__(self, cluster, dsn, set, consumer, handler):
        super(Worker, self).__init__(name=dsn)
        self.daemon = True
        self.cluster = cluster
        self.database = ManagedDatabase(cluster, dsn)
        self.set = set  # replication set name (used to derive the queue name)
        self.consumer = consumer  # PgQ consumer name to register as
        self.handler = handler  # object whose push() receives published data
        self.__stop_requested = threading.Event()
        self.__result = Future()
        self.__result.set_running_or_notify_cancel()  # cannot be cancelled

    def run(self):
        publisher = Publisher(self.handler.push)
        try:
            logger.debug('Started worker.')

            # TODO: this connection needs to timeout in case the lock cannot be
            # grabbed or the connection cannot be established to avoid never
            # exiting
            logger.info('Registering as queue consumer...')
            with self.database.connection() as connection, connection.cursor() as cursor:
                statement = "SELECT * FROM pgq.register_consumer(%s, %s)"
                cursor.execute(statement, (self.cluster.get_queue_name(self.set), self.consumer))
                (new,) = cursor.fetchone()
                logger.info('Registered as queue consumer: %s (%s registration).', self.consumer, 'new' if new else 'existing')
                connection.commit()

            logger.info('Ready to relay events.')
            while True:
                # Poll loop: waiting on the Event doubles as the poll delay
                # and the stop signal.
                if self.__stop_requested.wait(0.01):
                    break

                # TODO: this needs a timeout as well
                # TODO: this probably should have a lock on consumption
                with self.database.connection() as connection:
                    # Check to see if there is a batch available to be relayed.
                    statement = "SELECT batch_id FROM pgq.next_batch_info(%s, %s)"
                    with connection.cursor() as cursor:
                        cursor.execute(statement, (self.cluster.get_queue_name(self.set), self.consumer,))
                        (batch_id,) = cursor.fetchone()

                    if batch_id is None:
                        connection.commit()
                        continue  # There is nothing to consume.

                    # Fetch the details of the batch.
                    with connection.cursor() as cursor:
                        cursor.execute(BATCH_INFO_STATEMENT, (batch_id,))
                        start_id, start_snapshot, start_timestamp, end_id, end_snapshot, end_timestamp = cursor.fetchone()

                    batch = BatchIdentifier(
                        id=batch_id,
                        node=self.database.id.bytes,
                    )
                    begin = BeginOperation(
                        start=Tick(
                            id=start_id,
                            snapshot=to_snapshot(start_snapshot),
                            timestamp=to_timestamp(start_timestamp),
                        ),
                        end=Tick(
                            id=end_id,
                            snapshot=to_snapshot(end_snapshot),
                            timestamp=to_timestamp(end_timestamp),
                        ),
                    )

                    with publisher.batch(batch, begin) as publish:
                        # Fetch the events for the batch. This uses a named cursor
                        # to avoid having to load the entire event block into
                        # memory at once.
                        with connection.cursor('events') as cursor:
                            statement = "SELECT ev_id, ev_data, extract(epoch from ev_time), ev_txid FROM pgq.get_batch_events(%s)"
                            cursor.execute(statement, (batch_id,))
                            for mutation in itertools.imap(to_mutation, cursor):
                                publish(mutation)

                    with connection.cursor() as cursor:
                        cursor.execute("SELECT * FROM pgq.finish_batch(%s)", (batch_id,))
                        (success,) = cursor.fetchone()
                        # XXX: Not sure why this could happen?
                        if not success:
                            raise RuntimeError('Could not close batch!')

                    # XXX: Since this is outside of the batch block, this
                    # downstream consumers need to be able to handle receiving
                    # the same transaction multiple times, probably by checking
                    # a metadata table before starting to apply a batch.
                    connection.commit()
                    logger.debug('Successfully relayed batch %s.', batch)
        except Exception as error:
            logger.exception('Caught exception in worker: %s', error)
            self.__result.set_exception(error)
        else:
            logger.debug('Stopped.')
            self.__result.set_result(None)

    def result(self, timeout=None):
        """Block until the worker exits; returns None or raises its error."""
        return self.__result.result(timeout)

    def stop_async(self):
        """Ask the run loop to exit; returns the completion Future."""
        logger.debug('Requesting stop...')
        self.__stop_requested.set()
        return self.__result
# Worker failures of these types are treated as transient: the Relay
# restarts the worker (throttled) instead of treating them as fatal.
RECOVERABLE_ERRORS = (psycopg2.OperationalError,)

# Pairs a Worker thread with the time it was (re)started; the timestamp
# drives restart throttling in Relay.run.
WorkerState = namedtuple('WorkerState', 'worker time')
class Relay(threading.Thread):
    """Watches a replication set's configuration in ZooKeeper and keeps a
    single Worker relaying from whichever database DSN the configuration
    names; restarts workers that die with a recoverable database error,
    throttled to at most one restart per `throttle` seconds.
    """

    def __init__(self, cluster, set, consumer, handler, throttle=10):
        super(Relay, self).__init__(name='relay')
        self.daemon = True
        self.cluster = cluster
        self.set = set
        self.consumer = consumer
        self.handler = handler
        self.throttle = throttle  # minimum seconds between worker restarts
        self.__stop_requested = threading.Event()
        self.__result = Future()
        self.__result.set_running_or_notify_cancel()  # cannot be cancelled
        # Guards __worker_state, which is mutated both by the ZooKeeper watch
        # callback thread and by this thread's main loop.
        self.__worker_state_lock = threading.Lock()
        self.__worker_state = None  # WorkerState or None

    def run(self):
        try:
            logger.debug('Started relay (cluster: %s, set: %s) using %s.', self.cluster, self.set, self.handler)

            def __handle_session_state_change(state):
                if state == KazooState.SUSPENDED:
                    # TODO: This should exit cleanly but then raise, maybe?
                    # TODO: Ideally this would pause all processing, and
                    # continue if we recover (rather than lose the session.)
                    logger.warning('Lost connection to ZooKeeper! Requesting exit...')
                    self.__stop_requested.set()

            self.cluster.zookeeper.add_listener(__handle_session_state_change)

            # XXX: This needs to be implemented, but right now there is a race
            # condition that can cause the relay to never exit if the watch is
            # established against a dead/unresponsive ZooKeeper ensemble.
            # def __handle_cluster_version_change(data, stat):
            #     if not data:
            #         logger.warning('Received no cluster configuration data! Requesting exit...')
            #         self.__stop_requested.set()
            #         return False
            #     configuration = BinaryCodec(ClusterConfiguration).decode(data)
            #     if __version__ != configuration.version:
            #         logger.warning('Cluster and local versions do not match (cluster: %s, local: %s)! Requesting exit...', cluster, __version__)
            #         self.__stop_requested.set()
            #         return False
            # logger.debug('Checking cluster version...')
            # DataWatch(
            #     self.cluster.zookeeper,
            #     self.cluster.path,
            #     __handle_cluster_version_change,
            # )

            # Workers that have been asked to stop after a DSN change; kept
            # so they can (eventually) be checked on.
            stopping = []

            # XXX just store the config
            def start_worker(dsn):
                # Spawn a Worker against `dsn` and record its start time.
                worker = Worker(self.cluster, dsn, self.set, self.consumer, self.handler)
                worker.start()
                return WorkerState(worker, time.time())

            def __handle_state_change(data, stat):
                # ZooKeeper watch callback for the replication set node.
                if self.__stop_requested.is_set():
                    return False  # we're exiting anyway, don't do anything

                if data is None:
                    # TODO: it would probably make sense for this to have an exit code
                    logger.warning('Received no replication set configuration data! Requesting exit...')
                    self.__stop_requested.set()
                    return False

                logger.debug('Recieved an update to replication set configuration.')
                configuration = BinaryCodec(ReplicationSetConfiguration).decode(data)
                with self.__worker_state_lock:
                    # TODO: this is annoying and repetitive and should be cleaned up
                    if self.__worker_state is None:
                        # First configuration seen: start the initial worker.
                        self.__worker_state = start_worker(configuration.database.dsn)
                    elif self.__worker_state.worker.database.dsn != configuration.database.dsn:
                        # DSN changed: retire the old worker, start a new one.
                        self.__worker_state.worker.stop_async()
                        stopping.append(WorkerState(self.__worker_state.worker, time.time()))
                        self.__worker_state = start_worker(configuration.database.dsn)

            logger.debug('Fetching replication set configuration...')
            DataWatch(
                self.cluster.zookeeper,
                self.cluster.get_set_path(self.set),
                __handle_state_change,
            )

            while True:
                if self.__stop_requested.wait(0.01):
                    break

                # TODO: check up on stopping workers (ideally there are none)

                with self.__worker_state_lock:
                    if self.__worker_state is not None and not self.__worker_state.worker.is_alive():
                        try:
                            # Re-raises the worker's failure, if any.
                            self.__worker_state.worker.result(0)
                        except RECOVERABLE_ERRORS as error:
                            if time.time() > (self.__worker_state.time + self.throttle):
                                logger.info('Trying to restart %r, previously exited with recoverable error: %s', self.__worker_state.worker, error)
                                # TODO: hack, make a restart method
                                self.__worker_state = start_worker(self.__worker_state.worker.database.dsn)
                        else:
                            # Worker exited without error while we never asked
                            # it to stop -- that should not happen.
                            raise RuntimeError('Found unexpected dead worker: %r' % (self.__worker_state.worker,))

            with self.__worker_state_lock:
                if self.__worker_state is not None:
                    logger.debug('Stopping worker...')
                    future = self.__worker_state.worker.stop_async()
                    timeout = 10
                    logger.debug('Waiting up to %d seconds for worker to finish...', timeout)
                    try:
                        future.result(timeout)
                    except TimeoutError:
                        logger.warning('Exiting with worker still running!')
                    else:
                        logger.info('Worker exited cleanly.')
        except Exception as error:
            logger.exception('Caught exception in relay: %s', error)
            self.__result.set_exception(error)
        else:
            logger.debug('Stopped.')
            self.__result.set_result(None)

    def result(self, timeout=None):
        """Block until the relay exits; returns None or raises its error."""
        return self.__result.result(timeout)

    def stop_async(self):
        """Ask the run loop to exit; returns the completion Future."""
        logger.debug('Requesting stop...')
        self.__stop_requested.set()
        return self.__result
| {
"content_hash": "d5d8743fd5e5c88f726fb90ed7de8e11",
"timestamp": "",
"source": "github",
"line_count": 357,
"max_line_length": 148,
"avg_line_length": 38.83473389355742,
"alnum_prop": 0.5552510098095788,
"repo_name": "fuziontech/pgshovel",
"id": "b11de34e90af97b3eeb10188a26319a735a61fb6",
"size": "13864",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/python/pgshovel/relay/relay.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "467"
},
{
"name": "Makefile",
"bytes": "1040"
},
{
"name": "Protocol Buffer",
"bytes": "4369"
},
{
"name": "Python",
"bytes": "110502"
},
{
"name": "Shell",
"bytes": "758"
}
],
"symlink_target": ""
} |
"""
Resource tests.
"""
from hamcrest import (
assert_that,
equal_to,
has_length,
is_,
)
from consulchecknagiosplugin.resources import ConsulCheckHealth, ConsulCheck
from consulchecknagiosplugin.tests.fixtures import mocked_get, NODE, SERF_CHECK_ID
OUTPUT = "output"
def test_parse_check_health():
    """from_dict maps a passing check payload to code 0 and keeps output."""
    payload = {
        "CheckID": SERF_CHECK_ID,
        "Status": "passing",
        "Output": OUTPUT,
    }
    health = ConsulCheckHealth.from_dict(payload)
    assert_that(health.code, is_(equal_to(0)))
    assert_that(health.output, is_(equal_to(OUTPUT)))
def test_get_node_health():
    """The fixture node reports two health checks."""
    consul_check = ConsulCheck(node=NODE, check_id=SERF_CHECK_ID)
    with mocked_get(consul_check):
        health = consul_check.get_node_health()
    assert_that(health, has_length(2))
def test_get_check_health():
    """The targeted check resolves to a passing (code 0) health value."""
    consul_check = ConsulCheck(node=NODE, check_id=SERF_CHECK_ID)
    with mocked_get(consul_check):
        health = consul_check.get_check_health()
    assert_that(health.code, is_(equal_to(0)))
def test_probe():
    """probe() yields exactly one metric whose value carries code 0."""
    consul_check = ConsulCheck(node=NODE, check_id=SERF_CHECK_ID)
    with mocked_get(consul_check):
        metrics = list(consul_check.probe())
    assert_that(metrics, has_length(1))
    assert_that(metrics[0].value.code, is_(equal_to(0)))
| {
"content_hash": "967498a21ca271dce40836535e2b971e",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 82,
"avg_line_length": 22.82758620689655,
"alnum_prop": 0.6185800604229608,
"repo_name": "locationlabs/consul-check-nagios-plugin",
"id": "ba498c95fab8238f9cc2e4738c9e42db907448a2",
"size": "1324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "consulchecknagiosplugin/tests/test_resources.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "14571"
}
],
"symlink_target": ""
} |
"""
Training symbol detectors via Linear SVM
"""
import cv2
import numpy as np
import random
from symbol import LOC
from sklearn.svm import SVC
from sklearn.externals import joblib
from utils import create_symbol_with_center_loc, get_sub_im
from feature import hog, pixel_vec
def read_annotations(annotation_file_path):
"""
Read annotations given the annotation file path.
:param annotation_file_path: path to annotation file
:type annotation_file_path: string
:return: annotations
:rtype: dict{label: [locations]}
"""
annotations = {}
try:
with open(annotation_file_path, "r") as f:
lines = f.readlines()
for line in lines:
parsed = [x.strip() for x in line.split()]
label = parsed[0]
loc = LOC(int(parsed[1]), int(parsed[2]))
if label not in annotations:
annotations[label] = [loc]
else:
annotations[label].append(loc)
except Exception:
print "nothing"
return annotations
def is_in_loc_list(loc, ll):
    """
    Determine whether a location is in a location list.

    :param loc: target location
    :type loc: LOC
    :param ll: location list
    :type ll: [LOC]
    :return: True or False
    :rtype: Boolean
    """
    target = (loc.get_x(), loc.get_y())
    # any() stops at the first match and avoids materializing a list; the
    # original also shadowed the builtin name `list`.
    return any((l.get_x(), l.get_y()) == target for l in ll)
def prepare_background_data(im, locs):
    """
    Crop subimages to prepare background data.

    Samples random centers until 1000 valid background patches are found.

    :param im: whole image
    :type im: cv2.image
    :param locs: locations for the annotated symbols
    :type locs: [LOC]
    :return: [background imgs], [background labels]
    :rtype: [cv2.image], [string]
    """
    rows, cols = im.shape
    target_count = 1000
    collected = 0
    samples = []
    labels = []
    while collected < target_count:
        # NOTE(review): randint is inclusive, so y == rows / x == cols can
        # occur; get_sub_im is assumed to reject out-of-range centers by
        # returning None -- confirm.
        y = random.randint(0, rows)
        x = random.randint(0, cols)
        candidate = LOC(x, y)
        if not is_in_loc_list(candidate, locs):
            symbol = create_symbol_with_center_loc("background", candidate)
            sub_im = get_sub_im(im, symbol)
            if sub_im is not None:
                samples.append(get_sub_im(im, symbol))
                collected += 1
                labels.append("background")
    return samples, labels
def prepare_data_from_annotation(im, annotations):
"""
Crop subimages for annotated symbols
:param im: whole image
:type im: cv2.image
:param annotations: annotation dict
:type annotations: dict{label: [LOC]}
:return: [symbol images], [symbol labels]
:rtype: [cv2.image], [string]
"""
data = []
labels = []
try:
symbols = []
pos_locs = []
for label in annotations.keys():
locs = annotations[label]
pos_locs = pos_locs + locs
sl = [create_symbol_with_center_loc(label, loc) for loc in locs]
symbols = symbols + sl
data_label_pair = [(get_sub_im(im, s), s.get_label()) for s in symbols if s is not None]
data = [d for (d,l) in data_label_pair if d is not None]
labels = [l for (d,l) in data_label_pair if d is not None]
bk_data, bk_labels = prepare_background_data(im, pos_locs)
labels += bk_labels
data += bk_data
print len(labels), len(data)
except Exception:
print "prepare annotations"
return data, labels
def train_svm(img_file_path, annotation_file_path, detector_name):
"""
Train SVM classifier given symbol annotations and feature extractor.
SVM parameters: kernel='linear', C=2.67, decision_function_shape= "ovo".
Trained model will be saved as "../models/?_svm.pkl", ? is the feature extraction method.
:param img_file_path: path to score image
:type img_file_path: string
:param annotation_file_path: path to annotations
:type annotation_file_path: string
:param detector_name: "hog" or "pixel" -- two types of features
:type detector_name: string
"""
im = cv2.imread(img_file_path, 0)
annotations = read_annotations(annotation_file_path)
img_data, labels = prepare_data_from_annotation(im, annotations)
if detector_name == "hog":
features = [hog(im) for im in img_data]
elif detector_name == "pixel":
features = [pixel_vec(im) for im in img_data]
else:
print("unknown detector.")
raise Exception
train_data = np.array(np.float32(features))
labels = np.array(labels)
clf = SVC(kernel='linear', C=2.67, decision_function_shape= "ovo", max_iter=5000000, probability=True, verbose=True)
print train_data.shape
print labels.shape
clf.fit(train_data, labels)
joblib.dump(clf, '../models/' + detector_name + '_svm.pkl') | {
"content_hash": "34e2a77e0d29bd2aef91fd9c26f55e4f",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 120,
"avg_line_length": 29.176829268292682,
"alnum_prop": 0.6020898641588297,
"repo_name": "liang-chen/Vintager",
"id": "e0c40aee37f72ccba9351a28c53ade369a895e50",
"size": "4786",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/train.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42001"
}
],
"symlink_target": ""
} |
"""
Drivers for volumes.
"""
import time
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_utils import excutils
from cinder import exception
from cinder.i18n import _, _LE, _LW
from cinder.image import image_utils
from cinder.openstack.common import fileutils
from cinder.openstack.common import log as logging
from cinder import utils
from cinder.volume import rpcapi as volume_rpcapi
from cinder.volume import utils as volume_utils
LOG = logging.getLogger(__name__)
# Configuration options common to all volume drivers. They are registered
# globally below and also appended to each driver's own configuration object
# in VolumeDriver.__init__.
volume_opts = [
    cfg.IntOpt('num_shell_tries',
               default=3,
               help='Number of times to attempt to run flakey shell commands'),
    cfg.IntOpt('reserved_percentage',
               default=0,
               help='The percentage of backend capacity is reserved'),
    cfg.IntOpt('iscsi_num_targets',
               default=100,
               help='The maximum number of iSCSI target IDs per host'),
    cfg.StrOpt('iscsi_target_prefix',
               default='iqn.2010-10.org.openstack:',
               help='Prefix for iSCSI volumes'),
    cfg.StrOpt('iscsi_ip_address',
               default='$my_ip',
               help='The IP address that the iSCSI daemon is listening on'),
    cfg.IntOpt('iscsi_port',
               default=3260,
               help='The port that the iSCSI daemon is listening on'),
    cfg.IntOpt('num_volume_device_scan_tries',
               deprecated_name='num_iscsi_scan_tries',
               default=3,
               help='The maximum number of times to rescan targets'
                    ' to find volume'),
    cfg.StrOpt('volume_backend_name',
               default=None,
               help='The backend name for a given driver implementation'),
    cfg.BoolOpt('use_multipath_for_image_xfer',
                default=False,
                help='Do we attach/detach volumes in cinder using multipath '
                     'for volume to image and image to volume transfers?'),
    cfg.StrOpt('volume_clear',
               default='zero',
               help='Method used to wipe old volumes (valid options are: '
                    'none, zero, shred)'),
    cfg.IntOpt('volume_clear_size',
               default=0,
               help='Size in MiB to wipe at start of old volumes. 0 => all'),
    cfg.StrOpt('volume_clear_ionice',
               default=None,
               help='The flag to pass to ionice to alter the i/o priority '
                    'of the process used to zero a volume after deletion, '
                    'for example "-c3" for idle only priority.'),
    cfg.StrOpt('iscsi_helper',
               default='tgtadm',
               help='iSCSI target user-land tool to use. tgtadm is default, '
                    'use lioadm for LIO iSCSI support, iseradm for the ISER '
                    'protocol, or fake for testing.'),
    cfg.StrOpt('volumes_dir',
               default='$state_path/volumes',
               help='Volume configuration file storage '
                    'directory'),
    cfg.StrOpt('iet_conf',
               default='/etc/iet/ietd.conf',
               help='IET configuration file'),
    # Deprecated and unused; kept only so existing config files still parse.
    cfg.StrOpt('lio_initiator_iqns',
               default='',
               help='This option is deprecated and unused. '
                    'It will be removed in the next release.'),
    cfg.StrOpt('iscsi_iotype',
               default='fileio',
               help=('Sets the behavior of the iSCSI target '
                     'to either perform blockio or fileio '
                     'optionally, auto can be set and Cinder '
                     'will autodetect type of backing device')),
    cfg.StrOpt('volume_dd_blocksize',
               default='1M',
               help='The default block size used when copying/clearing '
                    'volumes'),
    cfg.StrOpt('volume_copy_blkio_cgroup_name',
               default='cinder-volume-copy',
               help='The blkio cgroup name to be used to limit bandwidth '
                    'of volume copy'),
    cfg.IntOpt('volume_copy_bps_limit',
               default=0,
               help='The upper limit of bandwidth of volume copy. '
                    '0 => unlimited'),
    cfg.StrOpt('iscsi_write_cache',
               default='on',
               help='Sets the behavior of the iSCSI target to either '
                    'perform write-back(on) or write-through(off). '
                    'This parameter is valid if iscsi_helper is set '
                    'to tgtadm or iseradm.'),
    cfg.StrOpt('iscsi_protocol',
               default='iscsi',
               help='Determines the iSCSI protocol for new iSCSI volumes, '
                    'created with tgtadm or lioadm target helpers. In '
                    'order to enable RDMA, this parameter should be set '
                    'with the value "iser". The supported iSCSI protocol '
                    'values are "iscsi" and "iser".'),
    cfg.StrOpt('driver_client_cert_key',
               default=None,
               help='The path to the client certificate key for verification, '
                    'if the driver supports it.'),
    cfg.StrOpt('driver_client_cert',
               default=None,
               help='The path to the client certificate for verification, '
                    'if the driver supports it.'),
    cfg.BoolOpt('driver_use_ssl',
                default=False,
                help='Tell driver to use SSL for connection to backend '
                     'storage if the driver supports it.'),
]

# for backward compatibility
iser_opts = [
    cfg.IntOpt('num_iser_scan_tries',
               default=3,
               help='The maximum number of times to rescan iSER target'
                    'to find volume'),
    cfg.IntOpt('iser_num_targets',
               default=100,
               help='The maximum number of iSER target IDs per host'),
    cfg.StrOpt('iser_target_prefix',
               default='iqn.2010-10.org.openstack:',
               help='Prefix for iSER volumes'),
    cfg.StrOpt('iser_ip_address',
               default='$my_ip',
               help='The IP address that the iSER daemon is listening on'),
    cfg.IntOpt('iser_port',
               default=3260,
               help='The port that the iSER daemon is listening on'),
    cfg.StrOpt('iser_helper',
               default='tgtadm',
               help='The name of the iSER target user-land tool to use'),
]

# Register both option groups on the global config object.
CONF = cfg.CONF
CONF.register_opts(volume_opts)
CONF.register_opts(iser_opts)
class VolumeDriver(object):
    """Executes commands relating to Volumes.

    Base Driver for Cinder Volume Control Path,
    This includes supported/required implementation
    for API calls. Also provides *generic* implementation
    of core features like cloning, copy_image_to_volume etc,
    this way drivers that inherit from this base class and
    don't offer their own impl can fall back on a general
    solution here.

    Key thing to keep in mind with this driver is that it's
    intended that these drivers ONLY implement Control Path
    details (create, delete, extend...), while transport or
    data path related implementation should be a *member object*
    that we call a connector. The point here is that for example
    don't allow the LVM driver to implement iSCSI methods, instead
    call whatever connector it has configured via conf file
    (iSCSI{LIO, TGT, IET}, FC, etc).

    In the base class and for example the LVM driver we do this via a has-a
    relationship and just provide an interface to the specific connector
    methods. How you do this in your own driver is of course up to you.
    """

    # Overridden by concrete drivers with their release version string;
    # reported via get_version().
    VERSION = "N/A"
def __init__(self, execute=utils.execute, *args, **kwargs):
    """Set up shared driver state from keyword arguments.

    Recognized kwargs: db, host, configuration; extra positional and
    keyword args are ignored by the base class.
    """
    # NOTE(vish): db is set by Manager
    self.db = kwargs.get('db')
    self.host = kwargs.get('host')
    self.configuration = kwargs.get('configuration', None)
    if self.configuration:
        # Make the common options resolvable through the driver's own
        # configuration object.
        self.configuration.append_config_values(volume_opts)
        self.configuration.append_config_values(iser_opts)
    self.set_execute(execute)
    self._stats = {}
    self.pools = []
    # We set these mappings up in the base driver so they
    # can be used by children
    # (intended for LVM and BlockDevice, but others could use as well)
    self.target_mapping = {
        'fake': 'cinder.volume.targets.fake.FakeTarget',
        'ietadm': 'cinder.volume.targets.iet.IetAdm',
        'iseradm': 'cinder.volume.targets.iser.ISERTgtAdm',
        'lioadm': 'cinder.volume.targets.lio.LioAdm',
        'tgtadm': 'cinder.volume.targets.tgt.TgtAdm', }
    # set True by manager after successful check_for_setup
    self._initialized = False
def _is_non_recoverable(self, err, non_recoverable_list):
for item in non_recoverable_list:
if item in err:
return True
return False
def _try_execute(self, *command, **kwargs):
# NOTE(vish): Volume commands can partially fail due to timing, but
# running them a second time on failure will usually
# recover nicely.
non_recoverable = kwargs.pop('no_retry_list', [])
tries = 0
while True:
try:
self._execute(*command, **kwargs)
return True
except processutils.ProcessExecutionError as ex:
tries = tries + 1
if tries >= self.configuration.num_shell_tries or\
self._is_non_recoverable(ex.stderr, non_recoverable):
raise
LOG.exception(_LE("Recovering from a failed execute. "
"Try number %s"), tries)
time.sleep(tries ** 2)
    def _detach_volume(self, context, attach_info, volume, properties,
                       force=False, remote=False):
        """Disconnect the volume from the host.

        :param attach_info: dict produced by _attach_volume/_connect_device
                            with 'conn', 'device' and 'connector' keys.
        :param force: forwarded to terminate_connection.
        :param remote: when True, terminate/remove-export goes through the
                       volume's owning manager over RPC rather than the
                       local driver.
        """
        # Use Brick's code to do attach/detach
        connector = attach_info['connector']
        connector.disconnect_volume(attach_info['conn']['data'],
                                    attach_info['device'])
        if remote:
            # Call remote manager's terminate_connection which includes
            # driver's terminate_connection and remove export
            rpcapi = volume_rpcapi.VolumeAPI()
            rpcapi.terminate_connection(context, volume, properties,
                                        force=force)
        else:
            # Call local driver's terminate_connection and remove export.
            # NOTE(avishay) This is copied from the manager's code - need to
            # clean this up in the future.
            try:
                self.terminate_connection(volume, properties, force=force)
            except Exception as err:
                err_msg = (_('Unable to terminate volume connection: %(err)s')
                           % {'err': err})
                LOG.error(err_msg)
                raise exception.VolumeBackendAPIException(data=err_msg)
            try:
                LOG.debug(("volume %s: removing export"), volume['id'])
                self.remove_export(context, volume)
            except Exception as ex:
                LOG.exception(_LE("Error detaching volume %(volume)s, "
                                  "due to remove export failure."),
                              {"volume": volume['id']})
                raise exception.RemoveExportException(volume=volume['id'],
                                                      reason=ex)
    def set_execute(self, execute):
        # Inject the shell-execution callable used by _try_execute et al.
        self._execute = execute
    def set_initialized(self):
        # Called by the volume manager after setup checks succeed.
        self._initialized = True
    @property
    def initialized(self):
        # True once the manager has marked the driver ready for requests.
        return self._initialized
def get_version(self):
"""Get the current version of this driver."""
return self.VERSION
    def check_for_setup_error(self):
        # Abstract: verify backend prerequisites; raise on misconfiguration.
        raise NotImplementedError()
    def create_volume(self, volume):
        """Creates a volume. Can optionally return a Dictionary of
        changes to the volume object to be persisted.
        If volume_type extra specs includes
        'capabilities:replication <is> True' the driver
        needs to create a volume replica (secondary), and setup replication
        between the newly created volume and the secondary volume.
        Returned dictionary should include:
        volume['replication_status'] = 'copying'
        volume['replication_extended_status'] = driver specific value
        volume['driver_data'] = driver specific value
        """
        raise NotImplementedError()
    def create_volume_from_snapshot(self, volume, snapshot):
        """Creates a volume from a snapshot.
        If volume_type extra specs includes 'replication: <is> True'
        the driver needs to create a volume replica (secondary),
        and setup replication between the newly created volume and
        the secondary volume.
        """
        raise NotImplementedError()
    def create_cloned_volume(self, volume, src_vref):
        """Creates a clone of the specified volume.
        If volume_type extra specs includes 'replication: <is> True' the
        driver needs to create a volume replica (secondary)
        and setup replication between the newly created volume
        and the secondary volume.
        """
        raise NotImplementedError()
    def create_replica_test_volume(self, volume, src_vref):
        """Creates a test replica clone of the specified replicated volume.
        Create a clone of the replicated (secondary) volume.
        """
        raise NotImplementedError()
    def delete_volume(self, volume):
        """Deletes a volume.
        If volume_type extra specs includes 'replication: <is> True'
        then the driver needs to delete the volume replica too.
        """
        raise NotImplementedError()
    def create_snapshot(self, snapshot):
        """Creates a snapshot."""
        raise NotImplementedError()
    def delete_snapshot(self, snapshot):
        """Deletes a snapshot."""
        raise NotImplementedError()
    def local_path(self, volume):
        """Return the local (host-side) device path backing *volume*."""
        raise NotImplementedError()
def get_volume_stats(self, refresh=False):
"""Return the current state of the volume service. If 'refresh' is
True, run the update first.
For replication the following state should be reported:
replication_support = True (None or false disables replication)
"""
return None
    def copy_volume_data(self, context, src_vol, dest_vol, remote=None):
        """Copy data from src_vol to dest_vol.

        Attaches both volumes via os-brick, dd's the raw blocks across,
        then detaches both in a finally block.

        :param remote: None, 'src', 'dest' or 'both' -- which side(s) must
                       be attached through the remote manager over RPC.
        """
        LOG.debug(('copy_data_between_volumes %(src)s -> %(dest)s.')
                  % {'src': src_vol['name'], 'dest': dest_vol['name']})
        properties = utils.brick_get_connector_properties()
        dest_remote = True if remote in ['dest', 'both'] else False
        dest_orig_status = dest_vol['status']
        try:
            dest_attach_info = self._attach_volume(context,
                                                   dest_vol,
                                                   properties,
                                                   remote=dest_remote)
        except Exception:
            with excutils.save_and_reraise_exception():
                msg = _("Failed to attach volume %(vol)s")
                LOG.error(msg % {'vol': dest_vol['id']})
                # Roll the destination back to its pre-copy status.
                self.db.volume_update(context, dest_vol['id'],
                                      {'status': dest_orig_status})
        src_remote = True if remote in ['src', 'both'] else False
        src_orig_status = src_vol['status']
        try:
            src_attach_info = self._attach_volume(context,
                                                  src_vol,
                                                  properties,
                                                  remote=src_remote)
        except Exception:
            with excutils.save_and_reraise_exception():
                msg = _("Failed to attach volume %(vol)s")
                LOG.error(msg % {'vol': src_vol['id']})
                self.db.volume_update(context, src_vol['id'],
                                      {'status': src_orig_status})
                # Destination is already attached at this point -- detach
                # it before re-raising so it is not leaked.
                self._detach_volume(context, dest_attach_info, dest_vol,
                                    properties, force=True, remote=dest_remote)
        copy_error = True
        try:
            size_in_mb = int(src_vol['size']) * 1024    # vol size is in GB
            volume_utils.copy_volume(
                src_attach_info['device']['path'],
                dest_attach_info['device']['path'],
                size_in_mb,
                self.configuration.volume_dd_blocksize)
            copy_error = False
        except Exception:
            with excutils.save_and_reraise_exception():
                msg = _("Failed to copy volume %(src)s to %(dest)s.")
                LOG.error(msg % {'src': src_vol['id'], 'dest': dest_vol['id']})
        finally:
            # Force-detach when the copy failed so stale connections do
            # not linger on either side.
            self._detach_volume(context, dest_attach_info, dest_vol,
                                properties, force=copy_error,
                                remote=dest_remote)
            self._detach_volume(context, src_attach_info, src_vol,
                                properties, force=copy_error,
                                remote=src_remote)
    def copy_image_to_volume(self, context, volume, image_service, image_id):
        """Fetch the image from image_service and write it to the volume."""
        LOG.debug(('copy_image_to_volume %s.') % volume['name'])
        properties = utils.brick_get_connector_properties()
        attach_info = self._attach_volume(context, volume, properties)
        try:
            # Download and convert the image directly onto the attached
            # block device.
            image_utils.fetch_to_raw(context,
                                     image_service,
                                     image_id,
                                     attach_info['device']['path'],
                                     self.configuration.volume_dd_blocksize,
                                     size=volume['size'])
        finally:
            # Always detach, even if the fetch failed.
            self._detach_volume(context, attach_info, volume, properties)
    def copy_volume_to_image(self, context, volume, image_service, image_meta):
        """Copy the volume to the specified image."""
        LOG.debug(('copy_volume_to_image %s.') % volume['name'])
        properties = utils.brick_get_connector_properties()
        attach_info = self._attach_volume(context, volume, properties)
        try:
            # Upload straight from the attached block device.
            image_utils.upload_volume(context,
                                      image_service,
                                      image_meta,
                                      attach_info['device']['path'])
        finally:
            # Always detach, even if the upload failed.
            self._detach_volume(context, attach_info, volume, properties)
def _attach_volume(self, context, volume, properties, remote=False):
"""Attach the volume."""
if remote:
# Call remote manager's initialize_connection which includes
# driver's create_export and initialize_connection
rpcapi = volume_rpcapi.VolumeAPI()
conn = rpcapi.initialize_connection(context, volume, properties)
else:
# Call local driver's create_export and initialize_connection.
# NOTE(avishay) This is copied from the manager's code - need to
# clean this up in the future.
model_update = None
try:
LOG.debug(("Volume %s: creating export"), volume['id'])
model_update = self.create_export(context, volume)
if model_update:
volume = self.db.volume_update(context, volume['id'],
model_update)
except exception.CinderException as ex:
if model_update:
LOG.exception(_LE("Failed updating model of volume "
"%(volume_id)s with driver provided "
"model %(model)s") %
{'volume_id': volume['id'],
'model': model_update})
raise exception.ExportFailure(reason=ex)
try:
conn = self.initialize_connection(volume, properties)
except Exception as err:
try:
err_msg = (_('Unable to fetch connection information from '
'backend: %(err)s') % {'err': err})
LOG.error(err_msg)
LOG.debug("Cleaning up failed connect initialization.")
self.remove_export(context, volume)
except Exception as ex:
ex_msg = (_('Error encountered during cleanup '
'of a failed attach: %(ex)s') % {'ex': ex})
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=ex_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
return self._connect_device(conn)
    def _connect_device(self, conn):
        """Attach the device described by *conn* via os-brick.

        :param conn: connection info dict from initialize_connection
                     (must contain 'driver_volume_type' and 'data').
        :returns: {'conn': conn, 'device': device, 'connector': connector}
        :raises DeviceUnavailable: when the attached path fails validation.
        """
        # Use Brick's code to do attach/detach
        use_multipath = self.configuration.use_multipath_for_image_xfer
        device_scan_attempts = self.configuration.num_volume_device_scan_tries
        protocol = conn['driver_volume_type']
        connector = utils.brick_get_connector(
            protocol,
            use_multipath=use_multipath,
            device_scan_attempts=device_scan_attempts,
            conn=conn)
        device = connector.connect_volume(conn['data'])
        host_device = device['path']
        # Secure network file systems will NOT run as root.
        root_access = not self.secure_file_operations_enabled()
        if not connector.check_valid_device(host_device, root_access):
            raise exception.DeviceUnavailable(path=host_device,
                                              reason=(_("Unable to access "
                                                        "the backend storage "
                                                        "via the path "
                                                        "%(path)s.") %
                                                      {'path': host_device}))
        return {'conn': conn, 'device': device, 'connector': connector}
def clone_image(self, context, volume,
image_location, image_meta,
image_service):
"""Create a volume efficiently from an existing image.
image_location is a string whose format depends on the
image service backend in use. The driver should use it
to determine whether cloning is possible.
image_meta is a dictionary that includes 'disk_format' (e.g.
raw, qcow2) and other image attributes that allow drivers to
decide whether they can clone the image without first requiring
conversion.
image_service is the reference of the image_service to use.
Note that this is needed to be passed here for drivers that
will want to fetch images from the image service directly.
Returns a dict of volume properties eg. provider_location,
boolean indicating whether cloning occurred
"""
return None, False
    def backup_volume(self, context, backup, backup_service):
        """Create a new backup from an existing volume.

        Attaches the volume locally and hands its open device file to the
        backup service.
        """
        volume = self.db.volume_get(context, backup['volume_id'])
        LOG.debug(('Creating a new backup for volume %s.') %
                  volume['name'])
        properties = utils.brick_get_connector_properties()
        attach_info = self._attach_volume(context, volume, properties)
        try:
            volume_path = attach_info['device']['path']
            # Secure network file systems will not chown files.
            if self.secure_file_operations_enabled():
                with fileutils.file_open(volume_path) as volume_file:
                    backup_service.backup(backup, volume_file)
            else:
                # Temporarily take ownership of the device so it can be
                # read without running the backup as root.
                with utils.temporary_chown(volume_path):
                    with fileutils.file_open(volume_path) as volume_file:
                        backup_service.backup(backup, volume_file)
        finally:
            self._detach_volume(context, attach_info, volume, properties)
    def restore_backup(self, context, backup, volume, backup_service):
        """Restore an existing backup to a new or existing volume.

        Attaches the volume locally and hands its device file (opened for
        writing) to the backup service.
        """
        LOG.debug(('Restoring backup %(backup)s to '
                   'volume %(volume)s.') %
                  {'backup': backup['id'],
                   'volume': volume['name']})
        properties = utils.brick_get_connector_properties()
        attach_info = self._attach_volume(context, volume, properties)
        try:
            volume_path = attach_info['device']['path']
            # Secure network file systems will not chown files.
            if self.secure_file_operations_enabled():
                with fileutils.file_open(volume_path, 'wb') as volume_file:
                    backup_service.restore(backup, volume['id'], volume_file)
            else:
                # Temporarily take ownership so the device can be written
                # without running the restore as root.
                with utils.temporary_chown(volume_path):
                    with fileutils.file_open(volume_path, 'wb') as volume_file:
                        backup_service.restore(backup, volume['id'],
                                               volume_file)
        finally:
            self._detach_volume(context, attach_info, volume, properties)
    def clear_download(self, context, volume):
        """Clean up after an interrupted image copy."""
        # Base implementation: nothing to clean up.
        pass
    def extend_volume(self, volume, new_size):
        # Abstract: grow *volume* to new_size (GB); optional capability.
        msg = _("Extend volume not implemented")
        raise NotImplementedError(msg)
def migrate_volume(self, context, volume, host):
"""Migrate the volume to the specified host.
Returns a boolean indicating whether the migration occurred, as well as
model_update.
:param ctxt: Context
:param volume: A dictionary describing the volume to migrate
:param host: A dictionary describing the host to migrate to, where
host['host'] is its name, and host['capabilities'] is a
dictionary of its reported capabilities.
"""
return (False, None)
    def retype(self, context, volume, new_type, diff, host):
        """Convert the volume to be of the new type.
        Returns either:
        A boolean indicating whether the retype occurred, or
        A tuple (retyped, model_update) where retyped is a boolean
        indicating if the retype occurred, and the model_update includes
        changes for the volume db.
        if diff['extra_specs'] includes 'replication' then:
        if ('True', _ ) then replication should be disabled:
        Volume replica should be deleted
        volume['replication_status'] should be changed to 'disabled'
        volume['replication_extended_status'] = None
        volume['replication_driver_data'] = None
        if (_, 'True') then replication should be enabled:
        Volume replica (secondary) should be created, and replication
        should be setup between the volume and the newly created
        replica
        volume['replication_status'] = 'copying'
        volume['replication_extended_status'] = driver specific value
        volume['replication_driver_data'] = driver specific value
        :param ctxt: Context
        :param volume: A dictionary describing the volume to migrate
        :param new_type: A dictionary describing the volume type to convert to
        :param diff: A dictionary with the difference between the two types
        :param host: A dictionary describing the host to migrate to, where
        host['host'] is its name, and host['capabilities'] is a
        dictionary of its reported capabilities.
        """
        # Base driver cannot retype; the manager falls back to migration.
        return False, None
    def accept_transfer(self, context, volume, new_user, new_project):
        """Accept the transfer of a volume for a new user/project."""
        # Base implementation: no backend-side bookkeeping required.
        pass
    def manage_existing(self, volume, existing_ref):
        """Brings an existing backend storage object under Cinder management.
        existing_ref is passed straight through from the API request's
        manage_existing_ref value, and it is up to the driver how this should
        be interpreted. It should be sufficient to identify a storage object
        that the driver should somehow associate with the newly-created cinder
        volume structure.
        There are two ways to do this:
        1. Rename the backend storage object so that it matches the,
        volume['name'] which is how drivers traditionally map between a
        cinder volume and the associated backend storage object.
        2. Place some metadata on the volume, or somewhere in the backend, that
        allows other driver requests (e.g. delete, clone, attach, detach...)
        to locate the backend storage object when required.
        If the existing_ref doesn't make sense, or doesn't refer to an existing
        backend storage object, raise a ManageExistingInvalidReference
        exception.
        The volume may have a volume_type, and the driver can inspect that and
        compare against the properties of the referenced backend storage
        object. If they are incompatible, raise a
        ManageExistingVolumeTypeMismatch, specifying a reason for the failure.
        """
        msg = _("Manage existing volume not implemented.")
        raise NotImplementedError(msg)
    def manage_existing_get_size(self, volume, existing_ref):
        """Return size of volume to be managed by manage_existing.
        When calculating the size, round up to the next GB.
        """
        msg = _("Manage existing volume not implemented.")
        raise NotImplementedError(msg)
    def unmanage(self, volume):
        """Removes the specified volume from Cinder management.
        Does not delete the underlying backend storage object.
        For most drivers, this will not need to do anything. However, some
        drivers might use this call as an opportunity to clean up any
        Cinder-specific configuration that they have associated with the
        backend storage object.
        """
        pass
    def attach_volume(self, context, volume, instance_uuid, host_name,
                      mountpoint):
        """Callback for volume attached to instance or host."""
        # Base implementation: notification only, nothing to do.
        pass
    def detach_volume(self, context, volume):
        """Callback for volume detached."""
        pass
    def do_setup(self, context):
        """Any initialization the volume driver does while starting."""
        pass
    def validate_connector(self, connector):
        """Fail if connector doesn't contain all the data needed by driver."""
        # Base implementation accepts any connector; protocol drivers
        # (iSCSI, FC) override with real checks.
        pass
    @staticmethod
    def validate_connector_has_setting(connector, setting):
        # No-op here; overridden by drivers (e.g. FibreChannelDriver) that
        # require specific connector fields.
        pass
    def reenable_replication(self, context, volume):
        """Re-enable replication between the replica and primary volume.
        This is used to re-enable/fix the replication between primary
        and secondary. One use is as part of the fail-back process, when
        you re-synchorize your old primary with the promoted volume
        (the old replica).
        Returns model_update for the volume to reflect the actions of the
        driver.
        The driver is expected to update the following entries:
        'replication_status'
        'replication_extended_status'
        'replication_driver_data'
        Possible 'replication_status' values (in model_update) are:
        'error' - replication in error state
        'copying' - replication copying data to secondary (inconsistent)
        'active' - replication copying data to secondary (consistent)
        'active-stopped' - replication data copy on hold (consistent)
        'inactive' - replication data copy on hold (inconsistent)
        Values in 'replication_extended_status' and 'replication_driver_data'
        are managed by the driver.
        :param context: Context
        :param volume: A dictionary describing the volume
        """
        # NOTE(review): message says "sync_replica" although the method is
        # reenable_replication -- looks like a stale name; confirm before
        # changing the user-visible text.
        msg = _("sync_replica not implemented.")
        raise NotImplementedError(msg)
    def get_replication_status(self, context, volume):
        """Query the actual volume replication status from the driver.
        Returns model_update for the volume.
        The driver is expected to update the following entries:
        'replication_status'
        'replication_extended_status'
        'replication_driver_data'
        Possible 'replication_status' values (in model_update) are:
        'error' - replication in error state
        'copying' - replication copying data to secondary (inconsistent)
        'active' - replication copying data to secondary (consistent)
        'active-stopped' - replication data copy on hold (consistent)
        'inactive' - replication data copy on hold (inconsistent)
        Values in 'replication_extended_status' and 'replication_driver_data'
        are managed by the driver.
        :param context: Context
        :param volume: A dictionary describing the volume
        """
        # Base implementation reports no status; replication-capable
        # drivers override.
        return None
    def promote_replica(self, context, volume):
        """Promote the replica to be the primary volume.
        Following this command, replication between the volumes at
        the storage level should be stopped, the replica should be
        available to be attached, and the replication status should
        be in status 'inactive'.
        Returns model_update for the volume.
        The driver is expected to update the following entries:
        'replication_status'
        'replication_extended_status'
        'replication_driver_data'
        Possible 'replication_status' values (in model_update) are:
        'error' - replication in error state
        'inactive' - replication data copy on hold (inconsistent)
        Values in 'replication_extended_status' and 'replication_driver_data'
        are managed by the driver.
        :param context: Context
        :param volume: A dictionary describing the volume
        """
        msg = _("promote_replica not implemented.")
        raise NotImplementedError(msg)
    # ####### Interface methods for DataPath (Connector) ########
    def ensure_export(self, context, volume):
        """Synchronously recreates an export for a volume."""
        raise NotImplementedError()
    def create_export(self, context, volume):
        """Exports the volume.
        Can optionally return a Dictionary of changes
        to the volume object to be persisted.
        """
        raise NotImplementedError()
    def remove_export(self, context, volume):
        """Removes an export for a volume."""
        raise NotImplementedError()
    def initialize_connection(self, volume, connector):
        """Allow connection to connector and return connection info."""
        raise NotImplementedError()
    def terminate_connection(self, volume, connector, **kwargs):
        """Disallow connection from connector"""
        # Intentionally a no-op in the base class (docstring-only body).
    def create_consistencygroup(self, context, group):
        """Creates a consistencygroup."""
        raise NotImplementedError()
    def delete_consistencygroup(self, context, group):
        """Deletes a consistency group."""
        raise NotImplementedError()
    def create_cgsnapshot(self, context, cgsnapshot):
        """Creates a cgsnapshot."""
        raise NotImplementedError()
    def delete_cgsnapshot(self, context, cgsnapshot):
        """Deletes a cgsnapshot."""
        raise NotImplementedError()
def get_pool(self, volume):
"""Return pool name where volume reside on.
:param volume: The volume hosted by the the driver.
:return: name of the pool where given volume is in.
"""
return None
def secure_file_operations_enabled(self):
"""Determine if driver is running in Secure File Operations mode.
The Cinder Volume driver needs to query if this driver is running
in a secure file operations mode. By default, it is False: any driver
that does support secure file operations should override this method.
"""
return False
def update_migrated_volume(self, ctxt, volume, new_volume):
"""Return model update for migrated volume.
:param volume: The original volume that was migrated to this backend
:param new_volume: The migration volume object that was created on
this backend as part of the migration process
:return model_update to update DB with any needed changes
"""
return None
class ProxyVD(object):
    """Marker base class for proxy volume drivers.

    Drivers that delegate through a proxy object (e.g. via __setattr__ and
    __getattr__) instead of inheriting directly from the base volume driver
    can subclass this so callers are able to both detect the proxy and
    retrieve the actual driver object in use.
    """
    def _get_driver(self):
        """Return the wrapped driver object, or None when absent.

        Proxies may overload this; the default simply exposes the
        ``driver`` attribute when one exists.
        """
        try:
            return self.driver
        except AttributeError:
            return None
class ISCSIDriver(VolumeDriver):
    """Executes commands relating to ISCSI volumes.
    We make use of model provider properties as follows:
    ``provider_location``
    if present, contains the iSCSI target information in the same
    format as an ietadm discovery
    i.e. '<ip>:<port>,<portal> <target IQN>'
    ``provider_auth``
    if present, contains a space-separated triple:
    '<auth method> <auth username> <auth password>'.
    `CHAP` is the only auth_method in use at the moment.
    """
    def __init__(self, *args, **kwargs):
        super(ISCSIDriver, self).__init__(*args, **kwargs)
    def _do_iscsi_discovery(self, volume):
        """Run iscsiadm sendtargets discovery and return the matching line.

        Returns None when discovery fails or no target matches both the
        configured portal IP and the volume name.
        """
        # TODO(justinsb): Deprecate discovery and use stored info
        # NOTE(justinsb): Discovery won't work with CHAP-secured targets (?)
        LOG.warn(_LW("ISCSI provider_location not "
                     "stored, using discovery"))
        volume_name = volume['name']
        try:
            # NOTE(griff) We're doing the split straight away which should be
            # safe since using '@' in hostname is considered invalid
            (out, _err) = self._execute('iscsiadm', '-m', 'discovery',
                                        '-t', 'sendtargets', '-p',
                                        volume['host'].split('@')[0],
                                        run_as_root=True)
        except processutils.ProcessExecutionError as ex:
            LOG.error(_LE("ISCSI discovery attempt failed for:%s") %
                      volume['host'].split('@')[0])
            LOG.debug("Error from iscsiadm -m discovery: %s" % ex.stderr)
            return None
        for target in out.splitlines():
            if (self.configuration.iscsi_ip_address in target
                    and volume_name in target):
                return target
        return None
    def _get_iscsi_properties(self, volume):
        """Gets iscsi configuration
        We ideally get saved information in the volume entity, but fall back
        to discovery if need be. Discovery may be completely removed in future
        The properties are:
        :target_discovered: boolean indicating whether discovery was used
        :target_iqn: the IQN of the iSCSI target
        :target_portal: the portal of the iSCSI target
        :target_lun: the lun of the iSCSI target
        :volume_id: the id of the volume (currently used by xen)
        :auth_method:, :auth_username:, :auth_password:
        the authentication details. Right now, either auth_method is not
        present meaning no authentication, or auth_method == `CHAP`
        meaning use CHAP with the specified credentials.
        :access_mode: the volume access mode allow client used
        ('rw' or 'ro' currently supported)
        """
        properties = {}
        location = volume['provider_location']
        if location:
            # provider_location is the same format as iSCSI discovery output
            properties['target_discovered'] = False
        else:
            location = self._do_iscsi_discovery(volume)
            if not location:
                msg = (_("Could not find iSCSI export for volume %s") %
                       (volume['name']))
                raise exception.InvalidVolume(reason=msg)
            LOG.debug("ISCSI Discovery: Found %s" % (location))
            properties['target_discovered'] = True
        # location format: '<ip>:<port>,<portal> <target IQN> [<lun>]'
        results = location.split(" ")
        properties['target_portal'] = results[0].split(",")[0]
        properties['target_iqn'] = results[1]
        try:
            properties['target_lun'] = int(results[2])
        except (IndexError, ValueError):
            # No explicit LUN in the location string; pick the historical
            # default for the LVM drivers with tgtadm/iseradm, else 0.
            if (self.configuration.volume_driver in
                    ['cinder.volume.drivers.lvm.LVMISCSIDriver',
                     'cinder.volume.drivers.lvm.LVMISERDriver',
                     'cinder.volume.drivers.lvm.ThinLVMVolumeDriver'] and
                    self.configuration.iscsi_helper in ('tgtadm', 'iseradm')):
                properties['target_lun'] = 1
            else:
                properties['target_lun'] = 0
        properties['volume_id'] = volume['id']
        auth = volume['provider_auth']
        if auth:
            # provider_auth format: '<method> <username> <password>'
            (auth_method, auth_username, auth_secret) = auth.split()
            properties['auth_method'] = auth_method
            properties['auth_username'] = auth_username
            properties['auth_password'] = auth_secret
        geometry = volume.get('provider_geometry', None)
        if geometry:
            (physical_block_size, logical_block_size) = geometry.split()
            properties['physical_block_size'] = physical_block_size
            properties['logical_block_size'] = logical_block_size
        encryption_key_id = volume.get('encryption_key_id', None)
        properties['encrypted'] = encryption_key_id is not None
        return properties
    def _run_iscsiadm(self, iscsi_properties, iscsi_command, **kwargs):
        # Run 'iscsiadm -m node' against the target/portal from
        # iscsi_properties, appending iscsi_command.
        check_exit_code = kwargs.pop('check_exit_code', 0)
        (out, err) = self._execute('iscsiadm', '-m', 'node', '-T',
                                   iscsi_properties['target_iqn'],
                                   '-p', iscsi_properties['target_portal'],
                                   *iscsi_command, run_as_root=True,
                                   check_exit_code=check_exit_code)
        LOG.debug("iscsiadm %s: stdout=%s stderr=%s" %
                  (iscsi_command, out, err))
        return (out, err)
    def _run_iscsiadm_bare(self, iscsi_command, **kwargs):
        # Run iscsiadm without the node/target selection arguments.
        check_exit_code = kwargs.pop('check_exit_code', 0)
        (out, err) = self._execute('iscsiadm',
                                   *iscsi_command,
                                   run_as_root=True,
                                   check_exit_code=check_exit_code)
        LOG.debug("iscsiadm %s: stdout=%s stderr=%s" %
                  (iscsi_command, out, err))
        return (out, err)
    def _iscsiadm_update(self, iscsi_properties, property_key, property_value,
                         **kwargs):
        # Convenience wrapper for 'iscsiadm ... --op update -n KEY -v VALUE'.
        iscsi_command = ('--op', 'update', '-n', property_key,
                         '-v', property_value)
        return self._run_iscsiadm(iscsi_properties, iscsi_command, **kwargs)
    def initialize_connection(self, volume, connector):
        """Initializes the connection and returns connection info.
        The iscsi driver returns a driver_volume_type of 'iscsi'.
        The format of the driver data is defined in _get_iscsi_properties.
        Example return value::
        {
        'driver_volume_type': 'iscsi'
        'data': {
        'target_discovered': True,
        'target_iqn': 'iqn.2010-10.org.openstack:volume-00000001',
        'target_portal': '127.0.0.0.1:3260',
        'volume_id': 1,
        'access_mode': 'rw'
        }
        }
        """
        # NOTE(jdg): Yes, this is duplicated in the volume/target
        # drivers, for now leaving it as there are 3'rd party
        # drivers that don't use target drivers, but inherit from
        # this base class and use this init data
        iscsi_properties = self._get_iscsi_properties(volume)
        return {
            'driver_volume_type': 'iscsi',
            'data': iscsi_properties
        }
    def validate_connector(self, connector):
        # iSCSI drivers require the initiator information
        required = 'initiator'
        if required not in connector:
            err_msg = (_LE('The volume driver requires %(data)s '
                           'in the connector.'), {'data': required})
            LOG.error(*err_msg)
            raise exception.InvalidConnectorException(missing=required)
    def terminate_connection(self, volume, connector, **kwargs):
        # Nothing to tear down for the generic iSCSI case.
        pass
    def get_volume_stats(self, refresh=False):
        """Get volume stats.
        If 'refresh' is True, run update the stats first.
        """
        if refresh:
            self._update_volume_stats()
        return self._stats
    def _update_volume_stats(self):
        """Retrieve stats info from volume group."""
        LOG.debug("Updating volume stats")
        data = {}
        backend_name = self.configuration.safe_get('volume_backend_name')
        data["volume_backend_name"] = backend_name or 'Generic_iSCSI'
        data["vendor_name"] = 'Open Source'
        data["driver_version"] = '1.0'
        data["storage_protocol"] = 'iSCSI'
        data["pools"] = []
        if self.pools:
            # One stats entry per configured pool.
            for pool in self.pools:
                new_pool = {}
                new_pool.update(dict(
                    pool_name=pool,
                    total_capacity_gb=0,
                    free_capacity_gb=0,
                    provisioned_capacity_gb=0,
                    reserved_percentage=100,
                    QoS_support=False
                ))
                data["pools"].append(new_pool)
        else:
            # No pool configured, the whole backend will be treated as a pool
            single_pool = {}
            single_pool.update(dict(
                pool_name=data["volume_backend_name"],
                total_capacity_gb=0,
                free_capacity_gb=0,
                provisioned_capacity_gb=0,
                reserved_percentage=100,
                QoS_support=False
            ))
            data["pools"].append(single_pool)
        self._stats = data
class FakeISCSIDriver(ISCSIDriver):
    """Logs calls instead of executing."""
    def __init__(self, *args, **kwargs):
        # Replace the real executor with the logging stub below.
        super(FakeISCSIDriver, self).__init__(execute=self.fake_execute,
                                              *args, **kwargs)
    def create_volume(self, volume):
        pass
    def check_for_setup_error(self):
        """No setup necessary in fake mode."""
        pass
    def initialize_connection(self, volume, connector):
        # Minimal iSCSI connection info for testing.
        return {
            'driver_volume_type': 'iscsi',
            'data': {'access_mode': 'rw'}
        }
    def terminate_connection(self, volume, connector, **kwargs):
        pass
    @staticmethod
    def fake_execute(cmd, *_args, **_kwargs):
        """Execute that simply logs the command."""
        LOG.debug("FAKE ISCSI: %s", cmd)
        return (None, None)
    def create_volume_from_snapshot(self, volume, snapshot):
        """Creates a volume from a snapshot."""
        pass
    def create_cloned_volume(self, volume, src_vref):
        """Creates a clone of the specified volume."""
        pass
    def delete_volume(self, volume):
        """Deletes a volume."""
        pass
    def create_snapshot(self, snapshot):
        """Creates a snapshot."""
        pass
    def delete_snapshot(self, snapshot):
        """Deletes a snapshot."""
        pass
    def local_path(self, volume):
        # Fixed fake path under /tmp; never touches real storage.
        return '/tmp/volume-%s' % volume.id
    def ensure_export(self, context, volume):
        """Synchronously recreates an export for a volume."""
        pass
    def create_export(self, context, volume):
        """Exports the volume. Can optionally return a Dictionary of changes
        to the volume object to be persisted.
        """
        pass
    def remove_export(self, context, volume):
        """Removes an export for a volume."""
        pass
class ISERDriver(ISCSIDriver):
    """Executes commands relating to ISER volumes.
    We make use of model provider properties as follows:
    ``provider_location``
    if present, contains the iSER target information in the same
    format as an ietadm discovery
    i.e. '<ip>:<port>,<portal> <target IQN>'
    ``provider_auth``
    if present, contains a space-separated triple:
    '<auth method> <auth username> <auth password>'.
    `CHAP` is the only auth_method in use at the moment.
    """
    def __init__(self, *args, **kwargs):
        super(ISERDriver, self).__init__(*args, **kwargs)
        # for backward compatibility
        # Mirror the deprecated iser_* options onto the iscsi_* names the
        # inherited code paths read.
        self.configuration.num_volume_device_scan_tries = \
            self.configuration.num_iser_scan_tries
        self.configuration.iscsi_num_targets = \
            self.configuration.iser_num_targets
        self.configuration.iscsi_target_prefix = \
            self.configuration.iser_target_prefix
        self.configuration.iscsi_ip_address = \
            self.configuration.iser_ip_address
        self.configuration.iscsi_port = self.configuration.iser_port
    def initialize_connection(self, volume, connector):
        """Initializes the connection and returns connection info.
        The iser driver returns a driver_volume_type of 'iser'.
        The format of the driver data is defined in _get_iser_properties.
        Example return value::
        {
        'driver_volume_type': 'iser'
        'data': {
        'target_discovered': True,
        'target_iqn':
        'iqn.2010-10.org.iser.openstack:volume-00000001',
        'target_portal': '127.0.0.0.1:3260',
        'volume_id': 1,
        }
        }
        """
        # Same property lookup as iSCSI; only the transport type differs.
        iser_properties = self._get_iscsi_properties(volume)
        return {
            'driver_volume_type': 'iser',
            'data': iser_properties
        }
    def _update_volume_stats(self):
        """Retrieve stats info from volume group."""
        # NOTE(review): unlike the iSCSI variant, the pool dicts here omit
        # provisioned_capacity_gb -- confirm whether that is intentional.
        LOG.debug("Updating volume stats")
        data = {}
        backend_name = self.configuration.safe_get('volume_backend_name')
        data["volume_backend_name"] = backend_name or 'Generic_iSER'
        data["vendor_name"] = 'Open Source'
        data["driver_version"] = '1.0'
        data["storage_protocol"] = 'iSER'
        data["pools"] = []
        if self.pools:
            for pool in self.pools:
                new_pool = {}
                new_pool.update(dict(
                    pool_name=pool,
                    total_capacity_gb=0,
                    free_capacity_gb=0,
                    reserved_percentage=100,
                    QoS_support=False
                ))
                data["pools"].append(new_pool)
        else:
            # No pool configured, the whole backend will be treated as a pool
            single_pool = {}
            single_pool.update(dict(
                pool_name=data["volume_backend_name"],
                total_capacity_gb=0,
                free_capacity_gb=0,
                reserved_percentage=100,
                QoS_support=False
            ))
            data["pools"].append(single_pool)
        self._stats = data
class FakeISERDriver(FakeISCSIDriver):
    """Logs calls instead of executing."""

    def __init__(self, *args, **kwargs):
        super(FakeISERDriver, self).__init__(execute=self.fake_execute,
                                             *args, **kwargs)

    def initialize_connection(self, volume, connector):
        """Return a minimal iSER connection-info dict with empty data."""
        return {'driver_volume_type': 'iser', 'data': {}}

    @staticmethod
    def fake_execute(cmd, *_args, **_kwargs):
        """Execute that simply logs the command."""
        LOG.debug("FAKE ISER: %s", cmd)
        return (None, None)
class FibreChannelDriver(VolumeDriver):
    """Executes commands relating to Fibre Channel volumes."""

    def __init__(self, *args, **kwargs):
        super(FibreChannelDriver, self).__init__(*args, **kwargs)

    def initialize_connection(self, volume, connector):
        """Initializes the connection and returns connection info.

        The driver returns a driver_volume_type of 'fibre_channel'.
        The target_wwn can be a single entry or a list of wwns that
        correspond to the list of remote wwn(s) that will export the volume.
        Example return values:

            {
                'driver_volume_type': 'fibre_channel'
                'data': {
                    'target_discovered': True,
                    'target_lun': 1,
                    'target_wwn': '1234567890123',
                    'access_mode': 'rw'
                }
            }

        or

            {
                'driver_volume_type': 'fibre_channel'
                'data': {
                    'target_discovered': True,
                    'target_lun': 1,
                    'target_wwn': ['1234567890123', '0987654321321'],
                    'access_mode': 'rw'
                }
            }
        """
        msg = _("Driver must implement initialize_connection")
        raise NotImplementedError(msg)

    def validate_connector(self, connector):
        """Fail if connector doesn't contain all the data needed by driver.

        Do a check on the connector and ensure that it has wwnns, wwpns.
        """
        self.validate_connector_has_setting(connector, 'wwpns')
        self.validate_connector_has_setting(connector, 'wwnns')

    @staticmethod
    def validate_connector_has_setting(connector, setting):
        """Test for non-empty setting in connector.

        :raises exception.InvalidConnectorException: when *setting* is
            missing from the connector dict or is empty.
        """
        if setting not in connector or not connector[setting]:
            # Log with a lazy %-format mapping argument directly; the
            # original built a (message, dict) tuple and star-unpacked it
            # into LOG.error, which obscured intent for no benefit.
            LOG.error(_LE(
                "FibreChannelDriver validate_connector failed. "
                "No '%(setting)s'. Make sure HBA state is Online."),
                {'setting': setting})
            raise exception.InvalidConnectorException(missing=setting)
| {
"content_hash": "4c439b047dd997d3c4c990d19ce9d44d",
"timestamp": "",
"source": "github",
"line_count": 1364,
"max_line_length": 79,
"avg_line_length": 40.25513196480939,
"alnum_prop": 0.5747978436657682,
"repo_name": "Accelerite/cinder",
"id": "11fa301b549f6b3029551c72d781f84785f12016",
"size": "55639",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinder/volume/driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "3322"
},
{
"name": "Python",
"bytes": "10152545"
},
{
"name": "Shell",
"bytes": "9905"
}
],
"symlink_target": ""
} |
"""Yaml CLI formatter."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from treadmill import yamlwrapper as yaml
def format(obj):  # pylint: disable=W0622
    """Render *obj* as an explicitly delimited YAML document."""
    dump_options = {
        'default_flow_style': False,
        'explicit_start': True,
        'explicit_end': True,
    }
    return yaml.dump(obj, **dump_options)
| {
"content_hash": "3c5bccb32dfd55a7306c25e4c07209e6",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 52,
"avg_line_length": 29.4375,
"alnum_prop": 0.6411889596602972,
"repo_name": "captiosus/treadmill",
"id": "a98a0a3b4eac04d06240af72e0ca7fa7e2c4a7d4",
"size": "471",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "treadmill/formatter/yamlfmt.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "570"
},
{
"name": "Python",
"bytes": "2598791"
},
{
"name": "Ruby",
"bytes": "3712"
},
{
"name": "Shell",
"bytes": "58099"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# Route everything to the main_site app, and serve the bundled font files
# through Django's static view rooted at STATIC_ROOT/main_site/fonts.
# NOTE(review): django.views.static.serve is a development-style server;
# confirm a real web server handles /fonts/ in production.
urlpatterns = patterns('',
    url(r'', include('main_site.urls'), name="main_site"),
    url(r'^fonts/(?P<path>.*)$', 'django.views.static.serve', {
        'document_root': "%s/main_site/fonts" % settings.STATIC_ROOT,
    }),
)
| {
"content_hash": "5eb0909a0fc794f848128a685d3af6d2",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 73,
"avg_line_length": 31.5,
"alnum_prop": 0.673469387755102,
"repo_name": "skoczen/encore-poem",
"id": "b887e2a10f426ceb8103e22b46007414fa2d83c8",
"size": "441",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project/urls.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "21161"
},
{
"name": "Python",
"bytes": "19250"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
# Populate the admin registry from each installed app's admin module.
admin.autodiscover()

# Survey-response index, per-survey response form, and the Django admin.
urlpatterns = patterns('',
    url(r'^$', 'surveymaker.views.all_survey_responses', name='surveymaker_index'),
    url(r'^(?P<survey_slug>.*)/new/$', 'surveymaker.views.survey_form', name='surveymaker_form'),
    url(r'^admin/', include(admin.site.urls)),
)
| {
"content_hash": "98e24cf02c9d2ea868a2b8fa3fce51c0",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 97,
"avg_line_length": 37.5,
"alnum_prop": 0.704,
"repo_name": "willhardy/dynamic-models",
"id": "0137fea0a86c5b031d4036ee9dc6f8b56b6068e4",
"size": "375",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "35415"
},
{
"name": "Shell",
"bytes": "4539"
}
],
"symlink_target": ""
} |
XXXXXXXXX XXXXX
XXXXXX
XXXXXX
XXXXX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX XXXXXXXX X XXXXXXXXXX XXXXXXXXXXXXXXXXXX
XXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXX
XXXXXXXXX
XXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXX
XXXXXX
XXXXXXXXX
XXXX XXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXX
XXXX XXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXX X XXXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXX XXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX X XXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXX XXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXX XXXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXXXX
XXXX XXXXXXXXXXXXXXXXX
XXXXXXXXX XXXXX XX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXX XXXX XXXXXXXXXXX XXXXX XXX XXX XXXXXXXXXX XXX XXXXXXXXXXXX
XXXXXXXXXXXXX XXXXX XXX X XXXX XXXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXX XXX XXXXXXXXXXXX XX XXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXX XXXXXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXX XXXXXXXXXXXXX
XXXXXXXXXX XX XXXXXXXX XXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXX
XXXXXX
XXXXXX
XXXXXXXXXX
XXXXXXX
XXXXXXX | {
"content_hash": "9436b34553b19399403b5856af39a2b1",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 160,
"avg_line_length": 44.26744186046512,
"alnum_prop": 0.8534278959810875,
"repo_name": "dnaextrim/django_adminlte_x",
"id": "d983f61e3fbb53d92a53c575c8dbadd0d2f660bc",
"size": "3807",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "adminlte/static/plugins/datatables/extensions/Responsive/examples/index.html.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "487538"
},
{
"name": "HTML",
"bytes": "1939871"
},
{
"name": "JavaScript",
"bytes": "2949324"
},
{
"name": "PHP",
"bytes": "3841"
},
{
"name": "Python",
"bytes": "11030"
}
],
"symlink_target": ""
} |
import json
import argparse
import os
# CLI helper: list the pad types of a pad library, or print the mapping
# (key -> mapped_to) of one pad type.
parser = argparse.ArgumentParser(
    description="top module generation helper to show the optional fields of a given pad")
parser.add_argument('--padType', '-t', action='store', required=False,
                    help="The pad type to show the mapping for")
parser.add_argument('--showPadTypes', '-s', action='store_true', default=False,
                    help="A flag to list all the available pad types in a given library.")
parser.add_argument('--padsLibs', '-p', action='store', required=True,
                    help="The pad libraries json description")
parser.add_argument('--padsLibName', '-n', action='store', required=True,
                    help="The name of the pad library to use from the given padsLibs")

args = parser.parse_args()
padType = args.padType
padsLibs = args.padsLibs
showPadTypes = args.showPadTypes
padsLibName = args.padsLibName

if padType is None and not showPadTypes:
    print("Please either specify a pad type to show the mapping for, or use the showPadTypes flag to list all possible types in the library")
else:
    # Description of the libraries, parsed into a list of dicts.
    if not os.path.exists(padsLibs):
        raise IOError("file not found: " + padsLibs)
    # Use a context manager so the file is closed even if json.load raises
    # (the original left the handle open on a parse error).
    with open(padsLibs, 'r') as pads_libs_file:
        padsLibs_json = json.load(pads_libs_file)

    # Find the requested pad library by name.
    padsLib_json = next(
        (lib for lib in padsLibs_json if lib["library_name"] == padsLibName),
        None)
    if padsLib_json is None:
        raise Exception("Used Pad Lib is not found in the given Pad Libraries JSON")

    if showPadTypes:
        # Only pads that actually carry a mapping are listed.
        for pad in padsLib_json["pads"]:
            if "mapping" in pad:
                print(pad["type"])

    if padType is not None:
        for pad in padsLib_json["pads"]:
            if padType == pad["type"]:
                print("print mapping for pad", padType)
                print("key : mapped_to")
                for key in pad["mapping"]:
                    print(key, " : ", pad["mapping"][key])
                break
| {
"content_hash": "3018f380a676fedcbd90f660994d15da",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 145,
"avg_line_length": 40.50847457627118,
"alnum_prop": 0.5761506276150627,
"repo_name": "efabless/openlane",
"id": "029cdf82105887f96ddc87855184772d5950dfaa",
"size": "2976",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/topModuleGen/src/padHelper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Coq",
"bytes": "128005"
},
{
"name": "Dockerfile",
"bytes": "52446"
},
{
"name": "HTML",
"bytes": "218625"
},
{
"name": "Logos",
"bytes": "26097"
},
{
"name": "Makefile",
"bytes": "2470"
},
{
"name": "Perl",
"bytes": "5589"
},
{
"name": "Python",
"bytes": "246219"
},
{
"name": "Shell",
"bytes": "21834"
},
{
"name": "Tcl",
"bytes": "168706"
},
{
"name": "Verilog",
"bytes": "4684813"
}
],
"symlink_target": ""
} |
import wx
from src.wizard.controller.frmSampFeatSelectPanel import SampFeatSelectPanel
from src.wizard.controller.frmVariableSelectPanel import VariableSelectPanel
from src.wizard.controller.frmUnitSelectPanel import UnitSelectPanel
from src.wizard.controller.frmProcLevelSelectPanel import ProcLevelSelectPanel
from src.wizard.controller.frmActionsSelectPanel import ActionsSelectPanel
# from src.wizard.view.clsResultPage import ResultPageView
from src.wizard.controller.frmResultSummaryPanel import ResultSummaryPanel
from datetime import datetime
class WizardDialog(wx.Dialog):
    """Multi-page wizard dialog for creating or editing a Result.

    Pages are wx panel classes added with addPage(); Cancel/Back/Next
    buttons walk the page list, with the right-hand button becoming
    "Finish" on the last page.  When constructed with an existing result,
    Finish updates that result through the database session instead of
    creating a new one.
    """

    def __init__(self, parent, database=None, title="Wizard Dialog",
            result=None,
            size=wx.DefaultSize,
            pos=wx.DefaultPosition,
            style=wx.DEFAULT_DIALOG_STYLE):
        # database: object exposing getUpdateSession(); only used when
        # editing an existing result.  result: result to edit, or None.
        self.existingResult = result
        # Two-phase creation (PreDialog/PostCreate) -- wxPython classic API.
        pre = wx.PreDialog()
        pre.Create(parent, wx.ID_ANY, title, pos, size, style)
        self.PostCreate(pre)
        self.mainSizer = wx.BoxSizer(wx.VERTICAL)
        self.pnlSizer = wx.BoxSizer(wx.VERTICAL)
        self.btnSizer = wx.BoxSizer(wx.HORIZONTAL)
        # Ordered list of page panels; currentPnl is the one shown.
        self.pnlList = []
        self.currentPnl = None
        self.database = database
        self.addButtons()
        self.centerSelf()
        self.SetSizer(self.mainSizer)
        self.mainSizer.Fit(self)
        # Value returned by ShowModal(); set to wx.ID_OK on success.
        self.returnValue = wx.ID_ANY
        self.Bind(wx.EVT_CLOSE, self.on_close)

    def on_close(self, event):
        """Destroy the dialog when its window is closed."""
        self.Destroy()

    def centerSelf(self):
        """Center the dialog over its parent window."""
        self.CenterOnParent()

    def addButtons(self):
        """Create the Cancel/Back/Finish buttons and attach all sizers."""
        self.btnCancel = wx.Button(self, wx.ID_CANCEL, "Cancel")
        self.btnNext = wx.Button(self, wx.ID_ANY, "Finish")
        self.btnPrev = wx.Button(self, wx.ID_ANY, "< Back")
        self.btnSizer.Add(self.btnCancel, 0,
            wx.ALL|wx.ALIGN_RIGHT, 5)
        self.btnSizer.Add(self.btnPrev, 0,
            wx.ALL|wx.ALIGN_RIGHT, 5)
        self.btnSizer.Add(self.btnNext, 0,
            wx.ALL|wx.ALIGN_RIGHT, 5)
        self.mainSizer.Add(self.pnlSizer, 1, wx.ALL|wx.EXPAND, 5)
        self.mainSizer.Add(self.btnSizer, 0, wx.ALL|wx.ALIGN_RIGHT, 5)
        # Navigation stays disabled until pages are added.
        self.btnPrev.Enable(False)
        self.btnNext.Enable(False)
        self.btnNext.Bind(wx.EVT_BUTTON, self.onFinish)
        self.btnPrev.Bind(wx.EVT_BUTTON, self.onPrev)

    def addPage(self, pnl, **kwargs):
        """Instantiate page class *pnl* and append it to the wizard.

        Extra keyword arguments are forwarded to the panel constructor.
        """
        newPnl = pnl(self, existing_result= self.existingResult, **kwargs)
        newPnl.Hide()
        self.pnlList.append(newPnl)
        self.pnlSizer.Add(newPnl, 1, wx.ALL|wx.EXPAND, 5)
        self.CenterOnParent()
        # Once a page exists, the right-hand button acts as "Next >".
        if len(self.pnlList) == 1:
            self.btnNext.Unbind(wx.EVT_BUTTON)
            self.btnNext.SetLabel("Next >")
            self.btnNext.Bind(wx.EVT_BUTTON, self.onNext)

    def getSelections(self):
        """Return {index: selected object} for pages exposing a list_ctrl.

        Pages without a list_ctrl attribute are skipped entirely.
        """
        data = {}
        i = 0
        for pnl in self.pnlList:
            try:
                data[i] = pnl.list_ctrl.GetSelectedObject()
                i = i + 1
            except AttributeError:
                continue
        return data

    def ShowModal(self):
        """Show the first page (if any) and run the dialog modally.

        Returns self.returnValue: wx.ID_OK when a result was created or
        updated, wx.ID_ANY otherwise.
        """
        if self.pnlList:
            self.currentPnl = self.pnlList[0]
            self.currentPnl.Show()
            self.mainSizer.Fit(self)
            self.CenterOnParent()
        super(WizardDialog, self).ShowModal()
        return self.returnValue

    # ********************** #
    # *** Event Handlers *** #
    # ********************** #

    def onFinish(self, event):
        """Finish handler: create a new result or update the existing one."""
        # self.result = self.pnlList[-1].createResult()
        #
        # if self.existingResult:
        #     print self.existingResult
        # else:
        #     if self.result:
        #         self.returnValue = wx.ID_OK
        #         self.Close()
        #
        # event.Skip()
        if self.existingResult is None:
            self.__create_new_result()
        else:
            self.__update_existing_result()

    def __create_new_result(self):
        # Delegate creation to the last page; close with ID_OK on success.
        self.result = self.pnlList[-1].createResult()
        if self.result:
            self.returnValue = wx.ID_OK
            self.Close()

    def __update_existing_result(self):
        # Copy every edited field from the summary panel back onto the
        # existing result, then persist via the database update session.
        if not isinstance(self.currentPnl, ResultSummaryPanel):
            raise Exception("self.currentPanel must be of type ResultSummaryPanel")
        result = self.existingResult
        result.SampledMediumCV = self.currentPnl.comboSamp.GetValue()
        result.AggregationStatisticCV = self.currentPnl.comboAgg.GetValue()
        if self.currentPnl.comboStatus.GetValue() != "":
            result.StatusCV = self.currentPnl.comboStatus.GetValue()
        # Resolve the unit names chosen in the combos back to unit IDs.
        for unit in self.currentPnl.length_units:
            if unit.UnitsName == self.currentPnl.comboXUnits.GetValue():
                result.XLocationUnitsID = unit.UnitsID
            if unit.UnitsName == self.currentPnl.comboYUnits.GetValue():
                result.YLocationUnitsID = unit.UnitsID
            if unit.UnitsName == self.currentPnl.comboZUnits.GetValue():
                result.ZLocationUnitsID = unit.UnitsID
        for time in self.currentPnl.time_units:
            if time.UnitsName == self.currentPnl.comboIntendedUnits.GetValue():
                result.IntendedTimeSpacingUnitsID = time.UnitsID
        # Merge the date picker (date part) with the time control (time
        # part) into one datetime.
        date = self.currentPnl.datePickerResult.GetValue()
        year = date.Year
        month = date.Month
        day = date.Day
        date = self.currentPnl.timeResult.GetWxDateTime()
        hour = date.Hour
        minute = date.Minute
        second = date.Second
        date = datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second)
        result.ResultDateTime = date
        result.XLocation = self.currentPnl.txtX.GetValue()
        result.YLocation = self.currentPnl.txtY.GetValue()
        result.ZLocation = self.currentPnl.txtZ.GetValue()
        result.IntendedTimeSpacing = self.currentPnl.txtIntended.GetValue()
        # self.database.getUpdateSession().updateResult(pass in result object)
        session = self.database.getUpdateSession()
        session.updateResult(result=result)
        self.returnValue = wx.ID_OK
        self.Close()

    def onPrev(self, event):
        """Back handler: show the previous page and relabel the buttons."""
        self.currentPnl.Hide()
        self.currentPnl = self.pnlList[self.pnlList.index(self.currentPnl)-1]
        self.currentPnl.Show()
        self.Layout()
        self.mainSizer.Fit(self)
        if self.currentPnl == self.pnlList[0]:
            self.btnPrev.Enable(False)
        else:
            self.btnPrev.Enable(True)
        # On the last page the right-hand button acts as Finish.
        if self.currentPnl == self.pnlList[-1]:
            self.btnNext.SetLabel("Finish")
            self.btnNext.Unbind(wx.EVT_BUTTON)
            self.btnNext.Bind(wx.EVT_BUTTON, self.onFinish)
        else:
            self.btnNext.SetLabel("Next >")
            self.btnNext.Unbind(wx.EVT_BUTTON)
            self.btnNext.Bind(wx.EVT_BUTTON, self.onNext)
        event.Skip()

    def onNext(self, event):
        """Next handler: show the next page and relabel the buttons."""
        # NOTE(review): btnNext is disabled here but this method never
        # re-enables it -- presumably the pages re-enable it once their
        # input is valid; confirm against the panel implementations.
        self.btnNext.Enable(False)
        self.currentPnl.Hide()
        self.currentPnl = self.pnlList[self.pnlList.index(self.currentPnl)+1]
        self.currentPnl.Show()
        self.Layout()
        self.mainSizer.Fit(self)
        if self.currentPnl == self.pnlList[0]:
            self.btnPrev.Enable(False)
        else:
            self.btnPrev.Enable(True)
        if self.currentPnl == self.pnlList[-1]:
            self.CenterOnParent()
            self.currentPnl.check_required_fields()
            self.btnNext.SetLabel("Finish")
            self.btnNext.Unbind(wx.EVT_BUTTON)
            self.btnNext.Bind(wx.EVT_BUTTON, self.onFinish)
        else:
            self.btnNext.SetLabel("Next >")
            self.btnNext.Unbind(wx.EVT_BUTTON)
            self.btnNext.Bind(wx.EVT_BUTTON, self.onNext)
        event.Skip()
if __name__ == '__main__':
    # Manual smoke test: build the wizard with every selection page and
    # show it without a parent window or database connection.
    app = wx.App(False)
    wiz = WizardDialog(None)
    wiz.addPage(SampFeatSelectPanel)
    wiz.addPage(VariableSelectPanel)
    wiz.addPage(UnitSelectPanel)
    wiz.addPage(ProcLevelSelectPanel)
    wiz.addPage(ActionsSelectPanel)
    # wiz.addPage(ResultPageView)
    wiz.ShowModal()
    app.MainLoop()
| {
"content_hash": "4ba063d94e9b7e4a43308f709f6258fa",
"timestamp": "",
"source": "github",
"line_count": 235,
"max_line_length": 97,
"avg_line_length": 34.52765957446808,
"alnum_prop": 0.6093172294799113,
"repo_name": "ODM2/ODM2StreamingDataLoader",
"id": "6fd783ddf50ba8daaa41bd43da4402dd8b956881",
"size": "8114",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/wizard/controller/WizardDialog.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Inno Setup",
"bytes": "4394"
},
{
"name": "Python",
"bytes": "476381"
},
{
"name": "Shell",
"bytes": "787"
}
],
"symlink_target": ""
} |
from parts import (
ArmGrips,
ArmWrist,
ArmElbow,
ArmShoulder,
ArmBase,
ArmLed,
)
from constants import (
PART_BASE,
PART_ELBOW,
PART_GRIPS,
PART_LED,
PART_SHOULDER,
PART_WRIST
)
from device import ArmDevice
from locker import on_lock
class ArmManager(object):
grips = ArmGrips()
wrist = ArmWrist()
elbow = ArmElbow()
shoulder = ArmShoulder()
base = ArmBase()
led = ArmLed()
parts = {
PART_GRIPS: grips,
PART_WRIST: wrist,
PART_ELBOW: elbow,
PART_SHOULDER: shoulder,
PART_BASE: base,
PART_LED: led,
}
@classmethod
def get_part(cls, part_id):
return cls.parts.get(part_id)
@classmethod
def is_on(cls):
return ArmDevice.is_on()
@classmethod
@on_lock
def _action(cls, part_id, duration):
is_acted = False
if part_id in cls.parts.keys() and duration:
try:
is_acted = cls.get_part(part_id).action(duration)
except Exception as e:
print e
pass
return is_acted
@classmethod
def decr(cls, part_id, duration, *args):
duration = -duration
return cls._action(part_id, duration)
@classmethod
def incr(cls, part_id, duration, *args):
return cls._action(part_id, duration)
| {
"content_hash": "071b3b248ea0a5623fce29994ee66086",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 65,
"avg_line_length": 21.153846153846153,
"alnum_prop": 0.5709090909090909,
"repo_name": "mapler/tuesday",
"id": "260b3a51f3120c7e09b18839cd90fbfc02413057",
"size": "1400",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "arm/manager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "755"
},
{
"name": "HTML",
"bytes": "4479"
},
{
"name": "JavaScript",
"bytes": "2114"
},
{
"name": "Python",
"bytes": "19174"
}
],
"symlink_target": ""
} |
"""Simple Filter support"""
import re
import fnmatch
class BaseFilter(object):
    """Filter a string based on a list of regular expression or glob patterns.

    This should be inherited and the __call__ overriden with a real
    implementation
    """
    __slots__ = ('patterns', '_re_options')

    def __init__(self, patterns, case_insensitive=True):
        self.patterns = list(patterns)
        # Multiline + unicode always; add IGNORECASE when requested.
        flags = re.M | re.U
        if case_insensitive:
            flags |= re.I
        self._re_options = flags

    def add_glob(self, glob):
        """Add a glob pattern to this filter

        Internally all globs are converted to regular expressions using
        `fnmatch.translate()`

        :param glob: glob pattern to add
        :type glob: str
        """
        # Delegate to add_regex with the translated pattern.
        self.add_regex(fnmatch.translate(glob))

    def add_regex(self, regex):
        """Add a regular expression pattern to this filter

        :param regex: regular expression pattern to add to this filter.
        :type regex: str
        """
        self.patterns.append(regex)

    def __call__(self, item):
        """Run this filter - return True if filtered and False otherwise.

        :param item: item to check against this filter
        :type item: str
        """
        raise NotImplementedError()

    def __repr__(self):
        return '%s(patterns=%r)' % (self.__class__.__name__, self.patterns)
class IncludeFilter(BaseFilter):
    """Include objects that match at least one assigned pattern.

    (The previous docstring said "match *all*", but the implementation
    keeps an item as soon as any single pattern matches.)
    """

    def __call__(self, item):
        """Return True if *item* should be filtered out.

        False (keep the item) when any pattern matches; True otherwise.
        """
        return not any(re.match(pattern, item, self._re_options)
                       for pattern in self.patterns)
class ExcludeFilter(BaseFilter):
    """Exclude objects that match any filter"""

    def __call__(self, item):
        """Return True (filter out) when *item* matches any pattern."""
        options = self._re_options
        return any(re.match(pattern, item, options) is not None
                   for pattern in self.patterns)
def exclude_glob(*pattern):
    """Create an exclusion filter from a glob pattern"""
    translated = [fnmatch.translate(glob) for glob in pattern]
    return ExcludeFilter(translated)
def include_glob(*pattern):
    """Create an inclusion filter from glob patterns"""
    translated = [fnmatch.translate(glob) for glob in pattern]
    return IncludeFilter(translated)
def include_glob_qualified(*pattern):
    """Create an inclusion filter from glob patterns

    Additionally ensure the pattern is for a qualified table name.
    If not '.' is found in the name, this implies an implicit *.
    before the name
    """
    qualified = [pat if '.' in pat else '*.' + pat for pat in pattern]
    return include_glob(*qualified)
def exclude_glob_qualified(*pattern):
    """Create an exclusion filter from glob patterns

    Additionally ensure the pattern is for a qualified table name.
    If not '.' is found in the name, this implies an implicit *.
    before the name
    """
    qualified = [pat if '.' in pat else '*.' + pat for pat in pattern]
    return exclude_glob(*qualified)
| {
"content_hash": "0bd429f8c518de3566eae6771009b14c",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 78,
"avg_line_length": 29.126126126126128,
"alnum_prop": 0.6102690999072069,
"repo_name": "m00dawg/holland",
"id": "a70b2e068497ca4536a90b1bf89a90621f5b313e",
"size": "3233",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins/holland.lib.mysql/holland/lib/mysql/schema/filter.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "7884"
},
{
"name": "Python",
"bytes": "1720427"
},
{
"name": "Roff",
"bytes": "3761"
},
{
"name": "Shell",
"bytes": "5001"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.